Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-08-28 15:11:45 +00:00
parent 2f9b2f549e
commit 6d2f26ca35
47 changed files with 1079 additions and 245 deletions

View File

@ -106,7 +106,6 @@ Layout/EmptyLineAfterMagicComment:
- 'config/initializers/fog_core_patch.rb'
- 'config/initializers/rubyzip.rb'
- 'config/initializers/sawyer_patch.rb'
- 'config/initializers/seed_fu.rb'
- 'config/initializers/sidekiq.rb'
- 'config/routes/merge_requests.rb'
- 'danger/ce_ee_vue_templates/Dangerfile'

View File

@ -261,7 +261,6 @@ Style/IfUnlessModifier:
- 'config/initializers/jira.rb'
- 'config/initializers/kaminari_active_record_relation_methods_with_limit.rb'
- 'config/initializers/remove_active_job_execute_callback.rb'
- 'config/initializers/seed_fu.rb'
- 'config/initializers/stackprof.rb'
- 'config/initializers/validate_database_config.rb'
- 'config/initializers_before_autoloader/002_sidekiq.rb'

View File

@ -74,7 +74,7 @@ export default {
</script>
<template>
<div>
<div class="gl-flex gl-flex-wrap gl-items-center gl-gap-2">
<gl-sprintf
:message="
s__('MergeRequestDiffs|Commenting on lines %{selectStart}start%{selectEnd} to %{end}')
@ -89,7 +89,6 @@ export default {
:value="commentLineStart"
:options="commentLineOptions"
width="sm"
class="gl-w-auto gl-align-baseline"
@change="updateCommentLineStart"
/>
</template>

View File

@ -88,10 +88,13 @@ module Integrations
end
def notify(message, _opts)
context = project_level? ? { project: project } : { skip_project_check: true }
body = {
body: message.summary,
msgtype: 'm.text',
format: 'org.matrix.custom.html'
format: 'org.matrix.custom.html',
formatted_body: Banzai.render_and_post_process(message.summary, context)
}.compact_blank
header = { 'Content-Type' => 'application/json' }
@ -101,5 +104,9 @@ module Integrations
response if response.success?
end
def custom_data(data)
super(data).merge(markdown: true)
end
end
end
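
The new `formatted_body` field follows the Matrix convention for `m.room.message` events: an HTML body is sent alongside a plain-text fallback, with `format` set to `org.matrix.custom.html`. The `markdown: true` merged into `custom_data` presumably asks the chat message classes to emit markdown summaries that Banzai then renders to that HTML. A minimal sketch of the resulting payload, using hypothetical values in place of `message.summary` and the `Banzai.render_and_post_process` output:

```ruby
require 'json'

# Hypothetical stand-ins for message.summary and the Banzai-rendered HTML.
summary        = '**Pipeline passed** on `main`'
formatted_html = '<strong>Pipeline passed</strong> on <code>main</code>'

payload = {
  body: summary,                      # plain-text/markdown fallback
  msgtype: 'm.text',
  format: 'org.matrix.custom.html',   # tells Matrix clients to render formatted_body
  formatted_body: formatted_html
}

puts JSON.generate(payload)
```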

View File

@ -5,6 +5,7 @@ module VirtualRegistries
module Maven
class CachedResponse < ApplicationRecord
include FileStoreMounter
include Gitlab::SQL::Pattern
belongs_to :group
belongs_to :upstream, class_name: 'VirtualRegistries::Packages::Maven::Upstream', inverse_of: :cached_responses
@ -31,6 +32,10 @@ module VirtualRegistries
if: -> { object_storage_key.blank? && relative_path && upstream && upstream.registry }
attr_readonly :object_storage_key
scope :search_by_relative_path, ->(query) do
fuzzy_search(query, [:relative_path], use_minimum_char_limit: false)
end
private
def set_object_storage_key
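
The new scope delegates to `fuzzy_search` from `Gitlab::SQL::Pattern`, which builds a case-insensitive substring match; the trigram index added by the migration later in this commit is what makes that match cheap. A rough, standalone sketch of the kind of query it resolves to (the escaping here is illustrative, not GitLab's implementation):

```ruby
# Build the ILIKE pattern that fuzzy_search roughly corresponds to for a search term.
query = 'junit'
escaped = query.gsub(/[%_\\]/) { |c| "\\#{c}" } # escape LIKE wildcards

puts <<~SQL
  SELECT *
  FROM virtual_registries_packages_maven_cached_responses
  WHERE relative_path ILIKE '%#{escaped}%'
SQL
```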

View File

@ -47,7 +47,7 @@ module Packages
yield unless entry_path.is_a?(String)
tar_reader.rewind
entry = tar_reader.find { |e| e.full_name == entry_path }
entry = tar_reader.find { |e| path_for(e) == entry_path }
yield entry
end
@ -60,9 +60,16 @@ module Packages
# We cannot get the entry directly when using #reverse_each because
# TarReader closes the stream after iterating over all entries
tar_reader.reverse_each do |entry|
break entry.full_name if entry.full_name.match?(PACKAGE_JSON_ENTRY_REGEX)
entry_path = path_for(entry)
break entry_path if entry_path.match?(PACKAGE_JSON_ENTRY_REGEX)
end
end
def path_for(entry)
entry.full_name
rescue ::Gem::Package::TarInvalidError
entry.header.name
end
end
end
end
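
For context, `Gem::Package::TarReader` entries can raise `TarInvalidError` from `#full_name` when the tar header carries an invalid prefix, so the new `path_for` helper falls back to the raw header name. A small, self-contained sketch of the same read-and-rescue pattern against an in-memory tarball (the file name and contents are made up):

```ruby
require 'rubygems/package'
require 'stringio'

# Build a tiny tarball in memory with a single package.json entry.
io = StringIO.new
Gem::Package::TarWriter.new(io) do |tar|
  content = '{"name":"demo","version":"1.0.0"}'
  tar.add_file_simple('package/package.json', 0o644, content.bytesize) do |file|
    file.write(content)
  end
end

Gem::Package::TarReader.new(StringIO.new(io.string)) do |tar_reader|
  tar_reader.each do |entry|
    name =
      begin
        entry.full_name
      rescue Gem::Package::TarInvalidError
        entry.header.name # fall back to the raw header name
      end
    puts name # => "package/package.json"
  end
end
```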

View File

@ -0,0 +1,9 @@
---
name: dont_ignore_alternate_directories
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/438245
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/164082
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/480875
milestone: '17.4'
group: group::source code
type: beta
default_enabled: false

View File

@ -6,4 +6,4 @@ rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/473358
milestone: '17.3'
group: group::custom models
type: development
default_enabled: false
default_enabled: true

View File

@ -2,7 +2,7 @@
name: merge_request_dashboard
feature_issue_url: https://gitlab.com/groups/gitlab-org/-/epics/13448
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150661
rollout_issue_url:
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/480854
milestone: '17.0'
group: group::code review
type: wip

View File

@ -1,4 +1,23 @@
# frozen_string_literal: true
if Gitlab.ee?
SeedFu.fixture_paths += %W[ee/db/fixtures ee/db/fixtures/#{Rails.env}]
SeedFu.fixture_paths += %W[ee/db/fixtures ee/db/fixtures/#{Rails.env}] if Gitlab.ee?
require 'benchmark'
seed_timer = Module.new do
def run
duration = Benchmark.realtime { super }
printf "== Seeding took %.2f seconds\n", duration
end
private
def run_file(filename)
duration = Benchmark.realtime { super }
printf "== %s took %.2f seconds\n", filename, duration
end
end
SeedFu::Runner.prepend seed_timer
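
The anonymous module relies on `Module#prepend` placing itself ahead of `SeedFu::Runner` in the ancestor chain, so `super` reaches the original `run` and `run_file` while the wrapper measures them. A standalone illustration of the same pattern with an invented class:

```ruby
require 'benchmark'

# Invented stand-in for SeedFu::Runner.
class Seeder
  def run
    sleep 0.05 # pretend to load fixtures
  end
end

timer = Module.new do
  def run
    duration = Benchmark.realtime { super } # super reaches Seeder#run
    printf "== Seeding took %.2f seconds\n", duration
  end
end

Seeder.prepend(timer)
Seeder.new.run
# == Seeding took 0.05 seconds
```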

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
class AddIdxVirtualRegistriesPackagesMavenCachedResponsesRelativePathTrigram < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.4'
INDEX_NAME = 'idx_vreg_pkgs_maven_cached_responses_on_relative_path_trigram'
def up
add_concurrent_index :virtual_registries_packages_maven_cached_responses, :relative_path,
using: :gin, opclass: :gin_trgm_ops, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :virtual_registries_packages_maven_cached_responses, INDEX_NAME
end
end

View File

@ -1,31 +1,16 @@
# frozen_string_literal: true
class DropWorkItemWidgetDefinitionsNamespaceId < Gitlab::Database::Migration[2.2]
UNIQUE_INDEX_NAME = 'index_work_item_widget_definitions_on_namespace_type_and_name'
UNIQUE_DEFAULT_NAMESPACE_INDEX_NAME = 'index_work_item_widget_definitions_on_default_witype_and_name'
disable_ddl_transaction!
milestone '17.4'
def up
remove_column :work_item_widget_definitions, :namespace_id
# no-op
# Rescheduling migration as described in
# https://gitlab.com/gitlab-org/gitlab/-/issues/480503
# Making it safer to execute due to the locks that are required to acquire
end
def down
add_column :work_item_widget_definitions, :namespace_id, :bigint
add_concurrent_index :work_item_widget_definitions,
[:namespace_id, :work_item_type_id, :name],
unique: true,
name: UNIQUE_INDEX_NAME
add_concurrent_index :work_item_widget_definitions,
[:work_item_type_id, :name],
where: "namespace_id is NULL",
unique: true,
name: UNIQUE_DEFAULT_NAMESPACE_INDEX_NAME
add_concurrent_foreign_key :work_item_widget_definitions, :namespaces, column: :namespace_id, on_delete: :cascade
# no-op
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class DropWorkItemWidgetDefinitionsNamespaceIndex < Gitlab::Database::Migration[2.2]
UNIQUE_INDEX_NAME = 'index_work_item_widget_definitions_on_namespace_type_and_name'
milestone '17.4'
disable_ddl_transaction!
def up
remove_concurrent_index_by_name :work_item_widget_definitions, name: UNIQUE_INDEX_NAME
end
def down
add_concurrent_index :work_item_widget_definitions,
[:namespace_id, :work_item_type_id, :name],
unique: true,
name: UNIQUE_INDEX_NAME
end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class DropWorkItemWidgetDefinitionsDefaultNamespaceIndex < Gitlab::Database::Migration[2.2]
UNIQUE_DEFAULT_NAMESPACE_INDEX_NAME = 'index_work_item_widget_definitions_on_default_witype_and_name'
milestone '17.4'
disable_ddl_transaction!
def up
remove_concurrent_index_by_name :work_item_widget_definitions, name: UNIQUE_DEFAULT_NAMESPACE_INDEX_NAME
end
def down
add_concurrent_index :work_item_widget_definitions,
[:work_item_type_id, :name],
where: "namespace_id is NULL",
unique: true,
name: UNIQUE_DEFAULT_NAMESPACE_INDEX_NAME
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
class DropWorkItemWidgetDefinitionsNamespaceIdFk < Gitlab::Database::Migration[2.2]
FK_NAME = 'fk_ecf57512f7'
milestone '17.4'
disable_ddl_transaction!
def up
with_lock_retries do
remove_foreign_key_if_exists(
:work_item_widget_definitions,
:namespaces,
name: FK_NAME,
reverse_lock_order: true
)
end
end
def down
add_concurrent_foreign_key :work_item_widget_definitions,
:namespaces,
column: :namespace_id,
on_delete: :cascade,
name: FK_NAME
end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true
class DropWorkItemWidgetDefinitionsNamespaceIdAttempt2 < Gitlab::Database::Migration[2.2]
milestone '17.4'
def up
remove_column :work_item_widget_definitions, :namespace_id, if_exists: true
end
def down
add_column :work_item_widget_definitions, :namespace_id, :bigint, if_not_exists: true
end
end

View File

@ -0,0 +1,138 @@
# frozen_string_literal: true
class AddFkReferencingPCiPipelines < Gitlab::Database::Migration[2.2]
include Gitlab::Database::PartitioningMigrationHelpers
milestone '17.4'
disable_ddl_transaction!
FOREIGN_KEYS = [
{
source_table: :ci_pipeline_chat_data,
name: :fk_64ebfab6b3_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :ci_sources_pipelines,
name: :fk_d4e29af7d7_p_tmp,
column: [:source_partition_id, :source_pipeline_id]
},
{
source_table: :ci_sources_pipelines,
name: :fk_e1bad85861_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :ci_sources_projects,
name: :fk_rails_10a1eb379a_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :ci_pipeline_metadata,
name: :fk_rails_50c1e9ea10_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :ci_pipeline_messages,
name: :fk_rails_8d3b04e3e1_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :ci_pipelines_config,
name: :fk_rails_906c9a2533_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :ci_pipeline_artifacts,
name: :fk_rails_a9e811a466_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :ci_daily_build_group_report_results,
name: :fk_rails_ee072d13b3_p_tmp,
column: [:partition_id, :last_pipeline_id]
}
]
P_FOREIGN_KEYS = [
{
source_table: :p_ci_pipelines,
name: :fk_262d4c2d19_p_tmp,
column: [:auto_canceled_by_partition_id, :auto_canceled_by_id],
on_delete: :nullify
},
{
source_table: :p_ci_builds,
name: :fk_87f4cefcda_p_tmp,
column: [:upstream_pipeline_partition_id, :upstream_pipeline_id]
},
{
source_table: :p_ci_builds,
name: :fk_a2141b1522_p_tmp,
column: [:auto_canceled_by_partition_id, :auto_canceled_by_id],
on_delete: :nullify
},
{
source_table: :p_ci_builds,
name: :fk_d3130c9a7f_p_tmp,
column: [:partition_id, :commit_id]
},
{
source_table: :p_ci_pipeline_variables,
name: :fk_f29c5f4380_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :p_ci_stages,
name: :fk_fb57e6cc56_p_tmp,
column: [:partition_id, :pipeline_id]
},
{
source_table: :p_ci_builds_execution_configs,
name: :fk_rails_c26408d02c_p_tmp,
column: [:partition_id, :pipeline_id]
}
]
def up
FOREIGN_KEYS.each do |fk|
add_concurrent_foreign_key(fk[:source_table], :p_ci_pipelines, **with_defaults(fk))
prepare_async_foreign_key_validation(fk[:source_table], name: fk[:name])
end
P_FOREIGN_KEYS.each do |fk|
add_concurrent_partitioned_foreign_key(fk[:source_table], :p_ci_pipelines, **with_defaults(fk))
prepare_partitioned_async_foreign_key_validation(fk[:source_table], name: fk[:name])
end
end
def down
FOREIGN_KEYS.each do |fk|
unprepare_async_foreign_key_validation(fk[:source_table], name: fk[:name])
with_lock_retries do
remove_foreign_key_if_exists(fk[:source_table], name: fk[:name], reverse_lock_order: true)
end
end
P_FOREIGN_KEYS.each do |fk|
unprepare_partitioned_async_foreign_key_validation(fk[:source_table], name: fk[:name])
Gitlab::Database::PostgresPartitionedTable.each_partition(fk[:source_table]) do |partition|
with_lock_retries do
remove_foreign_key_if_exists partition.identifier, name: fk[:name], reverse_lock_order: true
end
end
end
end
private
def with_defaults(options)
options.except(:source_table).with_defaults(
target_column: [:partition_id, :id],
on_update: :cascade,
on_delete: :cascade,
reverse_lock_order: true,
validate: false
)
end
end
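
The `_tmp` suffix and the `validate: false` default matter here: the foreign keys are created as `NOT VALID` (cheap, no scan of existing rows) and validation is queued to run asynchronously, which is why the matching constraints in the `structure.sql` hunk below carry `NOT VALID`. A sketch of the two-step SQL this roughly corresponds to for one of the listed keys:

```ruby
fk = { source_table: :ci_pipeline_chat_data,
       name: :fk_64ebfab6b3_p_tmp,
       column: [:partition_id, :pipeline_id] }

puts <<~SQL
  -- Step 1: add the constraint without scanning existing rows.
  ALTER TABLE #{fk[:source_table]}
    ADD CONSTRAINT #{fk[:name]}
    FOREIGN KEY (#{fk[:column].join(', ')})
    REFERENCES p_ci_pipelines (partition_id, id)
    ON UPDATE CASCADE ON DELETE CASCADE
    NOT VALID;

  -- Step 2: run later, asynchronously, to check existing rows without long locks.
  ALTER TABLE #{fk[:source_table]} VALIDATE CONSTRAINT #{fk[:name]};
SQL
```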

View File

@ -0,0 +1 @@
a7b97be0ae36d65520be47e553a9338849681a9dc8ef069881d9790037eea41b

View File

@ -0,0 +1 @@
3cdc16dc3367ab1c1b1bca009de8c0f84ca160d2c5fdd432e533bf6d6b70d168

View File

@ -0,0 +1 @@
035ef8e722d17497e3c0ae660b6800e74d2168af38cd8186dc052c1db73c8a59

View File

@ -0,0 +1 @@
a56e39bc76ddcc40261911dd95793037c541a4823089945a88d2be4fe680e382

View File

@ -0,0 +1 @@
6ff0f6143ab1ea96f909e02de70043caa4a1e3194a4ff058963971af1c66985d

View File

@ -0,0 +1 @@
aa8f4b316e7bcbb92a8df81c757204efbdb8330b49c44b97053fb7d6c0c72f4a

View File

@ -26665,6 +26665,8 @@ CREATE INDEX idx_user_credit_card_validations_on_similar_to_meta_data ON user_cr
CREATE INDEX idx_user_details_on_provisioned_by_group_id_user_id ON user_details USING btree (provisioned_by_group_id, user_id);
CREATE INDEX idx_vreg_pkgs_maven_cached_responses_on_relative_path_trigram ON virtual_registries_packages_maven_cached_responses USING gin (relative_path gin_trgm_ops);
CREATE UNIQUE INDEX idx_vregs_pkgs_mvn_cached_resp_on_uniq_upstrm_id_and_rel_path ON virtual_registries_packages_maven_cached_responses USING btree (upstream_id, relative_path);
CREATE INDEX idx_vuln_reads_for_filtering ON vulnerability_reads USING btree (project_id, state, dismissal_reason, severity DESC, vulnerability_id DESC NULLS LAST);
@ -33082,6 +33084,9 @@ ALTER TABLE ONLY zoekt_repositories
ALTER TABLE ONLY ci_pipelines
ADD CONSTRAINT fk_262d4c2d19_p FOREIGN KEY (auto_canceled_by_partition_id, auto_canceled_by_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE SET NULL;
ALTER TABLE ONLY ci_pipelines
ADD CONSTRAINT fk_262d4c2d19_p_tmp FOREIGN KEY (auto_canceled_by_partition_id, auto_canceled_by_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE SET NULL NOT VALID;
ALTER TABLE ONLY user_namespace_callouts
ADD CONSTRAINT fk_27a69fd1bd FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -33427,6 +33432,9 @@ ALTER TABLE ONLY approval_group_rules
ALTER TABLE ONLY ci_pipeline_chat_data
ADD CONSTRAINT fk_64ebfab6b3_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipeline_chat_data
ADD CONSTRAINT fk_64ebfab6b3_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY cluster_agent_tokens
ADD CONSTRAINT fk_64f741f626 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -33616,6 +33624,9 @@ ALTER TABLE ONLY packages_package_files
ALTER TABLE p_ci_builds
ADD CONSTRAINT fk_87f4cefcda_p FOREIGN KEY (upstream_pipeline_partition_id, upstream_pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_builds
ADD CONSTRAINT fk_87f4cefcda_p_tmp FOREIGN KEY (upstream_pipeline_partition_id, upstream_pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY approval_group_rules_users
ADD CONSTRAINT fk_888a0df3b7 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
@ -33766,6 +33777,9 @@ ALTER TABLE ONLY subscription_add_on_purchases
ALTER TABLE p_ci_builds
ADD CONSTRAINT fk_a2141b1522_p FOREIGN KEY (auto_canceled_by_partition_id, auto_canceled_by_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE SET NULL;
ALTER TABLE ONLY ci_builds
ADD CONSTRAINT fk_a2141b1522_p_tmp FOREIGN KEY (auto_canceled_by_partition_id, auto_canceled_by_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE SET NULL NOT VALID;
ALTER TABLE ONLY protected_environment_approval_rules
ADD CONSTRAINT fk_a3cc825836 FOREIGN KEY (protected_environment_project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -34093,6 +34107,9 @@ ALTER TABLE ONLY dast_pre_scan_verifications
ALTER TABLE p_ci_builds
ADD CONSTRAINT fk_d3130c9a7f_p FOREIGN KEY (partition_id, commit_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_builds
ADD CONSTRAINT fk_d3130c9a7f_p_tmp FOREIGN KEY (partition_id, commit_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY boards_epic_user_preferences
ADD CONSTRAINT fk_d32c3d693c FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -34102,6 +34119,9 @@ ALTER TABLE ONLY vulnerability_state_transitions
ALTER TABLE ONLY ci_sources_pipelines
ADD CONSTRAINT fk_d4e29af7d7_p FOREIGN KEY (source_partition_id, source_pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_sources_pipelines
ADD CONSTRAINT fk_d4e29af7d7_p_tmp FOREIGN KEY (source_partition_id, source_pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY operations_strategies_user_lists
ADD CONSTRAINT fk_d4f7076369 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -34186,6 +34206,9 @@ ALTER TABLE ONLY ci_resources
ALTER TABLE ONLY ci_sources_pipelines
ADD CONSTRAINT fk_e1bad85861_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_sources_pipelines
ADD CONSTRAINT fk_e1bad85861_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE p_ci_builds_metadata
ADD CONSTRAINT fk_e20479742e_p FOREIGN KEY (partition_id, build_id) REFERENCES p_ci_builds(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
@ -34309,6 +34332,9 @@ ALTER TABLE ONLY observability_metrics_issues_connections
ALTER TABLE p_ci_pipeline_variables
ADD CONSTRAINT fk_f29c5f4380_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipeline_variables
ADD CONSTRAINT fk_f29c5f4380_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY zoekt_indices
ADD CONSTRAINT fk_f34800a202 FOREIGN KEY (zoekt_node_id) REFERENCES zoekt_nodes(id) ON DELETE CASCADE;
@ -34351,6 +34377,9 @@ ALTER TABLE ONLY application_settings
ALTER TABLE p_ci_stages
ADD CONSTRAINT fk_fb57e6cc56_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_stages
ADD CONSTRAINT fk_fb57e6cc56_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY agent_group_authorizations
ADD CONSTRAINT fk_fb70782616 FOREIGN KEY (agent_id) REFERENCES cluster_agents(id) ON DELETE CASCADE;
@ -34525,6 +34554,9 @@ ALTER TABLE ONLY audit_events_streaming_headers
ALTER TABLE ONLY ci_sources_projects
ADD CONSTRAINT fk_rails_10a1eb379a_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_sources_projects
ADD CONSTRAINT fk_rails_10a1eb379a_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY virtual_registries_packages_maven_cached_responses
ADD CONSTRAINT fk_rails_1167f21441 FOREIGN KEY (upstream_id) REFERENCES virtual_registries_packages_maven_upstreams(id) ON DELETE SET NULL;
@ -34990,6 +35022,9 @@ ALTER TABLE ONLY status_page_settings
ALTER TABLE ONLY ci_pipeline_metadata
ADD CONSTRAINT fk_rails_50c1e9ea10_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipeline_metadata
ADD CONSTRAINT fk_rails_50c1e9ea10_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY project_repository_storage_moves
ADD CONSTRAINT fk_rails_5106dbd44a FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
@ -35443,6 +35478,9 @@ ALTER TABLE ONLY vulnerability_feedback
ALTER TABLE ONLY ci_pipeline_messages
ADD CONSTRAINT fk_rails_8d3b04e3e1_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipeline_messages
ADD CONSTRAINT fk_rails_8d3b04e3e1_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE incident_management_pending_alert_escalations
ADD CONSTRAINT fk_rails_8d8de95da9 FOREIGN KEY (alert_id) REFERENCES alert_management_alerts(id) ON DELETE CASCADE;
@ -35473,6 +35511,9 @@ ALTER TABLE ONLY organization_details
ALTER TABLE ONLY ci_pipelines_config
ADD CONSTRAINT fk_rails_906c9a2533_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipelines_config
ADD CONSTRAINT fk_rails_906c9a2533_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY approval_project_rules_groups
ADD CONSTRAINT fk_rails_9071e863d1 FOREIGN KEY (approval_project_rule_id) REFERENCES approval_project_rules(id) ON DELETE CASCADE;
@ -35659,6 +35700,9 @@ ALTER TABLE ONLY saved_replies
ALTER TABLE ONLY ci_pipeline_artifacts
ADD CONSTRAINT fk_rails_a9e811a466_p FOREIGN KEY (partition_id, pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_pipeline_artifacts
ADD CONSTRAINT fk_rails_a9e811a466_p_tmp FOREIGN KEY (partition_id, pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY merge_request_user_mentions
ADD CONSTRAINT fk_rails_aa1b2961b1 FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
@ -36175,6 +36219,9 @@ ALTER TABLE ONLY packages_debian_group_distributions
ALTER TABLE ONLY ci_daily_build_group_report_results
ADD CONSTRAINT fk_rails_ee072d13b3_p FOREIGN KEY (partition_id, last_pipeline_id) REFERENCES ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY ci_daily_build_group_report_results
ADD CONSTRAINT fk_rails_ee072d13b3_p_tmp FOREIGN KEY (partition_id, last_pipeline_id) REFERENCES p_ci_pipelines(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE NOT VALID;
ALTER TABLE ONLY import_source_users
ADD CONSTRAINT fk_rails_ee30e569be FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;

View File

@ -30,7 +30,7 @@ There are still instances where the GitLab REST API is used, such as when creati
For [client-side state management](state_management.md) in Vue, depending on the specific needs of the feature,
we use:
- [Apollo](https://www.apollographql.com/) (our primary [GraphQL client](graphql.md))
- [Apollo](https://www.apollographql.com/) (default choice for applications relying on [GraphQL](graphql.md))
- [Pinia](pinia.md) (in [pilot phase](https://gitlab.com/gitlab-org/gitlab/-/issues/479279))
- Stateful components.

View File

@ -56,7 +56,7 @@ If you're still uncertain, prefer using Apollo before Pinia.
### Pick Apollo when
- You rely on the GraphQL server state
- You rely on the GraphQL API
- You need specific Apollo features, for example:
- [Parametrized cache, cache invalidation](graphql.md#immutability-and-cache-updates)
- [Polling](graphql.md#polling-and-performance)
@ -83,14 +83,14 @@ If you're considering using Pinia please drop a message in the `#frontend` inter
### Weaknesses
- Can't do any advanced GraphQL request handling out of the box (data normalization, polling, caching, etc.)
- Can't do any advanced request handling out of the box (data normalization, polling, caching, etc.)
- Can lead to same pitfalls as Vuex without guidance (overblown stores)
### Pick Pinia when you have any of these
- Significant percentage of Vue application state is client-side state
- Migrating from Vuex is a high priority
- You're not considering using Apollo for client state management
- Your application does not rely primarily on the GraphQL API, and you don't plan to migrate to the GraphQL API in the near future
## Combining Pinia and Apollo

View File

@ -70,10 +70,23 @@ NOTE:
Before you use a new Elasticsearch cluster in production, see the
[Elasticsearch documentation on important settings](https://www.elastic.co/guide/en/elasticsearch/reference/current/important-settings.html).
### Elasticsearch access control configuration
### Service-linked role for AWS OpenSearch
Elasticsearch offers role based access control to secure the cluster. To access and perform operations in the
Elasticsearch cluster, the `Username` configured in the Admin UI must have role(s) assigned that grant the following
You must have a [service-linked role](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/slr.html) named `AWSServiceRoleForAmazonOpenSearchService` in your AWS account when you create OpenSearch domains.
This role is account-wide and is used by **all** OpenSearch domains.
In most cases, this role is created automatically when you use the AWS Management Console to create the first OpenSearch domain.
To create a service-linked role manually, see the [AWS documentation](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/slr-aos.html#create-slr).
### Access requirements
GitLab supports both [HTTP and role-based authentication methods](#advanced-search-configuration)
depending on your requirements and the backend service you use.
#### Role-based access control for Elasticsearch
Elasticsearch can offer role-based access control to further secure a cluster. To access and perform operations in the
Elasticsearch cluster, the `Username` configured in the **Admin** area must have roles that grant the following
privileges. The `Username` makes requests from GitLab to the search cluster.
For more information,
@ -99,25 +112,41 @@ and [Elasticsearch security privileges](https://www.elastic.co/guide/en/elastics
}
```
### AWS OpenSearch service configuration
#### Access control for AWS OpenSearch
AWS OpenSearch offers multiple methods of access control which are supported by GitLab:
Prerequisites:
- [Domain level access policy](#domain-level-access-policy-configuration)
- Fine-grained access control
- [With IAM ARN as master user](#connecting-with-an-iam-user)
- [With master user](#connecting-with-a-master-user-in-the-internal-database)
- The domain access policy for AWS OpenSearch must allow `es:ESHttp*` actions.
For more details on fine-grained access control see
[recommended configurations](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac.html#fgac-recommendations)
GitLab supports the following methods of access control for AWS OpenSearch:
#### Domain level access policy configuration
- [**VPC domain access policy**](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/vpc.html#vpc-security): where the AWS OpenSearch domain is deployed and accessible in a VPC internally
- [**Resource-based (domain) access policy**](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/ac.html#ac-types-resource): where the AWS OpenSearch domain is configured with an IAM policy
- [**Identity-based policy**](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/ac.html#ac-types-identity): where clients use IAM principals with policies to configure access
Configure the AWS OpenSearch domain access policy to allow `es:ESHttp*` actions. You can customize
the following example configuration to limit principals or resources:
Advanced options such as [fine-grained access control](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac.html) are also available.
NOTE:
All `es:ESHttp` actions are required by GitLab.
##### Resource-based policy examples
Here's an example of a resource-based (domain) access policy where `es:ESHttp*` actions are allowed:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": "*",
"Action": [
"es:ESHttp*"
],
"Resource": "arn:aws:es:us-west-1:987654321098:domain/test-domain/*"
}
]
}
```
Here's an example of a resource-based (domain) access policy where `es:ESHttp*` actions are allowed only for a specific IAM principal:
```json
{
@ -127,42 +156,41 @@ All `es:ESHttp` actions are required by GitLab.
"Effect": "Allow",
"Principal": {
"AWS": [
"*"
"arn:aws:iam::123456789012:user/test-user"
]
},
"Action": [
"es:ESHttp*"
],
"Resource": "arn:aws:es:REGION:AWS_ACCOUNT_ID:domain/DOMAIN_NAME/*"
"Resource": "arn:aws:es:us-west-1:987654321098:domain/test-domain/*"
}
]
}
```
For more information,
see [Identity and Access Management in Amazon OpenSearch Service](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/ac.html).
#### Identity-based policy examples
##### Service linked role configuration
Here's an example of an identity-based access policy attached to an IAM principal where `es:ESHttp*` actions are allowed:
The GitLab Rails and Sidekiq nodes require permissions to communicate with the search cluster.
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"es:ESHttp*",
],
"Effect": "Allow",
"Resource": "*"
}
]
}
```
Create an IAM role with the following options and attach the role to the GitLab Rails and Sidekiq nodes:
##### Fine-grained access control examples
- Trusted entity type: `AWS Service` for `EC2` service
- Permission policy: `AmazonOpenSearchServiceFullAccess`
##### Connecting with a domain level access policy only
When using a domain level access policy, you must check the box **Use AWS OpenSearch Service with IAM credentials** and
fill in **AWS region** while leaving **AWS Access Key** and **AWS Secret Access Key** blank in the advanced search settings.
NOTE:
Domain level access policy can be used standalone or in addition to fine-grained access control policies.
#### Fine-grained access control configuration
To access and perform operations in the AWS OpenSearch cluster, the user in **Username** must have role(s) assigned that
grant the following privileges. This user makes requests from GitLab to the search cluster.
To access and perform operations in the AWS OpenSearch cluster with fine-grained access control,
your GitLab user must have the following privileges.
For more information,
see [OpenSearch access control permissions](https://opensearch.org/docs/latest/security/access-control/permissions/)
@ -204,19 +232,38 @@ The index pattern `*` requires a few permissions for Advanced search to work.
}
```
##### Connecting with a master user in the internal database
#### Connecting to AWS OpenSearch Service
When using fine-grained access control with a user in the internal database, you should use HTTP basic
authentication to connect to AWS OpenSearch. You can provide the master username and password as part of the
AWS OpenSearch URL or in the **Username** and **Password** text boxes in the advanced search settings. See
[Tutorial: Internal user database and HTTP basic authentication](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac-walkthrough-basic.html)
for details.
Depending on your access requirements, your GitLab user can have:
##### Connecting with an IAM user
- HTTP basic authentication
- Role-based authentication
When using fine-grained access control with IAM credentials, you must check the box **Use AWS OpenSearch Service with
IAM credentials** in the **AWS OpenSearch IAM credentials** section in the advanced search settings.
Provide the **AWS region**, **AWS Access Key**, and **AWS Secret Access Key**.
##### HTTP basic authentication
By default, GitLab attempts to connect to the configured backend directly without authentication.
If you created a user for AWS OpenSearch (for example, with fine-grained access control),
you can enter the username and password in the AWS OpenSearch URL or the advanced search settings.
For more information, see the
[AWS documentation](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/fgac-http-auth.html).
##### Role-based authentication
To use role-based authentication:
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **Settings > Search**.
1. Expand **Advanced Search**.
1. In the **AWS OpenSearch IAM credentials** section,
select the **Use AWS OpenSearch Service with IAM credentials** checkbox.
1. Select **Save changes**.
For an IAM role, you can use:
- [**The instance profile**](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html):
set **AWS region** only for GitLab to use the IAM role attached to the instance or pod (EKS IRSA).
- **A specific role:** set **AWS region**, **AWS access key ID**, and **AWS Secret access key** for GitLab to use the keys to authenticate directly.
## Upgrade to a new Elasticsearch major version
@ -419,7 +466,7 @@ The following Elasticsearch settings are available:
| `Password` | The password of your Elasticsearch instance. |
| `Number of Elasticsearch shards and replicas per index` | Elasticsearch indices are split into multiple shards for performance reasons. In general, you should use at least five shards. Indices with tens of millions of documents should have more shards ([see the guidance](#guidance-on-choosing-optimal-cluster-configuration)). Changes to this value do not take effect until you re-create the index. For more information about scalability and resilience, see the [Elasticsearch documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/scalability.html). Each Elasticsearch shard can have a number of replicas. These replicas are a complete copy of the shard and can provide increased query performance or resilience against hardware failure. Increasing this value increases the total disk space required by the index. You can set the number of shards and replicas for each of the indices. |
| `Limit the amount of namespace and project data to index` | When you enable this setting, you can specify namespaces and projects to index. All other namespaces and projects use database search instead. If you enable this setting but do not specify any namespace or project, [only project records are indexed](#all-project-records-are-indexed). For more information, see [Limit the amount of namespace and project data to index](#limit-the-amount-of-namespace-and-project-data-to-index). |
| `Using AWS OpenSearch Service with IAM credentials` | Sign your OpenSearch requests using [AWS IAM authorization](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html), [AWS EC2 Instance Profile Credentials](https://docs.aws.amazon.com/codedeploy/latest/userguide/getting-started-create-iam-instance-profile.html#getting-started-create-iam-instance-profile-cli), or [AWS ECS Tasks Credentials](https://docs.aws.amazon.com/AmazonECS/latest/userguide/task-iam-roles.html). Refer to [Identity and Access Management in Amazon OpenSearch Service](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/ac.html) for details of AWS hosted OpenSearch domain access policy configuration. |
| `Use AWS OpenSearch Service with IAM credentials` | Sign your OpenSearch requests using [AWS IAM authorization](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html), [AWS EC2 Instance Profile Credentials](https://docs.aws.amazon.com/codedeploy/latest/userguide/getting-started-create-iam-instance-profile.html#getting-started-create-iam-instance-profile-cli), or [AWS ECS Tasks Credentials](https://docs.aws.amazon.com/AmazonECS/latest/userguide/task-iam-roles.html). Refer to [Identity and Access Management in Amazon OpenSearch Service](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/ac.html) for details of AWS hosted OpenSearch domain access policy configuration. |
| `AWS Region` | The AWS region in which your OpenSearch Service is located. |
| `AWS Access Key` | The AWS access key. |
| `AWS Secret Access Key` | The AWS secret access key. |

View File

@ -411,7 +411,7 @@ To view project import history:
1. If there are any errors for a particular import, select **Details** to see them.
The history also includes projects created from [built-in](../index.md#create-a-project-from-a-built-in-template)
or [custom](../index.md#create-a-project-from-a-built-in-template)
or [custom](../index.md#create-a-project-from-a-custom-template)
templates. GitLab uses [import repository by URL](repo_by_url.md)
to create a new project from a template.

View File

@ -10,7 +10,8 @@ DETAILS:
**Tier:** Free, Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
You can create a project in many ways in GitLab.
You have different options to create a project. You can create a blank project, create a project
from built-in or custom templates, or create a project with `git push`.
## Create a blank project
@ -19,29 +20,25 @@ To create a blank project:
1. On the left sidebar, at the top, select **Create new** (**{plus}**) and **New project/repository**.
1. Select **Create blank project**.
1. Enter the project details:
- In the **Project name** field, enter the name of your project. See the [limitations on project names](../../user/reserved_names.md).
- In the **Project slug** field, enter the path to your project. The GitLab instance uses the
slug as the URL path to the project. To change the slug, first enter the project name,
then change the slug.
- In the **Project deployment target (optional)** field, select your project's deployment target.
This information helps GitLab better understand its users and their deployment requirements.
- To modify the project's [viewing and access rights](../public_access.md) for
users, change the **Visibility Level**.
- To create README file so that the Git repository is initialized, has a default branch, and
can be cloned, select **Initialize repository with a README**.
- To analyze the source code in the project for known security vulnerabilities,
select **Enable Static Application Security Testing (SAST)**.
1. **Project name**: Enter the name of your project.
See the [limitations on project names](../../user/reserved_names.md#limitations-on-usernames-project-and-group-names-and-slugs).
1. **Project slug**: Enter the path to your project. GitLab uses the slug as the URL path.
1. **Project deployment target (optional)**: If you want to deploy your project to a specific environment,
select the relevant deployment target.
1. **Visibility Level**: Select the appropriate visibility level.
See the [viewing and access rights](../public_access.md) for users.
1. **Initialize repository with a README**: Select this option to initialize the Git repository,
create a default branch, and enable cloning of this project's repository.
1. **Enable Static Application Security Testing (SAST)**: Select this option to analyze the
source code for known security vulnerabilities.
1. Select **Create project**.
## Create a project from a built-in template
A built-in project template populates a new project with files to get you started.
Built-in templates are sourced from the following groups:
- [`project-templates`](https://gitlab.com/gitlab-org/project-templates)
- [`pages`](https://gitlab.com/pages)
Anyone can [contribute a built-in template](../../development/project_templates.md).
Built-in templates populate a new project with files to help you get started.
These templates are sourced from the [`project-templates`](https://gitlab.com/gitlab-org/project-templates)
and [`pages`](https://gitlab.com/pages) groups.
Anyone can [contribute to built-in project templates](../../development/project_templates.md).
To create a project from a built-in template:
@ -49,56 +46,26 @@ To create a project from a built-in template:
1. Select **Create from template**.
1. Select the **Built-in** tab.
1. From the list of templates:
- To view a preview of the template, select **Preview**.
- To use a template for the project, select **Use template**.
- To preview a template, select **Preview**.
- To use a template, select **Use template**.
1. Enter the project details:
- In the **Project name** field, enter the name of your project. The name must start with a lowercase or uppercase letter (`a-zA-Z`), digit (`0-9`), emoji, or underscore (`_`). It can also contain dots (`.`), pluses (`+`), dashes (`-`), or spaces.
- In the **Project slug** field, enter the path to your project. The GitLab instance uses the
slug as the URL path to the project. To change the slug, first enter the project name,
then change the slug.
- In the **Project description (optional)** field, enter the description of your project's dashboard. The description is limited to 500 characters.
- To modify the project's [viewing and access rights](../public_access.md) for users,
change the **Visibility Level**.
- **Project name**: Enter the name of your project.
- **Project slug**: Enter the path to your project. GitLab uses the slug as the URL path.
- **Project description (optional)**: Enter a description for your project.
The character limit is 500.
- **Visibility Level**: Select the appropriate visibility level.
See the [viewing and access rights](../public_access.md) for users.
1. Select **Create project**.
Users who create projects [from a template](#create-a-project-from-a-built-in-template) or [by importing them](settings/import_export.md#import-a-project-and-its-data) are
displayed as the author of the imported items, which keep the original timestamp from the template or import. For this reason, the creation date of imported items can be
older than the creation date of the user's account. This can make items appear to have been created by a user before they even had an account.
NOTE:
If a user creates a project from a template, or [imports a project](settings/import_export.md#import-a-project-and-its-data),
they are shown as the author of the imported items, which retain the original timestamp from the template or import.
This can make items appear as if they were created before the user's account existed.
Imported objects are labeled as `By <username> on <timestamp>`. Before GitLab 17.1, the label was suffixed with `(imported from GitLab)`.
Imported objects are labeled as `By <username> on <timestamp>`.
Before GitLab 17.1, the label was suffixed with `(imported from GitLab)`.
## Create a project from a custom template
DETAILS:
**Tier:** Premium, Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
Custom project templates are available at:
- The [instance-level](../../administration/custom_project_templates.md)
- The [group-level](../../user/group/custom_project_templates.md)
1. On the left sidebar, at the top, select **Create new** (**{plus}**) and **New project/repository**.
1. Select **Create from template**.
1. Select the **Instance** or **Group** tab.
1. From the list of templates:
- To view a preview of the template, select **Preview**.
- To use a template for the project, select **Use template**.
1. Enter the project details:
- In the **Project name** field, enter the name of your project. The name must start with a lowercase or uppercase letter (`a-zA-Z`), digit (`0-9`), emoji, or underscore (`_`). It can also contain dots (`.`), pluses (`+`), dashes (`-`), or spaces.
- In the **Project slug** field, enter the path to your project. The GitLab instance uses the
slug as the URL path to the project. To change the slug, first enter the project name,
then change the slug.
- The description of your project's dashboard in the **Project description (optional)** field. The description is limited to 500 characters.
- To modify the project's [viewing and access rights](../public_access.md) for users,
change the **Visibility Level**.
1. Select **Create project**.
## Create a project from the HIPAA Audit Protocol template
DETAILS:
**Tier:** Ultimate
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
### Create a project from the HIPAA Audit Protocol template
The HIPAA Audit Protocol template contains issues for audit inquiries in the
HIPAA Audit Protocol published by the U.S Department of Health and Human Services.
@ -109,31 +76,95 @@ To create a project from the HIPAA Audit Protocol template:
1. Select **Create from template**.
1. Select the **Built-in** tab.
1. Locate the **HIPAA Audit Protocol** template:
- To view a preview of the template, select **Preview**.
- To use the template for the project, select **Use template**.
- To preview the template, select **Preview**.
- To use the template, select **Use template**.
1. Enter the project details:
- In the **Project name** field, enter the name of your project. The name must start with a lowercase or uppercase letter (`a-zA-Z`), digit (`0-9`), emoji, or underscore (`_`). It can also contain dots (`.`), pluses (`+`), dashes (`-`), or spaces.
- In the **Project slug** field, enter the path to your project. The GitLab instance uses the
slug as the URL path to the project. To change the slug, first enter the project name,
then change the slug.
- In the **Project description (optional)** field, enter the description of your project's dashboard. The description is limited to 500 characters.
- To modify the project's [viewing and access rights](../public_access.md) for users,
change the **Visibility Level**.
- **Project name**: Enter the name of your project.
- **Project slug**: Enter the path to your project. GitLab uses the slug as the URL path.
- **Project description (optional)**: Enter a description for your project.
The character limit is 500.
- **Visibility Level**: Select the appropriate visibility level.
See the [viewing and access rights](../public_access.md) for users.
1. Select **Create project**.
## Create a new project with Git push
## Create a project from a custom template
Use `git push` to push a local project repository to GitLab. After you push a repository,
Custom project templates are available for your [instance](../../administration/custom_project_templates.md)
and [group](../../user/group/custom_project_templates.md).
To create a project from a custom template:
1. On the left sidebar, at the top, select **Create new** (**{plus}**) and **New project/repository**.
1. Select **Create from template**.
1. Select the **Instance** or **Group** tab.
1. From the list of templates:
- To preview the template, select **Preview**.
- To use a template, select **Use template**.
1. Enter the project details:
- **Project name**: Enter the name of your project.
- **Project slug**: Enter the path to your project. GitLab uses the slug as the URL path.
- **Project description (optional)**: Enter a description for your project. The character limit is 500.
- **Visibility Level**: Select the appropriate visibility level.
See the [viewing and access rights](../public_access.md) for users.
1. Select **Create project**.
## Create a project that uses SHA-256 hashing
DETAILS:
**Status:** Experiment
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/794) in GitLab 16.9 [with a flag](../../administration/feature_flags.md)
> - named `support_sha256_repositories`. Disabled by default. This feature is an [experiment](../../policy/experiment-beta-support.md#experiment).
FLAG:
The availability of this feature is controlled by a feature flag.
For more information, see the history.
This feature is available for testing, but not ready for production use.
You can select SHA-256 hashing for a project only when you create the project.
Git does not support migrating to SHA-256 later, or migrating back to SHA-1.
To create a project that uses SHA-256 hashing:
1. On the left sidebar, at the top, select **Create new** (**{plus}**) and **New project/repository**.
1. Enter the project details:
- **Project name**: Enter the name of your project.
- **Project slug**: Enter the path to your project. GitLab uses the slug as the URL path.
- **Project description (optional)**: Enter a description for your project. The character limit is 500.
- **Visibility Level**: Select the appropriate visibility level.
See the [viewing and access rights](../public_access.md) for users.
1. In the **Project Configuration** area, expand the **Experimental settings**.
1. Select **Use SHA-256 as the repository hashing algorithm**.
1. Select **Create project**.
### Why SHA-256?
By default, Git uses the SHA-1 [hashing algorithm](https://handbook.gitlab.com/handbook/security/cryptographic-standard/#algorithmic-standards)
to generate a 40-character
ID for objects such as commits, blobs, trees, and tags. The SHA-1 algorithm was proven to be insecure when
[Google was able to produce a hash collision](https://security.googleblog.com/2017/02/announcing-first-sha1-collision.html).
The Git project is not yet impacted by these
kinds of attacks because of the way Git stores objects.
In SHA-256 repositories, the algorithm generates a 64-character ID instead of a 40-character ID.
The Git project determined that the SHA-256 feature is safe to use when they
[removed the experimental label](https://github.com/git/git/blob/master/Documentation/RelNotes/2.42.0.txt#L41-L45).
Federal regulations, such as NIST and CISA [guidelines](https://csrc.nist.gov/projects/hash-functions/nist-policy-on-hash-functions),
which [FedRamp](https://www.fedramp.gov/) enforces, have set a due date in 2030 to stop using SHA-1 and
encourage agencies to move away from SHA-1 earlier, if possible.
## Create a project with `git push`
Use `git push` to add a local project repository to GitLab. After you add a repository,
GitLab creates your project in your chosen namespace.
You cannot use `git push` to create projects with project paths that:
You cannot use `git push` to create projects with paths that have been used previously
or [renamed](working_with_projects.md#rename-a-repository).
- Have previously been used.
- Have been [renamed](working_with_projects.md#rename-a-repository).
Previously used project paths have a redirect. The redirect causes push attempts to redirect requests
to the renamed project location, instead of creating a new project. To create a new project for a previously
used or renamed project, use the UI or the [Projects API](../../api/projects.md#create-project).
Previously used project paths have a redirect. Instead of creating a new project, the redirect sends
push attempts to the renamed project location.
To create a new project for a previously used or renamed project, use the UI or the [Projects API](../../api/projects.md#create-project).
Prerequisites:
@ -143,84 +174,52 @@ Prerequisites:
1. On the left sidebar, select **Search or go to** and find your group.
1. In the upper-right corner, confirm that **New project** is visible.
Contact your GitLab administrator if you require permission.
To push your repository and create a project:
If you do not have the necessary permission, contact your GitLab administrator.
1. Push with SSH or HTTPS:
- To push with SSH:
To create a project with `git push`:
```shell
# Use this version if your project uses the standard port 22
$ git push --set-upstream git@gitlab.example.com:namespace/myproject.git main
1. In your local repository, push with either SSH or HTTPS:
# Use this version if your project requires a non-standard port number
$ git push --set-upstream ssh://git@gitlab.example.com:00/namespace/myproject.git main
```
- With SSH, by running:
- To push with HTTPS:
```shell
# Use this version if your project uses the standard port 22
$ git push --set-upstream git@gitlab.example.com:namespace/myproject.git main
```shell
git push --set-upstream https://gitlab.example.com/namespace/myproject.git master
```
# Use this version if your project requires a non-standard port number
$ git push --set-upstream ssh://git@gitlab.example.com:00/namespace/myproject.git main
```
- For `gitlab.example.com`, use the domain name of the machine that hosts your Git repository.
- For `namespace`, use the name of your [namespace](../namespace/index.md).
- For `myproject`, use the name of your project.
- If specifying a port, change `00` to your project's required port number.
- Optional. To export existing repository tags, append the `--tags` flag to your `git push` command.
1. Optional. To configure the remote:
- With HTTPS, by running:
```shell
git push --set-upstream https://gitlab.example.com/namespace/myproject.git main
```
In the commands above:
- Replace `gitlab.example.com` with the domain name of the machine that hosts your Git repository.
- Replace `namespace` with your [namespace](../namespace/index.md) name.
- Replace `myproject` with your project name.
- If specifying a port, change `00` to your project's required port number.
- Optional. To export existing repository tags, append the `--tags` flag to your `git push` command.
1. Optional. Configure the remote:
```shell
git remote add origin https://gitlab.example.com/namespace/myproject.git
```
When the push completes, GitLab displays the message:
When the push completes, GitLab displays the following message:
```shell
remote: The private project namespace/myproject was created.
```
To view your new project, go to `https://gitlab.example.com/namespace/myproject`.
Your project's visibility is set to **Private** by default. To change project visibility, adjust your
[project's settings](../public_access.md#change-project-visibility).
## Create a project that uses SHA-256 hashing
DETAILS:
**Status:** Experiment
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/794) in GitLab 16.9. This feature is an [experiment](../../policy/experiment-beta-support.md#experiment).
FLAG:
On self-managed GitLab, by default this feature is not available.
To make it available, an administrator can enable the [feature flag](../../administration/feature_flags.md) named `support_sha256_repositories`.
You can select SHA-256 hashing for a project only when you create the project.
To create a project that uses SHA-256 hashing:
1. On the left sidebar, at the top, select **Create new** (**{plus}**) and **New project/repository**.
1. Select **Create blank project** and fill out the project's details as usual. For more information on project details, see [Create a blank project](#create-a-blank-project).
1. In the **Project Configuration** area, click on **Experimental settings** to
expand the accordion.
1. Select **Use SHA-256 as the repository hashing algorithm**.
1. Select **Create project**.
WARNING:
Git does not support migrating to SHA-256 later, or migrating back to SHA-1.
### Why SHA-256?
By default, Git uses the SHA-1 [hashing algorithm](https://handbook.gitlab.com/handbook/security/cryptographic-standard/#algorithmic-standards) to generate a 40-character
ID for objects such as commits, blobs, trees, and tags. The SHA-1 algorithm was proven to be insecure when
[Google was able to produce a hash collision](https://security.googleblog.com/2017/02/announcing-first-sha1-collision.html). The Git project is not yet impacted by these
kinds of attacks because of the way Git stores objects. However, it is only a matter of time until new attacks on SHA-1 are found that impact Git.
In SHA-256 repositories, the algorithm generates a 64-character ID instead of a 40-character ID. The Git project determined that the SHA-256 feature is safe to use when they [removed the experimental label](https://github.com/git/git/blob/master/Documentation/RelNotes/2.42.0.txt#L41-L45).
Federal regulations, such as NIST and CISA [guidelines](https://csrc.nist.gov/projects/hash-functions/nist-policy-on-hash-functions) (which
[FedRamp](https://www.fedramp.gov/) enforces), have set a due date in 2030 to stop using SHA-1 and encourage agencies using SHA-1 to move away from it sooner, if possible.
By default, your project's visibility is set to **Private**,
but you can [change the project's visibility](../public_access.md#change-project-visibility).
## Related topics

View File

@ -144,7 +144,7 @@ To create a configuration secret for the proxy:
```shell
helm repo add gitlab-workspaces-proxy \
https://gitlab.com/api/v4/projects/gitlab-org%2fremote-development%2fgitlab-workspaces-proxy/packages/helm/devel
https://gitlab.com/api/v4/projects/gitlab-org%2fworkspaces%2fgitlab-workspaces-proxy/packages/helm/devel
```
1. Modify the `ingress.className` parameter if you're using a different Ingress class:
@ -154,7 +154,7 @@ To create a configuration secret for the proxy:
helm upgrade --install gitlab-workspaces-proxy \
gitlab-workspaces-proxy/gitlab-workspaces-proxy \
--version 0.1.13 \
--version 0.1.14 \
--namespace=gitlab-workspaces \
--create-namespace \
--set="auth.client_id=${CLIENT_ID}" \

View File

@ -0,0 +1,90 @@
# frozen_string_literal: true
module API
module Concerns
module VirtualRegistries
module Packages
module Maven
module CachedResponseEndpoints
extend ActiveSupport::Concern
included do
include ::API::PaginationParams
helpers do
def cached_responses
upstream.cached_responses.search_by_relative_path(params[:search])
end
def cached_response
upstream.cached_responses.find_by_relative_path!(declared_params[:cached_response_id])
end
end
desc 'List maven virtual registry upstream cached responses' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in an experimental state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success Entities::VirtualRegistries::Packages::Maven::CachedResponse
failure [
{ code: 400, message: 'Bad Request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not found' }
]
tags %w[maven_virtual_registries]
is_array true
hidden true
end
params do
optional :search, type: String, desc: 'Search query', documentation: { example: 'foo/bar/mypkg' }
use :pagination
end
get do
authorize! :read_virtual_registry, registry
# TODO: refactor this when we support multiple upstreams.
# https://gitlab.com/gitlab-org/gitlab/-/issues/480461
not_found! if upstream&.id != params[:upstream_id]
present paginate(cached_responses), with: Entities::VirtualRegistries::Packages::Maven::CachedResponse
end
desc 'Delete a maven virtual registry upstream cached response' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in an experimental state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success code: 204
failure [
{ code: 400, message: 'Bad Request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not found' }
]
tags %w[maven_virtual_registries]
hidden true
end
params do
requires :cached_response_id, type: String, coerce_with: Base64.method(:urlsafe_decode64),
desc: 'The base64 encoded relative path of the cached response',
documentation: { example: 'Zm9vL2Jhci9teXBrZy5wb20=' }
end
delete '*cached_response_id' do
authorize! :destroy_virtual_registry, registry
# TODO: refactor this when we support multiple upstreams.
# https://gitlab.com/gitlab-org/gitlab/-/issues/480461
not_found! if upstream&.id != params[:upstream_id]
destroy_conditionally!(cached_response) do |cached_response|
render_validation_error!(cached_response) unless cached_response.update(upstream: nil)
end
end
end
end
end
end
end
end
end
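The following is a hedged sketch of calling these endpoints with curl once the `virtual_registry_maven` feature flag is enabled. The hostname, token placeholder, registry ID `1`, and upstream ID `2` are illustrative assumptions; the encoded ID reuses the documented example (`Zm9vL2Jhci9teXBrZy5wb20=` for `foo/bar/mypkg.pom`).
```shell
# List cached responses for an upstream, optionally filtered by relative path.
curl --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/virtual_registries/packages/maven/registries/1/upstreams/2/cached_responses?search=mypkg"

# Delete a cached response. The ID is the base64url-encoded relative path.
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/virtual_registries/packages/maven/registries/1/upstreams/2/cached_responses/Zm9vL2Jhci9teXBrZy5wb20="
```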

View File

@ -84,6 +84,8 @@ module API
get do
authorize! :read_virtual_registry, registry
# TODO: refactor this when we support multiple upstreams.
# https://gitlab.com/gitlab-org/gitlab/-/issues/480461
not_found! if upstream&.id != params[:upstream_id]
present upstream, with: Entities::VirtualRegistries::Packages::Maven::Upstream
@ -137,6 +139,8 @@ module API
delete do
authorize! :destroy_virtual_registry, registry
# TODO: refactor this when we support multiple upstreams.
# https://gitlab.com/gitlab-org/gitlab/-/issues/480461
not_found! if upstream&.id != params[:upstream_id]
destroy_conditionally!(upstream)

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
module API
module Entities
module VirtualRegistries
module Packages
module Maven
class CachedResponse < Grape::Entity
expose :cached_response_id do |cached_response, _options|
Base64.urlsafe_encode64(cached_response.relative_path)
end
expose :group_id, :upstream_id, :upstream_checked_at, :file, :size, :downloaded_at,
:downloads_count, :relative_path, :upstream_etag, :content_type, :created_at, :updated_at
end
end
end
end
end
end

View File

@ -5,7 +5,6 @@ module API
module Packages
class Maven < ::API::Base
include ::API::Helpers::Authentication
include ::API::Concerns::VirtualRegistries::Packages::Endpoint
feature_category :virtual_registry
urgency :low
@ -52,6 +51,12 @@ module API
route_param :id, type: Integer, desc: 'The ID of the maven virtual registry' do
namespace :upstreams do
include ::API::Concerns::VirtualRegistries::Packages::Maven::UpstreamEndpoints
route_param :upstream_id, type: Integer, desc: 'The ID of the maven virtual registry upstream' do
namespace :cached_responses do
include ::API::Concerns::VirtualRegistries::Packages::Maven::CachedResponseEndpoints
end
end
end
end
end
@ -82,15 +87,19 @@ module API
desc: 'Package path',
documentation: { example: 'foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar' }
end
get ':id/*path', format: false do
service_response = ::VirtualRegistries::Packages::Maven::HandleFileRequestService.new(
registry: registry,
current_user: current_user,
params: { path: declared_params[:path] }
).execute
namespace ':id/*path' do
include ::API::Concerns::VirtualRegistries::Packages::Endpoint
send_error_response_from!(service_response: service_response) if service_response.error?
send_successful_response_from(service_response: service_response)
get format: false do
service_response = ::VirtualRegistries::Packages::Maven::HandleFileRequestService.new(
registry: registry,
current_user: current_user,
params: { path: params[:path] }
).execute
send_error_response_from!(service_response: service_response) if service_response.error?
send_successful_response_from(service_response: service_response)
end
end
end
end
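A similarly hedged sketch of fetching a file through the relocated `:id/*path` endpoint; the registry ID and hostname are placeholders, and the path reuses the documented example value.
```shell
# Download a package file through the maven virtual registry.
curl --header "PRIVATE-TOKEN: <your_access_token>" \
  --output mypkg-1.0-SNAPSHOT.jar \
  "https://gitlab.example.com/api/v4/virtual_registries/packages/maven/1/foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar"
```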

View File

@ -16,8 +16,14 @@ module ContainerRegistry
LEVELS_SUPPORTED = 3
def initialize(path)
attr_reader :project
# The 'project' argument is optional.
# If provided during initialization, it will limit the path to the specified project,
# potentially reducing the need for a database query.
def initialize(path, project: nil)
@path = path.to_s.downcase
@project = project
end
def valid?

View File

@ -39,6 +39,8 @@ module Gitlab
end
def ignore_alternate_directories?
return false if Feature.enabled?(:dont_ignore_alternate_directories, project)
git_env = ::Gitlab::Git::HookEnv.all(repository.gl_repository)
git_env['GIT_OBJECT_DIRECTORY_RELATIVE'].present?

View File

@ -7,6 +7,7 @@ module Gitlab
JWT_AUDIENCE = 'gitlab-kas'
STUB_CLASSES = {
server_info: Gitlab::Agent::ServerInfo::Rpc::ServerInfo::Stub,
agent_tracker: Gitlab::Agent::AgentTracker::Rpc::AgentTracker::Stub,
configuration_project: Gitlab::Agent::ConfigurationProject::Rpc::ConfigurationProject::Stub,
autoflow: Gitlab::Agent::AutoFlow::Rpc::AutoFlow::Stub,
@ -20,6 +21,18 @@ module Gitlab
raise ConfigurationError, 'KAS internal URL is not configured' unless Gitlab::Kas.internal_url.present?
end
# Return GitLab KAS server info
# This method only returns information about a single KAS server instance without taking into account
# that there are potentially multiple KAS replicas running, which may not have the same server info.
# This is particularly the case during a rollout.
def get_server_info
request = Gitlab::Agent::ServerInfo::Rpc::GetServerInfoRequest.new
stub_for(:server_info)
.get_server_info(request, metadata: metadata)
.current_server_info
end
def get_connected_agents_by_agent_ids(agent_ids:)
request = Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsByAgentIDsRequest.new(agent_ids: agent_ids)

View File

@ -46,10 +46,8 @@ describe('Deploy freeze table', () => {
describe('Renders correct data', () => {
it('displays empty', () => {
expect(findEmptyFreezePeriods().exists()).toBe(true);
expect(findEmptyFreezePeriods().text()).toBe(
`No deploy freezes exist for this project. To add one, select
Add deploy freeze
above.`,
expect(findEmptyFreezePeriods().text()).toMatchInterpolatedText(
'No deploy freezes exist for this project. To add one, select Add deploy freeze above.',
);
});

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::Entities::VirtualRegistries::Packages::Maven::CachedResponse, feature_category: :virtual_registry do
let(:cached_response) { build_stubbed(:virtual_registries_packages_maven_cached_response) }
subject { described_class.new(cached_response).as_json }
it do
is_expected.to include(:cached_response_id, :group_id, :upstream_id, :upstream_checked_at, :created_at, :updated_at,
:file, :size, :downloaded_at, :downloads_count, :relative_path, :upstream_etag, :content_type)
end
end

View File

@ -160,6 +160,17 @@ RSpec.describe ContainerRegistry::Path do
end
end
context 'when initialized with a project' do
subject { described_class.new(path, project: project) }
let(:project) { create(:project) }
let(:path) { 'any_path' }
it 'returns initialized project' do
expect(subject.repository_project).to eq project
end
end
context 'when matching multi-level path' do
let(:project) do
create(:project, group: group, path: 'some_project')

View File

@ -15,6 +15,10 @@ RSpec.describe Gitlab::Checks::FileSizeCheck::HookEnvironmentAwareAnyOversizedBl
let(:changes) { [{ newrev: 'master' }] }
before do
stub_feature_flags(dont_ignore_alternate_directories: false)
end
describe '#find' do
subject { any_quarantined_blobs.find }

View File

@ -45,6 +45,30 @@ RSpec.describe Gitlab::Kas::Client do
allow(token).to receive(:audience=).with(described_class::JWT_AUDIENCE)
end
describe '#get_server_info' do
let(:stub) { instance_double(Gitlab::Agent::ServerInfo::Rpc::ServerInfo::Stub) }
let(:request) { instance_double(Gitlab::Agent::ServerInfo::Rpc::GetServerInfoRequest) }
let(:server_info) { double }
let(:response) { double(Gitlab::Agent::ServerInfo::Rpc::GetServerInfoResponse, current_server_info: server_info) }
subject { described_class.new.get_server_info }
before do
expect(Gitlab::Agent::ServerInfo::Rpc::ServerInfo::Stub).to receive(:new)
.with('example.kas.internal', :this_channel_is_insecure, timeout: described_class::TIMEOUT)
.and_return(stub)
expect(Gitlab::Agent::ServerInfo::Rpc::GetServerInfoRequest).to receive(:new)
.and_return(request)
expect(stub).to receive(:get_server_info)
.with(request, metadata: { 'authorization' => 'bearer test-token' })
.and_return(response)
end
it { is_expected.to eq(server_info) }
end
describe '#get_connected_agents_by_agent_ids' do
let(:stub) { instance_double(Gitlab::Agent::AgentTracker::Rpc::AgentTracker::Stub) }
let(:request) { instance_double(Gitlab::Agent::AgentTracker::Rpc::GetConnectedAgentsByAgentIDsRequest) }

View File

@ -8,7 +8,8 @@ RSpec.describe Integrations::Matrix, feature_category: :integrations do
{
body: be_present,
msgtype: 'm.text',
format: 'org.matrix.custom.html'
format: 'org.matrix.custom.html',
formatted_body: be_present
}
end
end
@ -54,4 +55,44 @@ RSpec.describe Integrations::Matrix, feature_category: :integrations do
end
end
end
describe '#notify' do
let(:message) { instance_double(Integrations::ChatMessage::PushMessage, summary: '_Test message') }
let(:header) { { 'Content-Type' => 'application/json' } }
let(:response) { instance_double(HTTParty::Response, success?: true) }
let(:body) do
{
body: '_Test message',
msgtype: 'm.text',
format: 'org.matrix.custom.html',
formatted_body: Banzai.render_and_post_process('_Test message', context)
}.compact_blank
end
before do
allow(Gitlab::HTTP).to receive(:put).and_return(response)
end
context 'with project-level integration' do
let(:subject) { create(:matrix_integration) }
let(:context) { { project: subject.project } }
it 'sends PUT request with `project` context' do
expect(Gitlab::HTTP).to receive(:put).with(anything, headers: header, body: Gitlab::Json.dump(body))
subject.send(:notify, message, {})
end
end
context 'without project-level integration' do
let(:subject) { create(:matrix_integration, :instance) }
let(:context) { { skip_project_check: true } }
it 'sends PUT request with `skip_project_check` context' do
expect(Gitlab::HTTP).to receive(:put).with(anything, headers: header, body: Gitlab::Json.dump(body))
subject.send(:notify, message, {})
end
end
end
end

View File

@ -99,4 +99,19 @@ RSpec.describe VirtualRegistries::Packages::Maven::CachedResponse, type: :model,
end
end
end
describe '.search_by_relative_path' do
let_it_be(:cached_response) { create(:virtual_registries_packages_maven_cached_response) }
let_it_be(:other_cached_response) do
create(:virtual_registries_packages_maven_cached_response, relative_path: 'other/path')
end
subject { described_class.search_by_relative_path(relative_path) }
context 'with a matching relative path' do
let(:relative_path) { cached_response.relative_path.slice(3, 8) }
it { is_expected.to contain_exactly(cached_response) }
end
end
end

View File

@ -10,6 +10,10 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, feature_category: :virtu
let_it_be(:group) { create(:group) }
let_it_be_with_reload(:registry) { create(:virtual_registries_packages_maven_registry, group: group) }
let_it_be(:upstream) { create(:virtual_registries_packages_maven_upstream, registry: registry) }
let_it_be_with_reload(:cached_response) do
create(:virtual_registries_packages_maven_cached_response, upstream: upstream)
end
let_it_be(:project) { create(:project, namespace: group) }
let_it_be(:user) { create(:user, owner_of: project) }
let_it_be(:job) { create(:ci_build, :running, user: user, project: project) }
@ -967,6 +971,213 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, feature_category: :virtu
end
end
describe 'GET /api/v4/virtual_registries/packages/maven/registries/:id/upstreams/:upstream_id/cached_responses' do
let(:upstream_id) { upstream.id }
let(:url) do
"/virtual_registries/packages/maven/registries/#{registry.id}/upstreams/#{upstream_id}/cached_responses"
end
subject(:api_request) { get api(url), headers: headers }
shared_examples 'successful response' do
it 'returns a successful response' do
api_request
expect(response).to have_gitlab_http_status(:ok)
expect(Gitlab::Json.parse(response.body)).to contain_exactly(
cached_response
.as_json
.merge('cached_response_id' => Base64.urlsafe_encode64(cached_response.relative_path))
.except('id', 'object_storage_key', 'file_store')
)
end
end
it { is_expected.to have_request_urgency(:low) }
it_behaves_like 'disabled feature flag'
it_behaves_like 'disabled dependency proxy'
it_behaves_like 'not authenticated user'
context 'with invalid upstream' do
where(:upstream_id, :status) do
non_existing_record_id | :not_found
'foo' | :bad_request
'' | :bad_request
end
with_them do
it_behaves_like 'returning response status', params[:status]
end
end
context 'with a non-member user' do
let_it_be(:user) { create(:user) }
where(:group_access_level, :status) do
'PUBLIC' | :forbidden
'INTERNAL' | :forbidden
'PRIVATE' | :forbidden
end
with_them do
before do
group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_access_level, false))
end
it_behaves_like 'returning response status', params[:status]
end
end
context 'for authentication' do
where(:token, :sent_as, :status) do
:personal_access_token | :header | :ok
:personal_access_token | :basic_auth | :ok
:deploy_token | :header | :ok
:deploy_token | :basic_auth | :ok
:job_token | :header | :ok
:job_token | :basic_auth | :ok
end
with_them do
let(:headers) do
case sent_as
when :header
token_header(token)
when :basic_auth
token_basic_auth(token)
end
end
it_behaves_like 'returning response status', params[:status]
end
end
context 'for search param' do
let(:url) { "#{super()}?search=#{search}" }
let(:valid_search) { cached_response.relative_path.slice(0, 5) }
where(:search, :status) do
ref(:valid_search) | :ok
'foo' | :empty
'' | :ok
nil | :ok
end
with_them do
if params[:status] == :ok
it_behaves_like 'successful response'
else
it 'returns an empty array' do
api_request
expect(json_response).to eq([])
end
end
end
end
end
describe 'DELETE /api/v4/virtual_registries/packages/maven/registries/:id/upstreams/' \
':upstream_id/cached_responses/:cached_response_id' do
let(:cached_response_id) { Base64.urlsafe_encode64(cached_response.relative_path) }
let(:url) do
"/virtual_registries/packages/maven/registries/#{registry.id}/upstreams/#{upstream.id}/" \
"cached_responses/#{cached_response_id}"
end
subject(:api_request) { delete api(url), headers: headers }
shared_examples 'successful response' do
it 'returns a successful response' do
expect { api_request }.to change { upstream.cached_responses.count }.by(-1)
expect(response).to have_gitlab_http_status(:no_content)
end
end
it { is_expected.to have_request_urgency(:low) }
it_behaves_like 'disabled feature flag'
it_behaves_like 'disabled dependency proxy'
it_behaves_like 'not authenticated user'
context 'for different user roles' do
where(:user_role, :status) do
:owner | :no_content
:maintainer | :no_content
:developer | :forbidden
:reporter | :forbidden
:guest | :forbidden
end
with_them do
before do
group.send(:"add_#{user_role}", user)
end
if params[:status] == :no_content
it_behaves_like 'successful response'
else
it_behaves_like 'returning response status', params[:status]
end
end
end
context 'for authentication' do
before_all do
group.add_maintainer(user)
end
where(:token, :sent_as, :status) do
:personal_access_token | :header | :no_content
:personal_access_token | :basic_auth | :no_content
:deploy_token | :header | :forbidden
:deploy_token | :basic_auth | :forbidden
:job_token | :header | :no_content
:job_token | :basic_auth | :no_content
end
with_them do
let(:headers) do
case sent_as
when :header
token_header(token)
when :basic_auth
token_basic_auth(token)
end
end
if params[:status] == :no_content
it_behaves_like 'successful response'
else
it_behaves_like 'returning response status', params[:status]
end
end
end
context 'when error occurs' do
before_all do
group.add_maintainer(user)
end
before do
allow_next_found_instance_of(cached_response.class) do |instance|
allow(instance).to receive(:save).and_return(false)
errors = ActiveModel::Errors.new(instance).tap { |e| e.add(:cached_response, 'error message') }
allow(instance).to receive(:errors).and_return(errors)
end
end
it 'returns an error' do
api_request
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to eq({ 'message' => { 'cached_response' => ['error message'] } })
end
end
end
describe 'GET /api/v4/virtual_registries/packages/maven/:id/*path' do
let(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' }
let(:url) { "/virtual_registries/packages/maven/#{registry.id}/#{path}" }

View File

@ -112,5 +112,15 @@ RSpec.describe ::Packages::Npm::ProcessPackageFileService, feature_category: :pa
)
end
end
context 'with TarInvalidError' do
before do
allow_next_instance_of(Gem::Package::TarReader::Entry) do |instance|
allow(instance).to receive(:full_name).and_raise(::Gem::Package::TarInvalidError)
end
end
it_behaves_like 'processing the package file'
end
end
end

Binary file not shown.