Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent cb40196f48
commit dd9b804282
@@ -795,14 +795,6 @@ rspec-ee unit gitlab-duo-chat-qa-fast pg14:
-    - !reference [.base-script, script]
-    - rspec_parallelized_job "--tag fast_chat_qa_evaluation"
 
 rspec-ee unit gitlab-duo pg14:
   extends:
     - .rspec-ee-base-gitlab-duo
     - .rails:rules:ee-gitlab-duo-chat-always
   script:
     - !reference [.base-script, script]
     - rspec_parallelized_job "--tag gitlab_duo"
 
 rspec-ee unit gitlab-duo-chat-qa pg14:
   variables:
     QA_EVAL_REPORT_FILENAME: "qa_evaluation_report.md"

@@ -103,7 +103,7 @@ include:
     # spec/lib, yet background migration tests are also sitting there,
     # and they should run on their own jobs so we don't need to run them
     # in unit tests again.
-    - rspec_section rspec_parallelized_job "--fail-fast=${RSPEC_FAIL_FAST_THRESHOLD} --tag ~quarantine --tag ~level:background_migration --tag ~click_house --tag ~real_ai_request"
+    - rspec_section rspec_parallelized_job "--fail-fast=${RSPEC_FAIL_FAST_THRESHOLD} --tag ~quarantine --tag ~level:background_migration --tag ~click_house"
   after_script:
     - source scripts/utils.sh
     - bundle exec gem list gitlab_quality-test_tooling

@@ -2315,8 +2315,6 @@
 .rails:rules:ee-gitlab-duo-chat-base:
   rules:
     - !reference [".strict-ee-only-rules", rules]
-    - if: '$REAL_AI_REQUEST == null'
-      when: never
     - if: '$ANTHROPIC_API_KEY == null'
       when: never
     - <<: *if-fork-merge-request

@@ -6,25 +6,9 @@ Rails/StrongParams:
     - 'app/controllers/activity_pub/projects/application_controller.rb'
     - 'app/controllers/admin/application_settings_controller.rb'
     - 'app/controllers/admin/applications_controller.rb'
-    - 'app/controllers/admin/background_migrations_controller.rb'
-    - 'app/controllers/admin/batched_jobs_controller.rb'
-    - 'app/controllers/admin/broadcast_messages_controller.rb'
-    - 'app/controllers/admin/deploy_keys_controller.rb'
-    - 'app/controllers/admin/groups_controller.rb'
-    - 'app/controllers/admin/hook_logs_controller.rb'
-    - 'app/controllers/admin/hooks_controller.rb'
-    - 'app/controllers/admin/identities_controller.rb'
-    - 'app/controllers/admin/impersonation_tokens_controller.rb'
     - 'app/controllers/admin/keys_controller.rb'
-    - 'app/controllers/admin/labels_controller.rb'
     - 'app/controllers/admin/projects_controller.rb'
-    - 'app/controllers/admin/runner_projects_controller.rb'
     - 'app/controllers/admin/runners_controller.rb'
-    - 'app/controllers/admin/sessions_controller.rb'
-    - 'app/controllers/admin/slacks_controller.rb'
-    - 'app/controllers/admin/spam_logs_controller.rb'
-    - 'app/controllers/admin/topics/avatars_controller.rb'
-    - 'app/controllers/admin/topics_controller.rb'
     - 'app/controllers/admin/users_controller.rb'
     - 'app/controllers/autocomplete_controller.rb'
     - 'app/controllers/banzai/uploads_controller.rb'

@@ -1,10 +1,5 @@
 @import 'mixins_and_variables_and_functions';
 
-.dashboard-cards {
-  margin-right: -$gl-padding-8;
-  margin-left: -$gl-padding-8;
-}
-
 .dashboard-card {
   cursor: grab;

@@ -15,34 +15,34 @@ module Admin
         'finished' => batched_migration_class.with_status(:finished).queue_order.reverse_order
       }
 
-      @current_tab = @relations_by_tab.key?(params[:tab]) ? params[:tab] : 'queued'
-      @migrations = @relations_by_tab[@current_tab].page(params[:page])
+      @current_tab = @relations_by_tab.key?(safe_params[:tab]) ? safe_params[:tab] : 'queued'
+      @migrations = @relations_by_tab[@current_tab].page(pagination_params[:page])
       @successful_rows_counts = batched_migration_class.successful_rows_counts(@migrations.map(&:id))
       @databases = Gitlab::Database.db_config_names(with_schema: :gitlab_shared)
     end
 
     def show
-      @migration = batched_migration_class.find(params[:id])
+      @migration = batched_migration_class.find(safe_params[:id])
 
-      @failed_jobs = @migration.batched_jobs.with_status(:failed).page(params[:page])
+      @failed_jobs = @migration.batched_jobs.with_status(:failed).page(pagination_params[:page])
     end
 
     def pause
-      migration = batched_migration_class.find(params[:id])
+      migration = batched_migration_class.find(safe_params[:id])
       migration.pause!
 
       redirect_back fallback_location: { action: 'index' }
     end
 
     def resume
-      migration = batched_migration_class.find(params[:id])
+      migration = batched_migration_class.find(safe_params[:id])
       migration.execute!
 
       redirect_back fallback_location: { action: 'index' }
     end
 
     def retry
-      migration = batched_migration_class.find(params[:id])
+      migration = batched_migration_class.find(safe_params[:id])
       migration.retry_failed_jobs! if migration.failed?
 
       redirect_back fallback_location: { action: 'index' }
@@ -57,7 +57,7 @@ module Admin
     end
 
     def base_model
-      @selected_database = params[:database] || Gitlab::Database::MAIN_DATABASE_NAME
+      @selected_database = safe_params[:database] || Gitlab::Database::MAIN_DATABASE_NAME
 
       Gitlab::Database.database_base_models[@selected_database]
     end
@@ -65,5 +65,9 @@ module Admin
     def batched_migration_class
       @batched_migration_class ||= Gitlab::Database::BackgroundMigration::BatchedMigration
     end
+
+    def safe_params
+      params.permit(:id, :database, :tab)
+    end
   end
 end

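For illustration: the pattern this commit applies across the admin controllers is to route every raw `params` read through `params.permit` (often wrapped in a `safe_params` helper) so the `Rails/StrongParams` cop can verify it. A minimal standalone sketch of that behavior, assuming only that actionpack is available; the keys and values are illustrative:

```ruby
require 'action_controller'

# Stand-in for a controller's `params` object.
params = ActionController::Parameters.new(id: '42', tab: 'queued', evil: 'x')

# Permitting first documents, and enforces, which keys the action may read.
safe_params = params.permit(:id, :database, :tab)

safe_params[:id]   # => "42"
safe_params[:evil] # => nil, because the key was not permitted
```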
@@ -8,7 +8,7 @@ module Admin
       around_action :support_multiple_databases
 
       def show
-        @job = Gitlab::Database::BackgroundMigration::BatchedJob.find(params[:id])
+        @job = Gitlab::Database::BackgroundMigration::BatchedJob.find(safe_params[:id])
 
         @transition_logs = @job.batched_job_transition_logs
       end
@@ -22,9 +22,13 @@ module Admin
       end
 
       def base_model
-        @selected_database = params[:database] || Gitlab::Database::MAIN_DATABASE_NAME
+        @selected_database = safe_params[:database] || Gitlab::Database::MAIN_DATABASE_NAME
 
         Gitlab::Database.database_base_models[@selected_database]
       end
+
+      def safe_params
+        params.permit(:id, :database)
+      end
     end
 end

@@ -76,11 +76,11 @@ module Admin
   protected
 
   def find_broadcast_message
-    @broadcast_message = System::BroadcastMessage.find(params[:id])
+    @broadcast_message = System::BroadcastMessage.find(params.permit(:id)[:id])
   end
 
   def find_broadcast_messages
-    @broadcast_messages = System::BroadcastMessage.order(ends_at: :desc).page(params[:page]) # rubocop: disable CodeReuse/ActiveRecord
+    @broadcast_messages = System::BroadcastMessage.order(ends_at: :desc).page(pagination_params[:page]) # rubocop: disable CodeReuse/ActiveRecord
   end
 
   def broadcast_message_params

@@ -47,7 +47,7 @@ class Admin::DeployKeysController < Admin::ApplicationController
   protected
 
   def deploy_key
-    @deploy_key ||= deploy_keys.find(params[:id])
+    @deploy_key ||= deploy_keys.find(params.permit(:id)[:id])
   end
 
   def deploy_keys

@@ -8,9 +8,9 @@ class Admin::GroupsController < Admin::ApplicationController
   feature_category :groups_and_projects, [:create, :destroy, :edit, :index, :members_update, :new, :show, :update]
 
   def index
-    @groups = groups.sort_by_attribute(@sort = params[:sort])
-    @groups = @groups.search(params[:name]) if params[:name].present?
-    @groups = @groups.page(params[:page])
+    @groups = groups.sort_by_attribute(@sort = pagination_params[:sort])
+    @groups = @groups.search(safe_params[:name]) if safe_params[:name].present?
+    @groups = @groups.page(pagination_params[:page])
   end
 
   # rubocop: disable CodeReuse/ActiveRecord
@@ -21,10 +21,10 @@ class Admin::GroupsController < Admin::ApplicationController
     # the Group with statistics).
     @group = Group.with_statistics.find(group&.id)
     @members = present_members(
-      group_members.order("access_level DESC").page(params[:members_page]))
+      group_members.order("access_level DESC").page(safe_params[:members_page]))
     @requesters = present_members(
       AccessRequestsFinder.new(@group).execute(current_user))
-    @projects = @group.projects.with_statistics.page(params[:projects_page])
+    @projects = @group.projects.with_statistics.page(safe_params[:projects_page])
   end
   # rubocop: enable CodeReuse/ActiveRecord
 
@@ -79,7 +79,7 @@ class Admin::GroupsController < Admin::ApplicationController
   end
 
   def group
-    @group ||= Group.find_by_full_path(params[:id])
+    @group ||= Group.find_by_full_path(params.permit(:id)[:id])
   end
 
   def group_members
@@ -111,6 +111,10 @@ class Admin::GroupsController < Admin::ApplicationController
     ] }
     ]
   end
+
+  def safe_params
+    params.permit(:name, :members_page, :projects_page)
+  end
 end
 
 Admin::GroupsController.prepend_mod_with('Admin::GroupsController')

@@ -7,7 +7,7 @@ module Admin
     private
 
     def hook
-      @hook ||= SystemHook.find(params[:hook_id])
+      @hook ||= SystemHook.find(params.permit(:hook_id)[:hook_id])
     end
 
     def after_retry_redirect_path

@@ -8,7 +8,7 @@ class Admin::HooksController < Admin::ApplicationController
   before_action :not_found, unless: -> { system_hooks? }
 
   def test
-    result = TestHooks::SystemService.new(hook, current_user, params[:trigger]).execute
+    result = TestHooks::SystemService.new(hook, current_user, params.permit(:trigger)[:trigger]).execute
 
     set_hook_execution_notice(result)
 
@@ -22,7 +22,7 @@ class Admin::HooksController < Admin::ApplicationController
   end
 
   def hook
-    @hook ||= SystemHook.find(params[:id])
+    @hook ||= SystemHook.find(params.permit(:id)[:id])
   end
 
   def hook_param_names

@@ -60,12 +60,12 @@ class Admin::IdentitiesController < Admin::ApplicationController
 
   # rubocop: disable CodeReuse/ActiveRecord
   def user
-    @user ||= User.find_by!(username: params[:user_id])
+    @user ||= User.find_by!(username: params.permit(:user_id)[:user_id])
   end
   # rubocop: enable CodeReuse/ActiveRecord
 
   def identity
-    @identity ||= user.identities.find(params[:id])
+    @identity ||= user.identities.find(params.permit(:id)[:id])
   end
 
   def identity_params

@@ -24,7 +24,7 @@ class Admin::ImpersonationTokensController < Admin::ApplicationController
   end
 
   def revoke
-    @impersonation_token = finder.find(params[:id])
+    @impersonation_token = finder.find(params.permit(:id)[:id])
 
     if @impersonation_token.revoke!
       flash[:notice] = format(_("Revoked impersonation token %{token_name}!"), token_name: @impersonation_token.name)
@@ -39,7 +39,7 @@ class Admin::ImpersonationTokensController < Admin::ApplicationController
 
   # rubocop: disable CodeReuse/ActiveRecord
   def user
-    @user ||= User.find_by!(username: params[:user_id])
+    @user ||= User.find_by!(username: params.permit(:user_id)[:user_id])
   end
   # rubocop: enable CodeReuse/ActiveRecord
 

@@ -7,7 +7,7 @@ class Admin::LabelsController < Admin::ApplicationController
   urgency :low
 
   def index
-    @labels = Label.templates.page(params[:page])
+    @labels = Label.templates.page(pagination_params[:page])
   end
 
   def show
@@ -61,10 +61,10 @@ class Admin::LabelsController < Admin::ApplicationController
   private
 
   def set_label
-    @label = Label.find(params[:id])
+    @label = Label.find(params.permit(:id)[:id])
   end
 
   def label_params
-    params[:label].permit(:title, :description, :color)
+    params[:label].permit(:title, :description, :color) # rubocop:disable Rails/StrongParams -- hash access is safely followed by permit
   end
 end

@@ -7,7 +7,7 @@ class Admin::RunnerProjectsController < Admin::ApplicationController
   urgency :low
 
   def create
-    @runner = Ci::Runner.find(params[:runner_project][:runner_id])
+    @runner = Ci::Runner.find(safe_params[:runner_project][:runner_id])
 
     if ::Ci::Runners::AssignRunnerService.new(@runner, @project, current_user).execute.success?
       flash[:success] = s_('Runners|Runner assigned to project.')
@@ -18,7 +18,7 @@ class Admin::RunnerProjectsController < Admin::ApplicationController
   end
 
   def destroy
-    rp = Ci::RunnerProject.find(params[:id])
+    rp = Ci::RunnerProject.find(safe_params[:id])
     runner = rp.runner
 
     ::Ci::Runners::UnassignRunnerService.new(rp, current_user).execute
@@ -31,8 +31,12 @@ class Admin::RunnerProjectsController < Admin::ApplicationController
 
   def project
     @project = Project.find_by_full_path(
-      [params[:namespace_id], '/', params[:project_id]].join('')
+      [safe_params[:namespace_id], '/', safe_params[:project_id]].join('')
     )
     @project || render_404
   end
+
+  def safe_params
+    params.permit(:id, :namespace_id, :project_id, runner_project: [:runner_id])
+  end
 end

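For illustration, the nested form of the same pattern: `permit` can whitelist keys inside a nested hash, which is what `safe_params` does above for `runner_project`. A standalone sketch, assuming actionpack; the values are illustrative:

```ruby
require 'action_controller'

params = ActionController::Parameters.new(
  namespace_id: 'gitlab-org', project_id: 'gitlab',
  runner_project: { runner_id: '7', unexpected: 'x' }
)

safe_params = params.permit(:id, :namespace_id, :project_id, runner_project: [:runner_id])

safe_params[:runner_project][:runner_id]  # => "7"
safe_params[:runner_project][:unexpected] # => nil, filtered out by permit
```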
@@ -63,7 +63,7 @@ class Admin::SessionsController < ApplicationController
   end
 
   def user_params
-    params.fetch(:user, {}).permit(:password, :otp_attempt, :device_response)
+    params.fetch(:user, {}).permit(:password, :otp_attempt, :device_response) # rubocop:disable Rails/StrongParams -- fetch is safely followed by permit
   end
 
   def valid_otp_attempt?(user)

@@ -21,7 +21,7 @@ module Admin
     end
 
     def installation_service
-      Integrations::SlackInstallation::InstanceService.new(current_user: current_user, params: params)
+      Integrations::SlackInstallation::InstanceService.new(current_user: current_user, params: params.permit(:code))
    end
  end
end

@@ -7,14 +7,14 @@ class Admin::SpamLogsController < Admin::ApplicationController
   def index
     @spam_logs = SpamLog.preload(user: [:trusted_with_spam_attribute])
       .order(id: :desc)
-      .page(params[:page]).without_count
+      .page(pagination_params[:page]).without_count
   end
   # rubocop: enable CodeReuse/ActiveRecord
 
   def destroy
-    spam_log = SpamLog.find(params[:id])
+    spam_log = SpamLog.find(safe_params[:id])
 
-    if params[:remove_user]
+    if safe_params[:remove_user]
       spam_log.remove_user(deleted_by: current_user)
       redirect_to admin_spam_logs_path,
         status: :found,
@@ -26,7 +26,7 @@ class Admin::SpamLogsController < Admin::ApplicationController
   end
 
   def mark_as_ham
-    spam_log = SpamLog.find(params[:id])
+    spam_log = SpamLog.find(safe_params[:id])
 
     if Spam::HamService.new(spam_log).execute
       redirect_to admin_spam_logs_path, notice: _('Spam log successfully submitted as ham.')
@@ -34,4 +34,10 @@ class Admin::SpamLogsController < Admin::ApplicationController
     redirect_to admin_spam_logs_path, alert: _('Error with Akismet. Please check the logs for more info.')
     end
   end
+
+  private
+
+  def safe_params
+    params.permit(:id, :remove_user)
+  end
 end

@@ -4,7 +4,7 @@ class Admin::Topics::AvatarsController < Admin::ApplicationController
   feature_category :groups_and_projects
 
   def destroy
-    @topic = Projects::Topic.find(params[:topic_id])
+    @topic = Projects::Topic.find(params.permit(:topic_id)[:topic_id])
 
     @topic.remove_avatar!
     @topic.save

@@ -9,7 +9,7 @@ class Admin::TopicsController < Admin::ApplicationController
   feature_category :groups_and_projects
 
   def index
-    @topics = Projects::TopicsFinder.new(params: params.permit(:search)).execute.page(params[:page]).without_count
+    @topics = Projects::TopicsFinder.new(params: params.permit(:search)).execute.page(pagination_params[:page]).without_count
   end
 
   def new
@@ -60,7 +60,7 @@ class Admin::TopicsController < Admin::ApplicationController
   private
 
   def topic
-    @topic ||= Projects::Topic.find(params[:id])
+    @topic ||= Projects::Topic.find(params.permit(:id)[:id])
   end
 
   def topic_params

@@ -51,6 +51,10 @@ class Groups::ApplicationController < ApplicationController
     render_403 unless can?(current_user, :admin_group_member, group)
   end
 
+  def authorize_owner_access!
+    render_403 unless can?(current_user, :owner_access, group)
+  end
+
   def authorize_billings_page!
     render_404 unless can?(current_user, :read_billing, group)
   end

@@ -13,6 +13,7 @@ class Groups::GroupMembersController < Groups::ApplicationController
   end
 
   # Authorize
+  before_action :authorize_owner_access!, only: :bulk_reassignment_file
   before_action :authorize_admin_group_member!, except: admin_not_required_endpoints
   before_action :authorize_read_group_member!, only: :index
 
@@ -52,6 +53,22 @@ class Groups::GroupMembersController < Groups::ApplicationController
     )
   end
 
+  def bulk_reassignment_file
+    return render_404 unless Feature.enabled?(:importer_user_mapping_reassignment_csv, current_user)
+
+    csv_response = Import::SourceUsers::GenerateCsvService.new(membershipable, current_user: current_user).execute
+
+    if csv_response.success?
+      send_data(
+        csv_response.payload,
+        filename: "bulk_reassignments_for_namespace_#{membershipable.id}_#{Time.current.to_i}.csv",
+        type: 'text/csv; charset=utf-8'
+      )
+    else
+      redirect_back_or_default(options: { alert: csv_response.message })
+    end
+  end
+
   # MembershipActions concern
   alias_method :membershipable, :group
 

@@ -3,6 +3,7 @@
 module Import
   class SourceUser < ApplicationRecord
     include Gitlab::SQL::Pattern
+    include EachBatch
 
     self.table_name = 'import_source_users'
 
@@ -27,25 +28,35 @@ module Import
     scope :awaiting_reassignment, -> { where(status: [0, 1, 2, 3, 4]) }
     scope :reassigned, -> { where(status: [5, 6]) }
 
+    STATUSES = {
+      pending_reassignment: 0,
+      awaiting_approval: 1,
+      reassignment_in_progress: 2,
+      rejected: 3,
+      failed: 4,
+      completed: 5,
+      keep_as_placeholder: 6
+    }.freeze
+
+    ACCEPTED_STATUSES = %i[reassignment_in_progress completed failed].freeze
+    REASSIGNABLE_STATUSES = %i[pending_reassignment rejected].freeze
+    CANCELABLE_STATUSES = %i[awaiting_approval rejected].freeze
+
     state_machine :status, initial: :pending_reassignment do
-      state :pending_reassignment, value: 0
-      state :awaiting_approval, value: 1
-      state :reassignment_in_progress, value: 2
-      state :rejected, value: 3
-      state :failed, value: 4
-      state :completed, value: 5
-      state :keep_as_placeholder, value: 6
+      STATUSES.each do |status_name, value|
+        state status_name, value: value
+      end
 
       event :reassign do
-        transition [:pending_reassignment, :rejected] => :awaiting_approval
+        transition REASSIGNABLE_STATUSES => :awaiting_approval
       end
 
       event :cancel_reassignment do
-        transition [:awaiting_approval, :rejected] => :pending_reassignment
+        transition CANCELABLE_STATUSES => :pending_reassignment
       end
 
       event :keep_as_placeholder do
-        transition [:pending_reassignment, :rejected] => :keep_as_placeholder
+        transition REASSIGNABLE_STATUSES => :keep_as_placeholder
       end
 
       event :accept do
@@ -99,15 +110,15 @@ module Import
     end
 
     def accepted_status?
-      reassignment_in_progress? || completed? || failed?
+      STATUSES.slice(*ACCEPTED_STATUSES).value?(status)
     end
 
     def reassignable_status?
-      pending_reassignment? || rejected?
+      STATUSES.slice(*REASSIGNABLE_STATUSES).value?(status)
     end
 
     def cancelable_status?
-      awaiting_approval? || rejected?
+      STATUSES.slice(*CANCELABLE_STATUSES).value?(status)
    end
  end
end

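For illustration, the predicate refactor above can be exercised in plain Ruby. This standalone sketch (constants copied from the diff) shows how slicing `STATUSES` by a named subset and testing `value?` against the stored integer replaces a chain of generated predicate methods:

```ruby
STATUSES = {
  pending_reassignment: 0, awaiting_approval: 1, reassignment_in_progress: 2,
  rejected: 3, failed: 4, completed: 5, keep_as_placeholder: 6
}.freeze

ACCEPTED_STATUSES = %i[reassignment_in_progress completed failed].freeze

# `status` is the raw integer value stored in the database column.
def accepted_status?(status)
  STATUSES.slice(*ACCEPTED_STATUSES).value?(status)
end

accepted_status?(2) # => true  (reassignment_in_progress)
accepted_status?(3) # => false (rejected)
```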
@@ -77,11 +77,6 @@ class PoolRepository < ApplicationRecord
     object_pool.create
   rescue GRPC::AlreadyExists
     # The object pool already exists. Nothing to do here.
-  rescue GRPC::FailedPrecondition => e
-    # This rescue is temporary until gitaly returns the correct error code for
-    # "repo exists already". Gitaly error messages are not guaranteed to match
-    # and so should not typically be used to determine error type.
-    raise unless e.message.include?('repository exists already')
   end
 
   # The members of the pool should have fetched the missing objects to their own

@@ -34,6 +34,19 @@ module VirtualRegistries
     after_validation :reset_credentials, if: -> { persisted? && url_changed? }
     before_save :write_credentials
 
+    def url_for(path)
+      full_url = File.join(url, path)
+      Addressable::URI.parse(full_url).to_s
+    end
+
+    def headers
+      return {} unless username.present? && password.present?
+
+      authorization = ActionController::HttpAuthentication::Basic.encode_credentials(username, password)
+
+      { Authorization: authorization }
+    end
+
     private
 
     def read_credentials

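For illustration, a standalone sketch of what the two new upstream helpers compute, assuming the addressable gem and actionpack are available; the URL, path, and credentials are illustrative:

```ruby
require 'addressable/uri'
require 'action_controller'

url = 'https://repo.maven.apache.org/maven2'
path = 'com/example/app/1.0/app-1.0.pom'

# url_for: join the registry URL and the requested path, normalized by Addressable.
full_url = Addressable::URI.parse(File.join(url, path)).to_s
# => "https://repo.maven.apache.org/maven2/com/example/app/1.0/app-1.0.pom"

# headers: HTTP Basic credentials for the upstream, when configured.
authorization = ActionController::HttpAuthentication::Basic.encode_credentials('user', 'pass')
headers = { Authorization: authorization } # => { Authorization: "Basic dXNlcjpwYXNz" }
```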
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module VirtualRegistries
+  module Packages
+    module Maven
+      class RegistryPolicy < ::BasePolicy
+        delegate { ::VirtualRegistries::Packages::Policies::Group.new(@subject.group) }
+      end
+    end
+  end
+end

@@ -50,13 +50,6 @@ module Ci
         )
       end
 
-      if Feature.disabled?(:ci_pipelines_data_ingestion_to_click_house, Feature.current_request)
-        return ServiceResponse.error(
-          message: 'Feature ci_pipelines_data_ingestion_to_click_house is disabled',
-          reason: :disabled
-        )
-      end
-
       # Prevent parallel jobs
       in_lock("#{self.class.name.underscore}/worker/#{@worker_index}", ttl: MAX_TTL, retries: 0) do
         ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do

@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module Import
+  module SourceUsers
+    # This class generates CSV data for `Import::SourceUser` records associated
+    # with a namespace. This spreadsheet is filled in and re-uploaded to
+    # facilitate the user mapping flow.
+    class GenerateCsvService
+      HEADERS = [
+        'Source host',
+        'Import type',
+        'Source user identifier',
+        'Source user name',
+        'Source username',
+        'GitLab username',
+        'GitLab public email'
+      ].freeze
+
+      # @param namespace [Namespace, Group] The namespace where the import source users are associated
+      # @param current_user [User] The user performing the CSV export
+      def initialize(namespace, current_user:)
+        @namespace = namespace
+        @current_user = current_user
+      end
+
+      def execute
+        # We use :owner_access here because it's shared between GroupPolicy and
+        # NamespacePolicy.
+        return error_invalid_permissions unless current_user.can?(:owner_access, namespace)
+
+        ServiceResponse.success(payload: csv_data)
+      end
+
+      private
+
+      attr_reader :namespace, :current_user
+
+      def csv_data
+        CSV.generate do |csv|
+          csv << HEADERS
+
+          import_source_users.each_batch(of: 1000) do |batch|
+            batch.each do |source_user|
+              csv << [
+                source_user.source_hostname,
+                source_user.import_type,
+                source_user.source_user_identifier,
+                source_user.source_name,
+                source_user.source_username,
+                '',
+                ''
+              ]
+            end
+          end
+        end
+      end
+
+      def import_source_users
+        statuses = Import::SourceUser::STATUSES.slice(*Import::SourceUser::REASSIGNABLE_STATUSES).values
+        namespace.import_source_users.by_statuses(statuses)
+      end
+
+      def error_invalid_permissions
+        ServiceResponse.error(
+          message: s_('Import|You do not have permission to view import source users for this namespace'),
+          reason: :forbidden
+        )
+      end
+    end
+  end
+end

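For illustration, a hedged sketch of driving the new service from a console; `group` and `admin` stand in for any namespace and one of its owners, and the output filename is arbitrary:

```ruby
response = Import::SourceUsers::GenerateCsvService.new(group, current_user: admin).execute

if response.success?
  # The payload is the raw CSV string generated above.
  File.write('bulk_reassignments.csv', response.payload)
else
  warn "#{response.reason}: #{response.message}" # e.g. :forbidden for non-owners
end
```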
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module VirtualRegistries
+  module Packages
+    module Maven
+      class HandleFileRequestService < ::BaseContainerService
+        alias_method :registry, :container
+
+        TIMEOUT = 5
+
+        def initialize(registry:, current_user: nil, params: {})
+          super(container: registry, current_user: current_user, params: params)
+        end
+
+        def execute
+          return ServiceResponse.error(message: 'Path not present', reason: :path_not_present) unless path.present?
+          return ServiceResponse.error(message: 'Unauthorized', reason: :unauthorized) unless allowed?
+
+          unless registry.upstream.present?
+            return ServiceResponse.error(message: 'No upstreams set', reason: :no_upstreams)
+          end
+
+          # TODO check cached responses here
+          # If one exists and can be used, return it.
+          # https://gitlab.com/gitlab-org/gitlab/-/issues/467983
+          handle_upstream(registry.upstream)
+        end
+
+        private
+
+        def handle_upstream(upstream)
+          url = upstream.url_for(path)
+          headers = upstream.headers
+          response = head_upstream(url: url, headers: headers)
+
+          if response.success?
+            workhorse_send_url_response(url: url, headers: headers)
+          else
+            ServiceResponse.error(message: 'File not found on any upstream', reason: :file_not_found_on_upstreams)
+          end
+        rescue *::Gitlab::HTTP::HTTP_ERRORS
+          ServiceResponse.error(message: 'Upstream not available', reason: :upstream_not_available)
+        end
+
+        def head_upstream(url:, headers:)
+          ::Gitlab::HTTP.head(url, headers: headers, follow_redirects: true, timeout: TIMEOUT)
+        end
+
+        def allowed?
+          can?(current_user, :read_virtual_registry, registry)
+        end
+
+        def path
+          params[:path]
+        end
+
+        def workhorse_send_url_response(url:, headers:)
+          ServiceResponse.success(
+            payload: { action: :workhorse_send_url, action_params: { url: url, headers: headers } }
+          )
+        end
+      end
+    end
+  end
+end

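For illustration, a hedged sketch of a caller branching on the service's failure reasons; `registry` and `user` are assumptions, and the success payload shape is taken from `workhorse_send_url_response` above:

```ruby
response = VirtualRegistries::Packages::Maven::HandleFileRequestService
  .new(registry: registry, current_user: user, params: { path: 'com/example/app/1.0/app-1.0.pom' })
  .execute

if response.success?
  # Hand the upstream URL and headers to Workhorse to stream the file.
  response.payload # => { action: :workhorse_send_url, action_params: { url: ..., headers: ... } }
else
  # One of :path_not_present, :unauthorized, :no_upstreams,
  # :file_not_found_on_upstreams, or :upstream_not_available.
  [response.reason, response.message]
end
```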
@@ -1,9 +0,0 @@
----
-name: ci_pipelines_data_ingestion_to_click_house
-feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/470079
-introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/158362
-rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/470495
-milestone: '17.2'
-group: group::runner
-type: wip
-default_enabled: false

@@ -0,0 +1,9 @@
+---
+name: virtual_registry_maven
+feature_issue_url: https://gitlab.com/groups/gitlab-org/-/epics/14137
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/160891
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/474863
+milestone: '17.3'
+group: group::package registry
+type: wip
+default_enabled: false

@@ -113,7 +113,11 @@ constraints(::Constraints::GroupUrlConstrainer.new) do
 
       resources :group_members, only: [:index, :update, :destroy], concerns: :access_requestable do
         post :resend_invite, on: :member
-        delete :leave, on: :collection
+
+        collection do
+          get :bulk_reassignment_file
+          delete :leave
+        end
       end
 
       resources :group_links, only: [:update, :destroy], constraints: { id: /\d+|:id/ }

@@ -1,7 +1,6 @@
 ---
 table_name: feature_gates
 classes:
-- Feature::BypassLoadBalancer::FlipperGate
 - Feature::FlipperGate
 - Flipper::Adapters::ActiveRecord::Gate
 feature_categories:

@@ -1,7 +1,6 @@
 ---
 table_name: features
 classes:
-- Feature::BypassLoadBalancer::FlipperFeature
 - Feature::FlipperFeature
 - Flipper::Adapters::ActiveRecord::Feature
 feature_categories:

@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+class AddDevWidgetToTasks < Gitlab::Database::Migration[2.2]
+  class WorkItemType < MigrationRecord
+    self.table_name = 'work_item_types'
+  end
+
+  class WidgetDefinition < MigrationRecord
+    self.table_name = 'work_item_widget_definitions'
+  end
+
+  restrict_gitlab_migration gitlab_schema: :gitlab_main
+  disable_ddl_transaction!
+  milestone '17.3'
+
+  TASK_ENUM_VALUE = 4
+  WIDGET_NAME = 'Development'
+  WIDGET_ENUM_VALUE = 23
+
+  class MigrationWorkItemType < MigrationRecord
+    self.table_name = 'work_item_types'
+  end
+
+  class MigrationWidgetDefinition < MigrationRecord
+    self.table_name = 'work_item_widget_definitions'
+  end
+
+  def up
+    task_work_item_type = MigrationWorkItemType.find_by(base_type: TASK_ENUM_VALUE)
+
+    # Task type should exist in production applications, checking here to avoid failures
+    # if inconsistent data is present.
+    return say('Task work item type does not exist, skipping widget creation') unless task_work_item_type
+
+    widgets = [
+      {
+        work_item_type_id: task_work_item_type.id,
+        name: WIDGET_NAME,
+        widget_type: WIDGET_ENUM_VALUE
+      }
+    ]
+
+    MigrationWidgetDefinition.upsert_all(
+      widgets,
+      unique_by: :index_work_item_widget_definitions_on_default_witype_and_name
+    )
+  end
+
+  def down
+    task_work_item_type = MigrationWorkItemType.find_by(base_type: TASK_ENUM_VALUE)
+
+    return say('Task work item type does not exist, skipping widget removal') unless task_work_item_type
+
+    widget_definition = MigrationWidgetDefinition.find_by(
+      work_item_type_id: task_work_item_type.id,
+      widget_type: WIDGET_ENUM_VALUE,
+      name: WIDGET_NAME
+    )
+
+    return say('Widget definition not found, skipping widget removal') unless widget_definition
+
+    widget_definition.destroy
+  end
+end

@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+class AddNotNullConstraintToPCiBuilds < Gitlab::Database::Migration[2.2]
+  disable_ddl_transaction!
+  milestone '17.3'
+
+  CONSTRAINT_NAME = 'check_9aa9432137'
+
+  def up
+    Gitlab::Database::PostgresPartitionedTable.each_partition(:p_ci_builds) do |partition|
+      add_not_null_constraint partition.identifier, :project_id, constraint_name: CONSTRAINT_NAME, validate: false
+    end
+  end
+
+  def down
+    Gitlab::Database::PostgresPartitionedTable.each_partition(:p_ci_builds) do |partition|
+      remove_not_null_constraint partition.identifier, :project_id, constraint_name: CONSTRAINT_NAME
+    end
+  end
+end

@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class PreparePCiBuildsProjectIdNotNullValidation < Gitlab::Database::Migration[2.2]
+  milestone '17.3'
+
+  CONSTRAINT_NAME = 'check_9aa9432137'
+
+  def up
+    Gitlab::Database::PostgresPartitionedTable.each_partition(:p_ci_builds) do |partition|
+      prepare_async_check_constraint_validation partition.identifier, name: CONSTRAINT_NAME
+    end
+  end
+
+  def down
+    Gitlab::Database::PostgresPartitionedTable.each_partition(:p_ci_builds) do |partition|
+      unprepare_async_check_constraint_validation partition.identifier, name: CONSTRAINT_NAME
+    end
+  end
+end

@@ -0,0 +1 @@
+b4cec70cbcf124395b511ce3957dde3ee34b33856c47ca4ced569ffebf70e502

@@ -0,0 +1 @@
+07efb85a3ec3b76b6ba2d849f0daf9dac6ae4dbc3aac95b09530f782bff56179

@@ -0,0 +1 @@
+df7c930e70c324848db73c5f006d3edb6e8a353aa3f7b80144a08a46ae041ff8

@@ -22913,6 +22913,9 @@ ALTER TABLE ci_runners
 ALTER TABLE ci_runners
     ADD CONSTRAINT check_91230910ec CHECK ((char_length((name)::text) <= 256)) NOT VALID;
 
+ALTER TABLE ci_builds
+    ADD CONSTRAINT check_9aa9432137 CHECK ((project_id IS NOT NULL)) NOT VALID;
+
 ALTER TABLE sprints
     ADD CONSTRAINT check_ccd8a1eae0 CHECK ((start_date IS NOT NULL)) NOT VALID;

@@ -14,14 +14,70 @@ DETAILS:
 You can install GitLab on several [cloud providers](cloud_providers.md),
 or use one of the following methods.
 
-| Installation method | Description | When to choose |
-|----------------------------------------------------------------|-------------|----------------|
-| [Linux package](https://docs.gitlab.com/omnibus/installation/) (previously known as Omnibus GitLab) | The official `deb` and `rpm` packages. The Linux package has GitLab and dependent components, including PostgreSQL, Redis, and Sidekiq. | Use if you want the most mature, scalable method. This version is also used on GitLab.com. <br>- For additional flexibility and resilience, see the [reference architecture documentation](../administration/reference_architectures/index.md).<br>- Review the [system requirements](requirements.md).<br>- View the [list of supported Linux operating systems](../administration/package_information/supported_os.md#supported-operating-systems). |
-| [Helm chart](https://docs.gitlab.com/charts/) | A chart for installing a cloud-native version of GitLab and its components on Kubernetes. | Use if your infrastructure is on Kubernetes and you're familiar with how it works. Management, observability, and some concepts are different than traditional deployments.<br/>- Administration and troubleshooting requires Kubernetes knowledge.<br/>- It can be more expensive for smaller installations. The default installation requires more resources than a single node Linux package deployment, because most services are deployed in a redundant fashion.<br/><br/> |
-| [GitLab Operator](https://docs.gitlab.com/operator/) | An installation and management method that follows the [Kubernetes Operator pattern](https://kubernetes.io/docs/concepts/extend-kubernetes/operator/) for installing a cloud-native version of GitLab and its components in Kubernetes. | Use if your infrastructure is on Kubernetes or [OpenShift](openshift_and_gitlab/index.md) and you're familiar with how Operators work. Provides additional functionality beyond the Helm chart installation method, including automation of the [GitLab upgrade steps](https://docs.gitlab.com/operator/gitlab_upgrades.html).<br/>- The considerations for the Helm chart also apply here.<br/>- Consider the Helm chart instead if you are limited by the [GitLab Operator's known issues](https://docs.gitlab.com/operator#known-issues). |
-| [Docker](docker.md) | The GitLab packages in a Docker container. | Use if you're familiar with Docker. |
-| [Source](installation.md) | GitLab and its components from scratch. | Use if none of the previous methods are available for your platform. Can use for unsupported systems like \*BSD.|
-| [GitLab Environment Toolkit (GET)](https://gitlab.com/gitlab-org/gitlab-environment-toolkit#documentation) | A set of opinionated Terraform and Ansible scripts. | Use to deploy a [reference architecture](../administration/reference_architectures/index.md) on selected major cloud providers. Has some [limitations](https://gitlab.com/gitlab-org/gitlab-environment-toolkit#missing-features-to-be-aware-of) and manual setup for production environments. |
+## Linux package
+
+The Linux package includes the official `deb` and `rpm` packages. The package has GitLab and dependent components, including PostgreSQL, Redis, and Sidekiq.
+
+Use if you want the most mature, scalable method. This version is also used on GitLab.com.
+
+For more information, see:
+
+- [Linux package](https://docs.gitlab.com/omnibus/installation/)
+- [Reference architectures](../administration/reference_architectures/index.md)
+- [System requirements](requirements.md)
+- [Supported Linux operating systems](../administration/package_information/supported_os.md#supported-operating-systems)
+
+## Helm chart
+
+Use a chart to install a cloud-native version of GitLab and its components on Kubernetes.
+
+Use if your infrastructure is on Kubernetes and you're familiar with how it works.
+
+Before you use this installation method, consider that:
+
+- Management, observability, and some other concepts are different than traditional deployments.
+- Administration and troubleshooting requires Kubernetes knowledge.
+- It can be more expensive for smaller installations.
+- The default installation requires more resources than a single node Linux package deployment, because most services are deployed in a redundant fashion.
+
+For more information, see [Helm charts](https://docs.gitlab.com/charts/).
+
+## GitLab Operator
+
+To install a cloud-native version of GitLab and its components in Kubernetes, use GitLab Operator.
+This installation and management method follows the [Kubernetes Operator pattern](https://kubernetes.io/docs/concepts/extend-kubernetes/operator/).
+
+Use if your infrastructure is on Kubernetes or [OpenShift](openshift_and_gitlab/index.md), and you're familiar with how Operators work.
+
+This installation method provides additional functionality beyond the Helm chart installation method, including automation of the [GitLab upgrade steps](https://docs.gitlab.com/operator/gitlab_upgrades.html). The considerations for the Helm chart also apply here.
+
+Consider the Helm chart installation method if you are limited by [GitLab Operator known issues](https://docs.gitlab.com/operator#known-issues).
+
+For more information, see [GitLab Operator](https://docs.gitlab.com/operator/).
+
+## Docker
+
+Installs the GitLab packages in a Docker container.
+
+Use if you're familiar with Docker.
+
+For more information, see [Docker](docker.md).
+
+## Source
+
+Installs GitLab and its components from scratch.
+
+Use if none of the previous methods are available for your platform. Can use for unsupported systems like \*BSD.
+
+For more information, see [Source](installation.md).
+
+## GitLab Environment Toolkit (GET)
+
+[GitLab Environment Toolkit (GET)](https://gitlab.com/gitlab-org/gitlab-environment-toolkit#documentation) is a set of opinionated Terraform and Ansible scripts.
+
+Use to deploy a [reference architecture](../administration/reference_architectures/index.md) on selected major cloud providers.
+
+This installation method has some [limitations](https://gitlab.com/gitlab-org/gitlab-environment-toolkit#missing-features-to-be-aware-of), and requires manual setup for production environments.
+
 ## Unsupported Linux distributions and Unix-like operating systems
 
@@ -32,10 +88,11 @@ or use one of the following methods.
 - macOS
 
 Installation of GitLab on these operating systems is possible, but not supported.
-See the [installation guides](https://about.gitlab.com/install/) for more information.
-
-See [OS versions that are no longer supported](../administration/package_information/supported_os.md#os-versions-that-are-no-longer-supported)
-for a list of supported and unsupported OS versions for Linux package installations as well as the last supported GitLab version for that OS.
+For more information, see:
+
+- [Installation guides](https://about.gitlab.com/install/)
+- [Supported and unsupported OS versions for Linux package installations](../administration/package_information/supported_os.md#os-versions-that-are-no-longer-supported)
 
 ## Microsoft Windows

@@ -10,6 +10,10 @@ DETAILS:
 **Tier:** Free, Premium, Ultimate
 **Offering:** GitLab.com, Self-managed, GitLab Dedicated
 
+NOTE:
+This feature is not under active development. For more information, see [issue 468607](https://gitlab.com/gitlab-org/gitlab/-/issues/468607#note_1967939452).
+To determine if the feature meets your needs, see the [open bug issues](https://gitlab.com/gitlab-org/gitlab/-/issues/?sort=updated_desc&state=opened&label_name%5B%5D=Category%3AIncident%20Management&label_name%5B%5D=type%3A%3Abug&first_page_size=20).
+
 Incident Management enables developers to easily triage and view the alerts and incidents
 generated by their application. By surfacing alerts and incidents where the code is
 being developed, efficiency and awareness can be increased. Check out the following sections for more information:

@@ -93,13 +93,13 @@ To enable secret push protection in a project:
 
 ## Coverage
 
-Secret push protection checks the content of each commit diff when it is pushed to GitLab.
+Secret push protection checks the content of each commit when it is pushed to GitLab.
 However, the following exclusions apply.
 
 Secret push protection does not check a file in a commit when:
 
 - The file is a binary file.
-- The diff patch for the file is larger than 1 MiB.
+- The file is larger than 1 MiB.
 - The file was renamed, deleted, or moved without changes to the content.
 - The content of the file is identical to the content of another file in the source code.
 - The file is contained in the initial push that created the repository.

@@ -186,3 +186,20 @@ To skip secret push protection when using any Git client:
 For example, you are using the GitLab Web IDE and have several commits that are blocked from being
 pushed because one of them contains a secret. To skip secret push protection, edit the latest
 commit message and add `[skip secret push protection]`, then push the commits.
+
+## Troubleshooting
+
+When working with secret push protection, you may encounter the following situations.
+
+### Push blocked unexpectedly
+
+Secret push protection scans all contents of modified files. This can cause a push to be
+unexpectedly blocked if a modified file contains a secret, even if the secret is not part of the diff.
+
+To push a change to a file that contains a secret, you need to [skip secret push protection](#skip-secret-push-protection).
+
+[Issue 469161](https://gitlab.com/gitlab-org/gitlab/-/issues/469161) proposes to change the scanning logic to scan only diffs.
+
+### File was not scanned
+
+Some files are excluded from scanning. For a list of exclusions, see [coverage](#coverage).

@@ -55,7 +55,6 @@ In the GitLab UI, GitLab Duo Chat knows about these areas:
 |---------------|------------------------------------------------------------------------------------------------------------------|
 | Epics | From the epic, ask about `this epic`, `this`, or the URL. From any UI area, ask about the URL. |
 | Issues | From the issue, ask about `this issue`, `this`, or the URL. From any UI area, ask about the URL. |
 | Merge request | From the merge request, ask about `this merge request`, `this`, or the URL. From any UI area, ask about the URL. |
-| Code files | From the single file, ask about `this code` or `this file`. From any UI area, ask about the URL. |
 
 In the IDEs, GitLab Duo Chat knows about these areas:

@@ -70,7 +70,7 @@ In the Roadmap view, you can sort epics by:
 - Last updated date
 
 Each option contains a button that toggles the sort order between **ascending**
-and **descending**. The sort option and order persist when browsing Epics, including
+and **descending**. The sort option and order persist when browsing epics, including
 the [epics list view](../epics/index.md).
 
 In the Roadmap view, you can also filter by the epics':
@@ -87,6 +87,18 @@ In the Roadmap view, you can also filter by the epics':
 
 You can also [visualize roadmaps inside of an epic](../epics/index.md#roadmap-in-epics).
 
+### Improve the performance of the roadmap
+
+If your group contains a lot of epics, using filters can reduce the time your roadmap takes to load.
+Filtering the roadmap reduces the amount of data the roadmap contains. Reducing
+the data in the roadmap can also make it easier for you to find the information you're looking for.
+
+In particular, filtering based on labels can result in a significant performance improvement.
+
+After you set the filters you want to apply, you can save the URL as a bookmark in your
+browser.
+In the future, you can quickly load the filtered roadmap using the bookmark.
+
 ### Roadmap settings
 
 > - Labels visible on roadmaps [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/385231) in GitLab 15.9.

@@ -1,6 +1,6 @@
 # Gitlab::SecretDetection
 
-The gitlab-secret_detection gem performs keyword and regex matching on git diffs that may include secrets. The gem accepts one or more git diffs, matches them against a defined ruleset of regular expressions, and returns scan results.
+The gitlab-secret_detection gem performs keyword and regex matching on git blobs that may include secrets. The gem accepts one or more git blobs, matches them against a defined ruleset of regular expressions, and returns scan results.
 
 ##### Scan parameters

@@ -10,10 +10,10 @@ accepts the following parameters:
 
 | Parameter | Type | Required | Default | Description |
 |-----------|------|----------|---------|-------------|
-| `diffs` | Array | Yes | NA | Array of diffs. Each diff has attributes: left_blob_id, right_blob_id, patch, status, binary, and over_patch_bytes_limit. |
+| `blobs` | Array | Yes | NA | Array of blobs with each blob to have `id` and `data` properties. `id` represents the uniqueness of the blob in the given array and `data` is the content of the blob to scan. |
 | `timeout` | Number | No | [`60s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L22) | The maximum duration allowed for the scan to run on a commit request comprising multiple blobs. If the specified timeout elapses, the scan is automatically terminated. The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. |
-| `diff_timeout` | Number | No | [`5s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L24) | The maximum duration allowed for the scan to run on an individual diff. Upon expiration of the specified timeout, the scan is interrupted for the current diff and advances to the next diff in the request. The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. |
-| `subprocess` | Boolean | No | [`true`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L34) | Runs the scan operation within a subprocess rather than the main process. This design aims to mitigate memory overconsumption issues that may arise from scanning multiple large diffs within a single subprocess. Check [here](https://docs.gitlab.com/ee/architecture/blueprints/secret_detection/decisions/002_run_scan_within_subprocess.html) for more details. |
+| `blob_timeout` | Number | No | [`5s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L24) | The maximum duration allowed for the scan to run on an individual blob. Upon expiration of the specified timeout, the scan is interrupted for the current blob and advances to the next blob in the request. The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. |
+| `subprocess` | Boolean | No | [`true`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L34) | Runs the scan operation within a subprocess rather than the main process. This design aims to mitigate memory overconsumption issues that may arise from scanning multiple large blobs within a single subprocess. Check [here](https://docs.gitlab.com/ee/architecture/blueprints/secret_detection/decisions/002_run_scan_within_subprocess.html) for more details. |

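For illustration, a hedged invocation sketch of the renamed API. The `Struct` stand-in and the zero-argument constructor are assumptions; the diff only guarantees that each blob responds to `id`, `data`, and `size`, and that `secrets_scan` accepts `timeout:`, `blob_timeout:`, and `subprocess:`:

```ruby
require 'gitlab/secret_detection'

# Minimal blob stand-in: the scanner needs `id`, `data`, and `size`.
Blob = Struct.new(:id, :data) do
  def size
    data.bytesize
  end
end

blobs = [
  Blob.new('blob-1', "aws_key = AKIAEXAMPLEEXAMPLE\n"), # fake, illustrative value
  Blob.new('blob-2', "nothing secret here\n")
]

scan = Gitlab::SecretDetection::Scan.new
response = scan.secrets_scan(blobs, timeout: 60, blob_timeout: 5, subprocess: false)
response.status # => a Gitlab::SecretDetection::Status constant, e.g. NOT_FOUND
```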
@@ -20,14 +20,14 @@ module Gitlab
 
     # default time limit(in seconds) for running the scan operation per invocation
     DEFAULT_SCAN_TIMEOUT_SECS = 60
-    # default time limit(in seconds) for running the scan operation on a single diff
-    DEFAULT_DIFF_TIMEOUT_SECS = 5
+    # default time limit(in seconds) for running the scan operation on a single blob
+    DEFAULT_BLOB_TIMEOUT_SECS = 5
     # file path where the secrets ruleset file is located
     RULESET_FILE_PATH = File.expand_path('../../gitleaks.toml', __dir__)
     # Max no of child processes to spawn per request
     # ref: https://gitlab.com/gitlab-org/gitlab/-/issues/430160
     MAX_PROCS_PER_REQUEST = 5
-    # Minimum cumulative size of the diffs required to spawn and
+    # Minimum cumulative size of the blobs required to spawn and
     # run the scan within a new subprocess.
     MIN_CHUNK_SIZE_PER_PROC_BYTES = 2_097_152 # 2MiB
     # Whether to run scan in subprocesses or not. Default is true.

@@ -46,24 +46,23 @@ module Gitlab
       @pattern_matcher = build_pattern_matcher(rules)
     end
 
-    # Runs Secret Detection scan on the list of given diffs. Both the total scan duration and
-    # the duration for each diff is time bound via +timeout+ and +diff_timeout+ respectively.
+    # Runs Secret Detection scan on the list of given blobs. Both the total scan duration and
+    # the duration for each blob is time bound via +timeout+ and +blob_timeout+ respectively.
     #
-    # +diffs+:: Array of diffs between diff pairs. Each diff has attributes: left_blob_id, right_blob_id,
-    #           patch, status, binary, and over_patch_bytes_limit.
+    # +blobs+:: Array of blobs with each blob to have `id` and `data` properties.
     # +timeout+:: No of seconds(accepts floating point for smaller time values) to limit the total scan duration
-    # +diff_timeout+:: No of seconds(accepts floating point for smaller time values) to limit
-    #                  the scan duration on each diff
+    # +blob_timeout+:: No of seconds(accepts floating point for smaller time values) to limit
+    #                  the scan duration on each blob
     # +subprocess+:: If passed true, the scan is performed within subprocess instead of main process.
-    #                To avoid over-consuming memory by running scan on multiple large diffs within a single subprocess,
-    #                it instead groups the diffs into smaller array where each array contains diffs with cumulative size of
+    #                To avoid over-consuming memory by running scan on multiple large blobs within a single subprocess,
+    #                it instead groups the blobs into smaller array where each array contains blobs with cumulative size of
     #                +MIN_CHUNK_SIZE_PER_PROC_BYTES+ bytes and each group runs in a separate sub-process. Default value
     #                is true.
     #
     # NOTE:
     # Running the scan in fork mode primarily focuses on reducing the memory consumption of the scan by
-    # offloading regex operations on large diffs to sub-processes. However, it does not assure the improvement
-    # in the overall latency of the scan, specifically in the case of smaller diff sizes, where the overhead of
+    # offloading regex operations on large blobs to sub-processes. However, it does not assure the improvement
+    # in the overall latency of the scan, specifically in the case of smaller blob sizes, where the overhead of
     # forking a new process adds to the overall latency of the scan instead. More reference on Subprocess-based
     # execution is found here: https://gitlab.com/gitlab-org/gitlab/-/issues/430160.
     #

@@ -74,25 +73,23 @@ module Gitlab
     # }
     #
     def secrets_scan(
-      diffs,
+      blobs,
       timeout: DEFAULT_SCAN_TIMEOUT_SECS,
-      diff_timeout: DEFAULT_DIFF_TIMEOUT_SECS,
+      blob_timeout: DEFAULT_BLOB_TIMEOUT_SECS,
       subprocess: RUN_IN_SUBPROCESS
     )
-      return SecretDetection::Response.new(SecretDetection::Status::INPUT_ERROR) unless validate_scan_input(diffs)
+      return SecretDetection::Response.new(SecretDetection::Status::INPUT_ERROR) unless validate_scan_input(blobs)
 
       Timeout.timeout(timeout) do
-        matched_diffs = filter_by_keywords(diffs)
+        matched_blobs = filter_by_keywords(blobs)
 
-        next SecretDetection::Response.new(SecretDetection::Status::NOT_FOUND) if matched_diffs.empty?
+        next SecretDetection::Response.new(SecretDetection::Status::NOT_FOUND) if matched_blobs.empty?
 
-        secrets =
-          if subprocess
-            run_scan_within_subprocess(matched_diffs, diff_timeout)
-          else
-            run_scan(matched_diffs, diff_timeout)
-          end
+        secrets = if subprocess
+          run_scan_within_subprocess(matched_blobs, blob_timeout)
+        else
+          run_scan(matched_blobs, blob_timeout)
+        end
 
         scan_status = overall_scan_status(secrets)
 

@@ -146,137 +143,104 @@ module Gitlab
   secrets_keywords.flatten.compact.to_set
 end

-# returns only those diffs that contain at least one of the keywords
+# returns only those blobs that contain at least one of the keywords
 # from the keywords list
-def filter_by_keywords(diffs)
-  matched_diffs = []
+def filter_by_keywords(blobs)
+  matched_blobs = []

-  diffs.each do |diff|
-    matched_diffs << diff if keywords.any? { |keyword| diff.patch.include?(keyword) }
+  blobs.each do |blob|
+    matched_blobs << blob if keywords.any? { |keyword| blob.data.include?(keyword) }
   end

-  matched_diffs.freeze
+  matched_blobs.freeze
 end

-def run_scan(diffs, diff_timeout)
-  found_secrets = diffs.flat_map do |diff|
-    Timeout.timeout(diff_timeout) do
-      find_secrets(diff)
+def run_scan(blobs, blob_timeout)
+  found_secrets = blobs.flat_map do |blob|
+    Timeout.timeout(blob_timeout) do
+      find_secrets(blob)
     end
   rescue Timeout::Error => e
-    logger.error "Secret Detection scan timed out on the diff(id:#{diff.right_blob_id}): #{e}"
-    SecretDetection::Finding.new(diff.right_blob_id,
-      SecretDetection::Status::DIFF_TIMEOUT)
+    logger.error "Secret Detection scan timed out on the blob(id:#{blob.id}): #{e}"
+    SecretDetection::Finding.new(blob.id,
+      SecretDetection::Status::BLOB_TIMEOUT)
   end

   found_secrets.freeze
 end

-def run_scan_within_subprocess(diffs, diff_timeout)
-  diff_sizes = diffs.map { |diff| diff.patch.length }
-  grouped_diff_indicies = group_by_chunk_size(diff_sizes)
+def run_scan_within_subprocess(blobs, blob_timeout)
+  blob_sizes = blobs.map(&:size)
+  grouped_blob_indicies = group_by_chunk_size(blob_sizes)

-  grouped_diffs = grouped_diff_indicies.map { |idx_arr| idx_arr.map { |i| diffs[i] } }
+  grouped_blobs = grouped_blob_indicies.map { |idx_arr| idx_arr.map { |i| blobs[i] } }

   found_secrets = Parallel.flat_map(
-    grouped_diffs,
+    grouped_blobs,
     in_processes: MAX_PROCS_PER_REQUEST,
     isolation: true # do not reuse sub-processes
-  ) do |grouped_diff|
-    grouped_diff.flat_map do |diff|
-      Timeout.timeout(diff_timeout) do
-        find_secrets(diff)
+  ) do |grouped_blob|
+    grouped_blob.flat_map do |blob|
+      Timeout.timeout(blob_timeout) do
+        find_secrets(blob)
       end
     rescue Timeout::Error => e
-      logger.error "Secret Detection scan timed out on the diff(id:#{diff.right_blob_id}): #{e}"
-      SecretDetection::Finding.new(diff.right_blob_id,
-        SecretDetection::Status::DIFF_TIMEOUT)
+      logger.error "Secret Detection scan timed out on the blob(id:#{blob.id}): #{e}"
+      SecretDetection::Finding.new(blob.id,
+        SecretDetection::Status::BLOB_TIMEOUT)
     end
   end

   found_secrets.freeze
 end

-# finds secrets in the given diff with a timeout circuit breaker
-def find_secrets(diff)
-  line_number_offset = 0
+# finds secrets in the given blob with a timeout circuit breaker
+def find_secrets(blob)
   secrets = []

-  lines = diff.patch.split("\n")
-
-  # The following section parses the diff patch.
-  #
-  # If the line starts with @@, it is the hunk header, used to calculate the line number.
-  # If the line starts with +, it is newly added in this diff, and we
-  # scan the line for newly added secrets. Also increment line number.
-  # If the line starts with -, it is removed in this diff, do not increment line number.
-  # If the line starts with \\, it is the no newline marker, do not increment line number.
-  # If the line starts with a space character, it is a context line, just increment the line number.
-  #
-  # A context line that starts with an important character would still be treated
-  # like a context line, as shown below:
-  # @@ -1,5 +1,5 @@
-  #  context line
-  # -removed line
-  # +added line
-  #  @@this context line has a @@ but starts with a space so isnt a header
-  #  +this context line has a + but starts with a space so isnt an addition
-  #  -this context line has a - but starts with a space so isnt a removal
-  lines.each do |line|
-    # Parse hunk header for start line
-    if line.start_with?("@@")
-      hunk_info = line.match(/@@ -\d+(,\d+)? \+(\d+)(,\d+)? @@/)
-      start_line = hunk_info[2].to_i
-      line_number_offset = start_line - 1
-    # Line added in this commit
-    elsif line.start_with?('+')
-      line_number_offset += 1
-      # Remove leading +
-      line_content = line[1..]
-
-      patterns = pattern_matcher.match(line_content, exception: false)
-      next unless patterns.any?
-
-      patterns.each do |pattern|
-        type = rules[pattern]["id"]
-        description = rules[pattern]["description"]
-
-        secrets << SecretDetection::Finding.new(
-          diff.right_blob_id,
-          SecretDetection::Status::FOUND,
-          line_number_offset,
-          type,
-          description
-        )
-      end
-    # Line not added in this commit, just increment line number
-    elsif line.start_with?(' ')
-      line_number_offset += 1
-    # Line removed in this commit or no newline marker, do not increment line number
-    elsif line.start_with?('-', '\\')
-      # No increment
+  blob.data.each_line.with_index do |line, index|
+    patterns = pattern_matcher.match(line, exception: false)
+    next unless patterns.any?
+
+    line_number = index + 1
+    patterns.each do |pattern|
+      type = rules[pattern]["id"]
+      description = rules[pattern]["description"]
+
+      secrets << SecretDetection::Finding.new(
+        blob.id,
+        SecretDetection::Status::FOUND,
+        line_number,
+        type,
+        description
+      )
     end
   end

   secrets
 rescue StandardError => e
-  logger.error "Secret Detection scan failed on the diff(id:#{diff.right_blob_id}): #{e}"
+  logger.error "Secret Detection scan failed on the blob(id:#{blob.id}): #{e}"

-  SecretDetection::Finding.new(diff.right_blob_id, SecretDetection::Status::SCAN_ERROR)
+  SecretDetection::Finding.new(blob.id, SecretDetection::Status::SCAN_ERROR)
 end

-def validate_scan_input(diffs)
-  return false if diffs.nil? || !diffs.instance_of?(Array)
+def validate_scan_input(blobs)
+  return false if blobs.nil? || !blobs.instance_of?(Array)

-  diffs.each { |diff| diff.patch.freeze }
+  blobs.all? do |blob|
+    next false unless blob.respond_to?(:id) || blob.respond_to?(:data)
+
+    blob.data.freeze # freeze blobs to avoid additional object allocations on strings
+  end
 end

 def overall_scan_status(found_secrets)
   return SecretDetection::Status::NOT_FOUND if found_secrets.empty?

-  timed_out_diffs = found_secrets.count { |el| el.status == SecretDetection::Status::DIFF_TIMEOUT }
+  timed_out_blobs = found_secrets.count { |el| el.status == SecretDetection::Status::BLOB_TIMEOUT }

-  case timed_out_diffs
+  case timed_out_blobs
   when 0
     SecretDetection::Status::FOUND
   when found_secrets.length
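Stripped of the library plumbing, the new per-line loop in find_secrets reduces to the following. This is a standalone sketch: a plain Regexp stands in for the re2-backed pattern_matcher, and the token is the dummy value used in the specs:

# Minimal sketch of the blob-scanning loop, under the assumptions above.
rules = {
  /\bglpat-[0-9a-zA-Z_-]{20}\b/ => { "id" => "gitlab_pat", "description" => "GitLab personal access token" }
}

data = "no secret here\nglpat-12312312312312312312\n" # gitleaks:allow
findings = data.each_line.with_index.filter_map do |line, index|
  rule = rules.keys.find { |re| re.match?(line) } # first matching rule wins in this sketch
  { line: index + 1, type: rules[rule]["id"] } if rule
end
findings # => [{ line: 2, type: "gitlab_pat" }]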
@@ -286,15 +250,15 @@ module Gitlab
   end
 end

-# This method accepts an array of diff sizes(in bytes) and groups them into an array
+# This method accepts an array of blob sizes(in bytes) and groups them into an array
 # of arrays structure where each element is the group of indicies of the input
-# array whose cumulative diff sizes has at least +MIN_CHUNK_SIZE_PER_PROC_BYTES+
-def group_by_chunk_size(diff_size_arr)
+# array whose cumulative blob sizes has at least +MIN_CHUNK_SIZE_PER_PROC_BYTES+
+def group_by_chunk_size(blob_size_arr)
   cumulative_size = 0
   chunk_indexes = []
   chunk_idx_start = 0

-  diff_size_arr.each_with_index do |size, index|
+  blob_size_arr.each_with_index do |size, index|
     cumulative_size += size
     next unless cumulative_size >= MIN_CHUNK_SIZE_PER_PROC_BYTES

@@ -304,11 +268,11 @@ module Gitlab
     cumulative_size = 0
   end

-  if cumulative_size.positive? && (chunk_idx_start < diff_size_arr.length)
-    chunk_indexes << if chunk_idx_start == diff_size_arr.length - 1
+  if cumulative_size.positive? && (chunk_idx_start < blob_size_arr.length)
+    chunk_indexes << if chunk_idx_start == blob_size_arr.length - 1
                        [chunk_idx_start]
                      else
-                       (chunk_idx_start..diff_size_arr.length - 1).to_a
+                       (chunk_idx_start..blob_size_arr.length - 1).to_a
                      end
   end
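A standalone restatement of the chunking contract, with an illustrative threshold (the real constant is MIN_CHUNK_SIZE_PER_PROC_BYTES and its value is not shown in this hunk):

# Sketch: group indexes until their cumulative size crosses the threshold.
def group_by_chunk_size(sizes, min_chunk: 100)
  chunks, current, sum = [], [], 0
  sizes.each_with_index do |size, i|
    current << i
    sum += size
    next if sum < min_chunk

    chunks << current # this group is large enough for one sub-process
    current, sum = [], 0
  end
  chunks << current unless current.empty? # leftover tail becomes its own group
  chunks
end

group_by_chunk_size([60, 60, 30, 120, 10]) # => [[0, 1], [2, 3], [4]]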
@@ -8,7 +8,7 @@ module Gitlab
 FOUND = 1 # When scan operation completes with one or more findings
 FOUND_WITH_ERRORS = 2 # When scan operation completes with one or more findings along with some errors
 SCAN_TIMEOUT = 3 # When the scan operation runs beyond given time out
-DIFF_TIMEOUT = 4 # When the scan operation on a diff runs beyond given time out
+BLOB_TIMEOUT = 4 # When the scan operation on a blob runs beyond given time out
 SCAN_ERROR = 5 # When the scan operation fails due to regex error
 INPUT_ERROR = 6 # When the scan operation fails due to invalid input
 end
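One way a caller might branch on these statuses (a sketch; the decision symbols are illustrative, not a GitLab API):

decision =
  case response.status
  when Gitlab::SecretDetection::Status::FOUND,
       Gitlab::SecretDetection::Status::FOUND_WITH_ERRORS
    :reject_push # secrets were detected, possibly alongside per-blob errors
  when Gitlab::SecretDetection::Status::SCAN_TIMEOUT
    :log_and_allow # the whole scan ran out of budget
  else
    :allow
  end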
@@ -5,13 +5,10 @@ require 'spec_helper'
 RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detection do
   subject(:scan) { described_class.new }

-  let(:diff_blob) do
-    Struct.new(:left_blob_id, :right_blob_id, :patch, :status, :binary, :over_patch_bytes_limit, keyword_init: true)
+  def new_blob(id:, data:)
+    Struct.new(:id, :data).new(id, data)
   end

-  let(:sha1_blank_sha) { ('0' * 40).freeze }
-  let(:sample_blob_id) { 'fe29d93da4843da433e62711ace82db601eb4f8f' }
-
   let(:ruleset) do
     {
       "title" => "gitleaks config",
@@ -67,108 +64,47 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio
   allow(scan).to receive(:parse_ruleset).and_return(ruleset)
 end

-context 'when the diff does not contain a secret' do
-  let(:diffs) do
+context 'when the blob does not contain a secret' do
+  let(:blobs) do
     [
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1 @@\n+BASE_URL=https://foo.bar\n\\ No newline at end of file\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      )
+      new_blob(id: 1234, data: "no secrets")
     ]
   end

   it "does not match" do
     expected_response = Gitlab::SecretDetection::Response.new(Gitlab::SecretDetection::Status::NOT_FOUND)

-    expect(scan.secrets_scan(diffs)).to eq(expected_response)
+    expect(scan.secrets_scan(blobs)).to eq(expected_response)
   end

-  it "attempts to keyword match returning no diffs for further scan" do
+  it "attempts to keyword match returning no blobs for further scan" do
     expect(scan).to receive(:filter_by_keywords)
-      .with(diffs)
+      .with(blobs)
       .and_return([])

-    scan.secrets_scan(diffs)
+    scan.secrets_scan(blobs)
   end

   it "does not attempt to regex match" do
     expect(scan).not_to receive(:match_rules_bulk)

-    scan.secrets_scan(diffs)
+    scan.secrets_scan(blobs)
   end
 end

-context "when multiple diffs contains secrets" do
-  let(:diffs) do
+context "when multiple blobs contains secrets" do
+  let(:blobs) do
     [
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1 @@\n+glpat-12312312312312312312\n", # gitleaks:allow
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,3 @@\n+\n+\n+glptt-1231231231231231231212312312312312312312\n", # gitleaks:allow
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1 @@\n+data with no secret\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,2 @@\n+GR134894112312312312312312312\n+glft-12312312312312312312\n", # gitleaks:allow
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1 @@\n+data with no secret\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1 @@\n+data with no secret\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1 @@\n+glptt-1231231231231231231212312312312312312312\n", # gitleaks:allow
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,2 @@\n+glpat-12312312312312312312\n+GR134894112312312312312312312\n", # gitleaks:allow
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      )
+      new_blob(id: 111, data: "glpat-12312312312312312312"), # gitleaks:allow
+      new_blob(id: 222, data: "\n\nglptt-1231231231231231231212312312312312312312"), # gitleaks:allow
+      new_blob(id: 333, data: "data with no secret"),
+      new_blob(id: 444,
+        data: "GR134894112312312312312312312\nglft-12312312312312312312"), # gitleaks:allow
+      new_blob(id: 555, data: "data with no secret"),
+      new_blob(id: 666, data: "data with no secret"),
+      new_blob(id: 777, data: "\nglptt-1231231231231231231212312312312312312312"), # gitleaks:allow
+      new_blob(id: 888,
+        data: "glpat-12312312312312312312;GR134894112312312312312312312") # gitleaks:allow
     ]
   end
@@ -177,51 +113,51 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio
   Gitlab::SecretDetection::Status::FOUND,
   [
     Gitlab::SecretDetection::Finding.new(
-      diffs[0].right_blob_id,
+      blobs[0].id,
       Gitlab::SecretDetection::Status::FOUND,
       1,
       ruleset['rules'][0]['id'],
       ruleset['rules'][0]['description']
     ),
     Gitlab::SecretDetection::Finding.new(
-      diffs[1].right_blob_id,
+      blobs[1].id,
       Gitlab::SecretDetection::Status::FOUND,
       3,
       ruleset['rules'][1]['id'],
       ruleset['rules'][1]['description']
     ),
     Gitlab::SecretDetection::Finding.new(
-      diffs[3].right_blob_id,
+      blobs[3].id,
       Gitlab::SecretDetection::Status::FOUND,
       1,
       ruleset['rules'][2]['id'],
       ruleset['rules'][2]['description']
     ),
     Gitlab::SecretDetection::Finding.new(
-      diffs[3].right_blob_id,
+      blobs[3].id,
       Gitlab::SecretDetection::Status::FOUND,
       2,
       ruleset['rules'][3]['id'],
       ruleset['rules'][3]['description']
     ),
     Gitlab::SecretDetection::Finding.new(
-      diffs[6].right_blob_id,
+      blobs[6].id,
       Gitlab::SecretDetection::Status::FOUND,
-      1,
+      2,
       ruleset['rules'][1]['id'],
       ruleset['rules'][1]['description']
     ),
     Gitlab::SecretDetection::Finding.new(
-      diffs[7].right_blob_id,
+      blobs[7].id,
       Gitlab::SecretDetection::Status::FOUND,
       1,
       ruleset['rules'][0]['id'],
       ruleset['rules'][0]['description']
     ),
     Gitlab::SecretDetection::Finding.new(
-      diffs[7].right_blob_id,
+      blobs[7].id,
       Gitlab::SecretDetection::Status::FOUND,
-      2,
+      1,
       ruleset['rules'][2]['id'],
       ruleset['rules'][2]['description']
     )
@@ -229,93 +165,90 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio
   )
 end

-it "attempts to keyword match returning only filtered diffs for further scan" do
-  expected = diffs.reject { |d| d.patch.include?("data with no secret") }
+it "attempts to keyword match returning only filtered blobs for further scan" do
+  expected = blobs.filter { |b| b.data != "data with no secret" }

   expect(scan).to receive(:filter_by_keywords)
-    .with(diffs)
+    .with(blobs)
     .and_return(expected)

-  scan.secrets_scan(diffs)
+  scan.secrets_scan(blobs)
 end

 it "matches multiple rules when running in main process" do
-  expect(scan.secrets_scan(diffs, subprocess: false)).to eq(expected_response)
+  expect(scan.secrets_scan(blobs, subprocess: false)).to eq(expected_response)
 end

+context "in subprocess" do
+  let(:dummy_lines) do
+    10_000
+  end
+
+  let(:large_blobs) do
+    dummy_data = "\nrandom data" * dummy_lines
+    [
+      new_blob(id: 111, data: "glpat-12312312312312312312#{dummy_data}"), # gitleaks:allow
+      new_blob(id: 222, data: "\n\nglptt-1231231231231231231212312312312312312312#{dummy_data}"), # gitleaks:allow
+      new_blob(id: 333, data: "data with no secret#{dummy_data}"),
+      new_blob(id: 444,
+        data: "GR134894112312312312312312312\nglft-12312312312312312312#{dummy_data}"), # gitleaks:allow
+      new_blob(id: 555, data: "data with no secret#{dummy_data}"),
+      new_blob(id: 666, data: "data with no secret#{dummy_data}"),
+      new_blob(id: 777, data: "#{dummy_data}\nglptt-1231231231231231231212312312312312312312") # gitleaks:allow
+    ]
+  end
+
+  it "matches multiple rules" do
+    expect(scan.secrets_scan(blobs, subprocess: true)).to eq(expected_response)
+  end
+
+  it "allocates less memory than when running in main process" do
+    forked_stats = Benchmark::Malloc.new.run { scan.secrets_scan(large_blobs, subprocess: true) }
+    non_forked_stats = Benchmark::Malloc.new.run { scan.secrets_scan(large_blobs, subprocess: false) }
+
+    max_processes = Gitlab::SecretDetection::Scan::MAX_PROCS_PER_REQUEST
+
+    forked_memory = forked_stats.allocated.total_memory
+    non_forked_memory = non_forked_stats.allocated.total_memory
+    forked_obj_allocs = forked_stats.allocated.total_objects
+    non_forked_obj_allocs = non_forked_stats.allocated.total_objects
+
+    expect(non_forked_memory).to be >= forked_memory * max_processes
+    expect(non_forked_obj_allocs).to be >= forked_obj_allocs * max_processes
+  end
+end
 end

 context "when configured with time out" do
-  let(:each_diff_timeout_secs) { 0.000_001 } # 1 micro-sec to intentionally timeout large diff
+  let(:each_blob_timeout_secs) { 0.000_001 } # 1 micro-sec to intentionally timeout large blob

   let(:large_data) do
-    ("\n+large data with a secret glpat-12312312312312312312" * 10_000_000).freeze # gitleaks:allow
+    ("large data with a secret glpat-12312312312312312312\n" * 10_000_000).freeze # gitleaks:allow
   end

-  let(:diffs) do
+  let(:blobs) do
     [
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,2 @@\n+GR134894112312312312312312312\n", # gitleaks:allow
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,2 @@\n+data with no secret\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      )
+      new_blob(id: 111, data: "GR134894112312312312312312312"), # gitleaks:allow
+      new_blob(id: 333, data: "data with no secret"),
+      new_blob(id: 333, data: large_data)
     ]
   end

-  let(:all_large_diffs) do
+  let(:all_large_blobs) do
     [
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      ),
-      diff_blob.new(
-        left_blob_id: sha1_blank_sha,
-        right_blob_id: sample_blob_id,
-        patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n",
-        status: :STATUS_END_OF_PATCH,
-        binary: false,
-        over_patch_bytes_limit: false
-      )
+      new_blob(id: 111, data: large_data),
+      new_blob(id: 222, data: large_data),
+      new_blob(id: 333, data: large_data)
     ]
   end

 it "whole secret detection scan operation times out" do
-  scan_timeout_secs = 0.000_001 # 1 micro-sec to intentionally timeout large diff
+  scan_timeout_secs = 0.000_001 # 1 micro-sec to intentionally timeout large blob

   expected_response = Gitlab::SecretDetection::Response.new(Gitlab::SecretDetection::Status::SCAN_TIMEOUT)

   begin
-    response = scan.secrets_scan(diffs, timeout: scan_timeout_secs)
+    response = scan.secrets_scan(blobs, timeout: scan_timeout_secs)
     expect(response).to eq(expected_response)
   rescue ArgumentError
     # When RSpec's main process terminates and attempts to clean up child processes upon completion, it terminates
@@ -331,50 +264,50 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio
   end
 end

-it "one of the diffs times out while others continue to get scanned" do
+it "one of the blobs times out while others continue to get scanned" do
   expected_response = Gitlab::SecretDetection::Response.new(
     Gitlab::SecretDetection::Status::FOUND_WITH_ERRORS,
     [
       Gitlab::SecretDetection::Finding.new(
-        diffs[0].right_blob_id,
+        blobs[0].id,
         Gitlab::SecretDetection::Status::FOUND,
         1,
         ruleset['rules'][2]['id'],
         ruleset['rules'][2]['description']
       ),
       Gitlab::SecretDetection::Finding.new(
-        diffs[2].right_blob_id,
-        Gitlab::SecretDetection::Status::DIFF_TIMEOUT
+        blobs[2].id,
+        Gitlab::SecretDetection::Status::BLOB_TIMEOUT
       )
     ]
   )

-  expect(scan.secrets_scan(diffs, diff_timeout: each_diff_timeout_secs)).to eq(expected_response)
+  expect(scan.secrets_scan(blobs, blob_timeout: each_blob_timeout_secs)).to eq(expected_response)
 end

-it "all the diffs time out" do
-  # scan status changes to SCAN_TIMEOUT when *all* the diffs time out
+it "all the blobs time out" do
+  # scan status changes to SCAN_TIMEOUT when *all* the blobs time out
   expected_scan_status = Gitlab::SecretDetection::Status::SCAN_TIMEOUT

   expected_response = Gitlab::SecretDetection::Response.new(
     expected_scan_status,
     [
       Gitlab::SecretDetection::Finding.new(
-        all_large_diffs[0].right_blob_id,
-        Gitlab::SecretDetection::Status::DIFF_TIMEOUT
+        all_large_blobs[0].id,
+        Gitlab::SecretDetection::Status::BLOB_TIMEOUT
       ),
       Gitlab::SecretDetection::Finding.new(
-        all_large_diffs[1].right_blob_id,
-        Gitlab::SecretDetection::Status::DIFF_TIMEOUT
+        all_large_blobs[1].id,
+        Gitlab::SecretDetection::Status::BLOB_TIMEOUT
       ),
       Gitlab::SecretDetection::Finding.new(
-        all_large_diffs[2].right_blob_id,
-        Gitlab::SecretDetection::Status::DIFF_TIMEOUT
+        all_large_blobs[2].id,
+        Gitlab::SecretDetection::Status::BLOB_TIMEOUT
       )
     ]
   )

-  expect(scan.secrets_scan(all_large_diffs, diff_timeout: each_diff_timeout_secs)).to eq(expected_response)
+  expect(scan.secrets_scan(all_large_blobs, blob_timeout: each_blob_timeout_secs)).to eq(expected_response)
 end
 end
 end
@@ -353,6 +353,7 @@ module API
 mount ::API::Users
 mount ::API::UserCounts
 mount ::API::UserRunners
+mount ::API::VirtualRegistries::Packages::Maven
 mount ::API::Wikis

 add_open_api_documentation!
@@ -0,0 +1,94 @@
# frozen_string_literal: true

module API
  module Concerns
    module VirtualRegistries
      module Packages
        module Endpoint
          extend ActiveSupport::Concern

          NO_BROWSER_EXECUTION_RESPONSE_HEADERS = { 'Content-Security-Policy' => "default-src 'none'" }.freeze
          MAJOR_BROWSERS = %i[webkit firefox ie edge opera chrome].freeze
          WEB_BROWSER_ERROR_MESSAGE = 'This endpoint is not meant to be accessed by a web browser.'

          TIMEOUTS = {
            open: 10,
            read: 10
          }.freeze

          RESPONSE_STATUSES = {
            error: :bad_gateway,
            timeout: :gateway_timeout
          }.freeze

          included do
            include ::API::Helpers::Authentication

            feature_category :virtual_registry
            urgency :low

            helpers do
              def require_non_web_browser!
                browser = ::Browser.new(request.user_agent)
                bad_request!(WEB_BROWSER_ERROR_MESSAGE) if MAJOR_BROWSERS.any? { |b| browser.method(:"#{b}?").call }
              end

              def require_dependency_proxy_enabled!
                not_found! unless ::Gitlab.config.dependency_proxy.enabled
              end

              def send_successful_response_from(service_response:)
                action, action_params = service_response.to_h.values_at(:action, :action_params)
                case action
                when :workhorse_send_url
                  workhorse_send_url(url: action_params[:url], headers: action_params[:headers])
                end
              end

              def send_error_response_from!(service_response:)
                case service_response.reason
                when :unauthorized
                  unauthorized!
                when :file_not_found_on_upstreams
                  not_found!(service_response.message)
                else
                  bad_request!(service_response.message)
                end
              end

              def workhorse_send_url(url:, headers: {})
                send_workhorse_headers(
                  Gitlab::Workhorse.send_url(
                    url,
                    headers: headers,
                    allow_redirects: true,
                    timeouts: TIMEOUTS,
                    response_statuses: RESPONSE_STATUSES,
                    response_headers: NO_BROWSER_EXECUTION_RESPONSE_HEADERS
                  )
                )
              end

              def send_workhorse_headers(headers)
                header(*headers)
                env['api.format'] = :binary
                content_type 'application/octet-stream'
                status :ok
                body ''
              end
            end

            after_validation do
              not_found! unless Feature.enabled?(:virtual_registry_maven, current_user)

              require_non_web_browser!
              require_dependency_proxy_enabled!

              authenticate!
            end
          end
        end
      end
    end
  end
end
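The browser gate above leans on the `browser` gem; a standalone sketch of what require_non_web_browser! evaluates (the user agent string is an arbitrary Chrome UA, used purely for illustration):

require 'browser'

ua = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0 Safari/537.36'
browser = Browser.new(ua)

# Same predicate the helper builds with Method objects:
%i[webkit firefox ie edge opera chrome].any? { |b| browser.public_send(:"#{b}?") }
# => true, so the endpoint would answer 400 with WEB_BROWSER_ERROR_MESSAGE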
@@ -0,0 +1,69 @@
# frozen_string_literal: true

module API
  module VirtualRegistries
    module Packages
      class Maven < ::API::Base
        include ::API::Concerns::VirtualRegistries::Packages::Endpoint

        authenticate_with do |accept|
          accept.token_types(:personal_access_token).sent_through(:http_private_token_header)
          accept.token_types(:deploy_token).sent_through(:http_deploy_token_header)
          accept.token_types(:job_token).sent_through(:http_job_token_header)

          accept.token_types(
            :personal_access_token_with_username,
            :deploy_token_with_username,
            :job_token_with_username
          ).sent_through(:http_basic_auth)
        end

        helpers do
          include ::Gitlab::Utils::StrongMemoize

          def registry
            ::VirtualRegistries::Packages::Maven::Registry.find(declared_params[:id])
          end
          strong_memoize_attr :registry
        end

        desc 'Download endpoint of the Maven virtual registry.' do
          detail 'This feature was introduced in GitLab 17.3. \
            This feature is currently in experiment state. \
            This feature behind the `virtual_registry_maven` feature flag.'
          success [
            { code: 200 }
          ]
          failure [
            { code: 400, message: 'Bad request' },
            { code: 401, message: 'Unauthorized' },
            { code: 403, message: 'Forbidden' },
            { code: 404, message: 'Not Found' }
          ]
          tags %w[maven_virtual_registries]
          hidden true
        end
        params do
          requires :id,
            type: Integer,
            desc: 'The ID of the Maven virtual registry'
          requires :path,
            type: String,
            file_path: true,
            desc: 'Package path',
            documentation: { example: 'foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar' }
        end
        get 'virtual_registries/packages/maven/:id/*path', format: false do
          service_response = ::VirtualRegistries::Packages::Maven::HandleFileRequestService.new(
            registry: registry,
            current_user: current_user,
            params: { path: declared_params[:path] }
          ).execute

          send_error_response_from!(service_response: service_response) if service_response.error?
          send_successful_response_from(service_response: service_response)
        end
      end
    end
  end
end
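A hypothetical client call against the new route; the host, registry id, and token below are made up, and the response body is streamed by Workhorse via send-url rather than by Rails:

require 'net/http'

uri = URI('https://gitlab.example.com/api/v4/virtual_registries/packages/maven/1/com/test/package/1.2.3/package-1.2.3.pom')
req = Net::HTTP::Get.new(uri)
req['Private-Token'] = 'glpat-xxxxxxxxxxxxxxxxxxxx' # placeholder token

res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
res.code # => "200" when an upstream serves the file; "404" if no upstream has it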
@@ -4,43 +4,24 @@ require 'flipper/adapters/active_record'
 require 'flipper/adapters/active_support_cache_store'

 module Feature
-  module BypassLoadBalancer
-    FLAG = 'FEATURE_FLAGS_BYPASS_LOAD_BALANCER'
-    class FlipperRecord < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord -- This class perfectly replaces
-      # Flipper::Adapters::ActiveRecord::Model, which inherits ActiveRecord::Base
-      include DatabaseReflection
-      self.abstract_class = true
+  class FlipperRecord < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord -- This class perfectly replaces
+    # Flipper::Adapters::ActiveRecord::Model, which inherits ActiveRecord::Base
+    include DatabaseReflection
+    self.abstract_class = true

-      # Bypass the load balancer by restoring the default behavior of `connection`
-      # before the load balancer patches ActiveRecord::Base
-      def self.connection
-        retrieve_connection
-      end
-    end
-
-    class FlipperFeature < FlipperRecord
-      self.table_name = 'features'
-    end
-
-    class FlipperGate < FlipperRecord
-      self.table_name = 'feature_gates'
-    end
-
-    def self.enabled?
-      Gitlab::Utils.to_boolean(ENV[FLAG], default: false)
+    # Bypass the load balancer by restoring the default behavior of `connection`
+    # before the load balancer patches ActiveRecord::Base
+    def self.connection
+      retrieve_connection
     end
   end

-  # Classes to override flipper table names
-  class FlipperFeature < Flipper::Adapters::ActiveRecord::Feature
-    include DatabaseReflection
+  class FlipperFeature < FlipperRecord
+    self.table_name = 'features'
+  end

-    # Using `self.table_name` won't work. ActiveRecord bug?
-    superclass.table_name = 'features'
-
-    def self.feature_names
-      pluck(:key)
-    end
+  class FlipperGate < FlipperRecord
+    self.table_name = 'feature_gates'
   end

   class OptOut
@@ -53,10 +34,6 @@ module Feature
   end
 end

-class FlipperGate < Flipper::Adapters::ActiveRecord::Gate
-  superclass.table_name = 'feature_gates'
-end
-
 # Generates the same flipper_id when in a request
 # If not in a request, it generates a unique flipper_id every time
 class FlipperRequest
@@ -97,8 +74,7 @@ module Feature
 end

 def persisted_names
-  model = BypassLoadBalancer.enabled? ? BypassLoadBalancer::FlipperRecord : ApplicationRecord
-  return [] unless model.database.exists?
+  return [] unless FlipperRecord.database.exists?

   # This loads names of all stored feature flags
   # and returns a stable Set in the following order:
@@ -344,8 +320,7 @@ module Feature
 # During setup the database does not exist yet. So we haven't stored a value
 # for the feature yet and return the default.

-model = BypassLoadBalancer.enabled? ? BypassLoadBalancer::FlipperRecord : ApplicationRecord
-return unless model.database.exists?
+return unless FlipperRecord.database.exists?

 flag_stack = ::Thread.current[:feature_flag_recursion_check] || []
 Thread.current[:feature_flag_recursion_check] = flag_stack
@@ -379,15 +354,9 @@ module Feature
 end

 def build_flipper_instance(memoize: false)
-  active_record_adapter = if BypassLoadBalancer.enabled?
-                            Flipper::Adapters::ActiveRecord.new(
-                              feature_class: BypassLoadBalancer::FlipperFeature,
-                              gate_class: BypassLoadBalancer::FlipperGate)
-                          else
-                            Flipper::Adapters::ActiveRecord.new(
-                              feature_class: FlipperFeature,
-                              gate_class: FlipperGate)
-                          end
+  active_record_adapter = Flipper::Adapters::ActiveRecord.new(
+    feature_class: FlipperFeature,
+    gate_class: FlipperGate)
   # Redis L2 cache
   redis_cache_adapter =
     ActiveSupportCacheStoreAdapter.new(
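For readers unfamiliar with Flipper's persistence, this is roughly the wiring the simplified branch performs, using only public Flipper APIs (a sketch; the flag name is made up):

require 'flipper'
require 'flipper/adapters/active_record'

# Back Flipper with the `features`/`feature_gates` tables via the
# FlipperRecord-based model classes defined earlier in this file.
adapter = Flipper::Adapters::ActiveRecord.new(
  feature_class: Feature::FlipperFeature,
  gate_class: Feature::FlipperGate
)
flipper = Flipper.new(adapter)
flipper.enabled?(:some_flag) # every read now goes through FlipperRecord.connection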
@@ -279,11 +279,11 @@ module Gitlab
 end

 def ignore_replication_lag_time?
-  Feature::BypassLoadBalancer.enabled? && Feature.enabled?(:load_balancer_ignore_replication_lag_time, type: :ops)
+  Feature.enabled?(:load_balancer_ignore_replication_lag_time, type: :ops)
 end

 def double_replication_lag_time?
-  Feature::BypassLoadBalancer.enabled? && Feature.enabled?(:load_balancer_double_replication_lag_time, type: :ops)
+  Feature.enabled?(:load_balancer_double_replication_lag_time, type: :ops)
 end
 end
 end
@@ -97,6 +97,7 @@ module Gitlab
 :crm_contacts,
 :current_user_todos,
 :description,
+:development,
 :hierarchy,
 :iteration,
 :labels,
@@ -53,13 +53,18 @@ module WebIde
   result = { enabled: false, reason: disabled_reason, help_url: help_url }

-  if disabled_reason == :opt_in_unset || disabled_reason == :opt_in_disabled
-    result[:user_preferences_url] = user_preferences_url
-  end
-
-  result
+  result.merge(gallery_disabled_extra_attributes(disabled_reason: disabled_reason, user: user))
 end

+# rubocop:disable Lint/UnusedMethodArgument -- `user:` param is used in EE
+def self.gallery_disabled_extra_attributes(disabled_reason:, user:)
+  return { user_preferences_url: user_preferences_url } if disabled_reason == :opt_in_unset
+  return { user_preferences_url: user_preferences_url } if disabled_reason == :opt_in_disabled
+
+  {}
+end
+# rubocop:enable Lint/UnusedMethodArgument
+
 def self.help_url
   ::Gitlab::Routing.url_helpers.help_page_url('user/project/web_ide/index', anchor: 'extension-marketplace')
 end

@@ -72,3 +77,5 @@ module WebIde
 private_class_method :help_url, :user_preferences_url
 end
 end
+
+WebIde::ExtensionsMarketplace.prepend_mod
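The refactor swaps an inline `if` for an EE-overridable hook; the net result for an opted-out user is the same merged hash. A sketch with placeholder URLs:

base  = { enabled: false, reason: :opt_in_unset, help_url: 'https://gitlab.example.com/help/user/project/web_ide/index#extension-marketplace' }
extra = { user_preferences_url: 'https://gitlab.example.com/-/profile/preferences' } # what gallery_disabled_extra_attributes contributes in CE

base.merge(extra) # identical keys to what the old inline branch produced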
@@ -9,14 +9,14 @@ module WebIde
 # the "gitlab-web-ide" and "gitlab-web-ide-vscode-fork" projects
 # (https://gitlab.com/gitlab-org/gitlab-web-ide & https://gitlab.com/gitlab-org/gitlab-web-ide-vscode-fork),
 # so we must ensure that any changes made here are also reflected in those projects.
-DISABLED_REASONS =
-  %i[
-    no_user
-    no_flag
-    instance_disabled
-    opt_in_unset
-    opt_in_disabled
-  ].to_h { |reason| [reason, reason] }.freeze
+# Please also see EE_DISABLED_REASONS in the relevant EE module.
+DISABLED_REASONS = %i[
+  no_user
+  no_flag
+  instance_disabled
+  opt_in_unset
+  opt_in_disabled
+].to_h { |reason| [reason, reason] }.freeze

 # @param [Hash] context
 # @return [Hash]

@@ -31,7 +31,7 @@ module WebIde
   extensions_marketplace_feature_flag_enabled
 }

-extensions_gallery_metadata = metadata_for_user(
+extensions_gallery_metadata = build_metadata(
   user: user,
   flag_enabled: extensions_marketplace_feature_flag_enabled
 )

@@ -43,11 +43,23 @@ module WebIde
 # @param [User, nil] user
 # @param [Boolean, nil] flag_enabled
 # @return [Hash]
-def self.metadata_for_user(user:, flag_enabled:)
+def self.build_metadata(user:, flag_enabled:)
   return metadata_disabled(:no_user) unless user
   return metadata_disabled(:no_flag) if flag_enabled.nil?
   return metadata_disabled(:instance_disabled) unless flag_enabled

+  build_metadata_for_user(user)
+end
+
+def self.disabled_reasons
+  DISABLED_REASONS
+end
+
+# note: This is overridden in EE
+#
+# @param [User] user
+# @return [Hash]
+def self.build_metadata_for_user(user)
   # noinspection RubyNilAnalysis -- RubyMine doesn't realize user can't be nil because of guard clause above
   opt_in_status = user.extensions_marketplace_opt_in_status.to_sym

@@ -73,10 +85,13 @@ module WebIde
 # @param [symbol] reason
 # @return [Hash]
 def self.metadata_disabled(reason)
-  { enabled: false, disabled_reason: DISABLED_REASONS.fetch(reason) }
+  { enabled: false, disabled_reason: disabled_reasons.fetch(reason) }
 end

-private_class_method :metadata_for_user, :metadata_enabled, :metadata_disabled
+private_class_method :build_metadata, :build_metadata_for_user, :disabled_reasons, :metadata_enabled,
+  :metadata_disabled
 end
 end
 end

 WebIde::Settings::ExtensionsGalleryMetadataGenerator.prepend_mod
@ -867,6 +867,9 @@ msgstr ""
|
|||
msgid "%{group_name}&%{epic_iid} · created %{epic_created} by %{author}"
|
||||
msgstr ""
|
||||
|
||||
msgid "%{host} connection failed: %{error}."
|
||||
msgstr ""
|
||||
|
||||
msgid "%{host} could not be reached. %{cta}"
|
||||
msgstr ""
|
||||
|
||||
|
|
@ -27595,6 +27598,9 @@ msgstr ""
|
|||
msgid "Import|You can import a Subversion repository by using third-party tools. %{svn_link}."
|
||||
msgstr ""
|
||||
|
||||
msgid "Import|You do not have permission to view import source users for this namespace"
|
||||
msgstr ""
|
||||
|
||||
msgid "Import|You have insufficient permissions to update the import source user"
|
||||
msgstr ""
|
||||
|
||||
|
|
|
|||
|
|
@@ -31,6 +31,10 @@ FactoryBot.define do
   status { 2 }
 end

+trait :rejected do
+  status { 3 }
+end
+
 trait :completed do
   with_reassign_to_user
   status { 5 }
@@ -4,5 +4,9 @@ FactoryBot.define do
 factory :virtual_registries_packages_maven_registry, class: 'VirtualRegistries::Packages::Maven::Registry' do
   group
   cache_validity_hours { 1 }
+
+  trait :with_upstream do
+    upstream { association(:virtual_registries_packages_maven_upstream, group: group) }
+  end
 end
 end
@@ -2,7 +2,7 @@
 FactoryBot.define do
   factory :virtual_registries_packages_maven_upstream, class: 'VirtualRegistries::Packages::Maven::Upstream' do
-    url { 'http://local.test/maven' }
+    sequence(:url) { |n| "http://local.test/maven/#{n}" }
     username { 'user' }
     password { 'password' }
     registry { association(:virtual_registries_packages_maven_registry) }
@@ -20,8 +20,9 @@ RSpec.describe 'Merge request > User toggles whitespace changes', :js, feature_c
 end

 describe 'clicking "Hide whitespace changes" button' do
-  it 'toggles the "Hide whitespace changes" button', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/333793' do
+  it 'toggles the "Hide whitespace changes" button' do
     find_by_testid('show-whitespace').click
+    wait_for_requests

     visit diffs_project_merge_request_path(project, merge_request)
File diff suppressed because it is too large
@@ -315,8 +315,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host, feature_category: :databas
 let(:load_balancer_ignore_replication_lag_time) { false }

 before do
-  # Enable feature flags in the load balancer. They are enabled in production and relate to behavior in these specs
-  stub_env(Feature::BypassLoadBalancer::FLAG, 'true')
   stub_feature_flags(load_balancer_double_replication_lag_time: load_balancer_double_replication_lag_time)
   stub_feature_flags(load_balancer_ignore_replication_lag_time: load_balancer_ignore_replication_lag_time)
 end
@@ -77,6 +77,8 @@ RSpec.describe WebIde::Settings::ExtensionsGalleryMetadataGenerator, :web_ide_fa
 before do
   allow(user).to receive(:extensions_marketplace_opt_in_status) { opt_in_status.to_s }
+  # EE feature has to be stubbed since we run EE code through CE tests
+  allow(user).to receive(:enterprise_user?).and_return(false)
   allow(enums).to receive(:statuses).and_return({ unset: :unset, enabled: :enabled, disabled: :disabled })
 end
@@ -0,0 +1,11 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe AddDevWidgetToTasks, :migration, feature_category: :team_planning do
  it_behaves_like 'migration that adds a widget to a work item type' do
    let(:target_type_enum_value) { described_class::TASK_ENUM_VALUE }
    let(:target_type) { :task }
  end
end
@@ -135,4 +135,43 @@ RSpec.describe VirtualRegistries::Packages::Maven::Upstream, type: :model, featu
     expect(upstream_read.password).to eq('test')
   end
 end
+
+describe '#url_for' do
+  subject { upstream.url_for(path) }
+
+  where(:path, :expected_url) do
+    'path'      | 'http://test.maven/path'
+    ''          | 'http://test.maven/'
+    '/path'     | 'http://test.maven/path'
+    '/sub/path' | 'http://test.maven/sub/path'
+  end
+
+  with_them do
+    before do
+      upstream.url = 'http://test.maven/'
+    end
+
+    it { is_expected.to eq(expected_url) }
+  end
+end
+
+describe '#headers' do
+  subject { upstream.headers }
+
+  where(:username, :password, :expected_headers) do
+    'user' | 'pass' | { Authorization: 'Basic dXNlcjpwYXNz' }
+    'user' | ''     | {}
+    ''     | 'pass' | {}
+    ''     | ''     | {}
+  end
+
+  with_them do
+    before do
+      upstream.username = username
+      upstream.password = password
+    end
+
+    it { is_expected.to eq(expected_headers) }
+  end
+end
+end
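The two tables pin down the contract well enough to sketch standalone equivalents. These are illustrations consistent with the expectations above, not the model's actual code:

require 'base64'
require 'uri'

# Join a relative package path onto the upstream base URL.
def url_for(base_url, path)
  URI.join(base_url, path.delete_prefix('/')).to_s
end

# Basic-auth header only when both credentials are present.
def headers(username, password)
  return {} if username.to_s.empty? || password.to_s.empty?

  { Authorization: "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
end

url_for('http://test.maven/', '/sub/path') # => "http://test.maven/sub/path"
headers('user', 'pass')                    # => { Authorization: "Basic dXNlcjpwYXNz" }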
@@ -0,0 +1,197 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe API::VirtualRegistries::Packages::Maven, feature_category: :virtual_registry do
  using RSpec::Parameterized::TableSyntax
  include WorkhorseHelpers
  include HttpBasicAuthHelpers

  let_it_be(:registry) { create(:virtual_registries_packages_maven_registry, :with_upstream) }
  let_it_be(:project) { create(:project, namespace: registry.group) }
  let_it_be(:user) { project.creator }

  let(:upstream) { registry.upstream }

  describe 'GET /api/v4/virtual_registries/packages/maven/:id/*path' do
    let(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' }
    let(:url) { "/virtual_registries/packages/maven/#{registry.id}/#{path}" }
    let(:service_response) do
      ServiceResponse.success(
        payload: { action: :workhorse_send_url,
                   action_params: { url: upstream.url_for(path), headers: upstream.headers } }
      )
    end

    let(:service_double) do
      instance_double(::VirtualRegistries::Packages::Maven::HandleFileRequestService, execute: service_response)
    end

    before do
      allow(::VirtualRegistries::Packages::Maven::HandleFileRequestService)
        .to receive(:new)
        .with(registry: registry, current_user: user, params: { path: path })
        .and_return(service_double)
      stub_config(dependency_proxy: { enabled: true }) # not enabled by default
    end

    subject(:request) do
      get api(url), headers: headers
    end

    shared_examples 'returning the workhorse send_url response' do
      it 'returns a workhorse send_url response' do
        request

        expect(response).to have_gitlab_http_status(:ok)
        expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('send-url:')
        expect(response.headers['Content-Type']).to eq('application/octet-stream')
        expect(response.headers['Content-Length'].to_i).to eq(0)
        expect(response.body).to eq('')

        send_data_type, send_data = workhorse_send_data

        expected_headers = upstream.headers.deep_stringify_keys.deep_transform_values do |value|
          [value]
        end

        expected_resp_headers = described_class::NO_BROWSER_EXECUTION_RESPONSE_HEADERS.deep_transform_values do |value|
          [value]
        end

        expect(send_data_type).to eq('send-url')
        expect(send_data['URL']).to be_present
        expect(send_data['AllowRedirects']).to be_truthy
        expect(send_data['DialTimeout']).to eq('10s')
        expect(send_data['ResponseHeaderTimeout']).to eq('10s')
        expect(send_data['ErrorResponseStatus']).to eq(502)
        expect(send_data['TimeoutResponseStatus']).to eq(504)
        expect(send_data['Header']).to eq(expected_headers)
        expect(send_data['ResponseHeaders']).to eq(expected_resp_headers)
      end
    end

    context 'for authentication' do
      context 'with a personal access token' do
        let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }

        context 'when sent by headers' do
          let(:headers) { { 'Private-Token' => personal_access_token.token } }

          it_behaves_like 'returning the workhorse send_url response'
        end

        context 'when sent by basic auth' do
          let(:headers) { basic_auth_header(user.username, personal_access_token.token) }

          it_behaves_like 'returning the workhorse send_url response'
        end
      end

      context 'with a deploy token' do
        let_it_be(:deploy_token) do
          create(:deploy_token, :group, groups: [registry.group], read_virtual_registry: true)
        end

        let_it_be(:user) { deploy_token }

        context 'when sent by headers' do
          let(:headers) { { 'Deploy-Token' => deploy_token.token } }

          it_behaves_like 'returning the workhorse send_url response'
        end

        context 'when sent by basic auth' do
          let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) }

          it_behaves_like 'returning the workhorse send_url response'
        end
      end

      context 'with ci job token' do
        let_it_be(:job) { create(:ci_build, user: user, status: :running, project: project) }

        context 'when sent by headers' do
          let(:headers) { { 'Job-Token' => job.token } }

          it_behaves_like 'returning the workhorse send_url response'
        end

        context 'when sent by basic auth' do
          let(:headers) { basic_auth_header(::Gitlab::Auth::CI_JOB_USER, job.token) }

          it_behaves_like 'returning the workhorse send_url response'
        end
      end
    end

    context 'with a valid user' do
      let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
      let(:headers) { { 'Private-Token' => personal_access_token.token } }

      context 'with service response errors' do
        where(:reason, :expected_status) do
          :path_not_present            | :bad_request
          :unauthorized                | :unauthorized
          :no_upstreams                | :bad_request
          :file_not_found_on_upstreams | :not_found
          :upstream_not_available      | :bad_request
        end

        with_them do
          let(:service_response) do
            ServiceResponse.error(message: 'error', reason: reason)
          end

          it "returns a #{params[:expected_status]} response" do
            request

            expect(response).to have_gitlab_http_status(expected_status)
            expect(response.body).to include('error') unless expected_status == :unauthorized
          end
        end
      end

      context 'with feature flag virtual_registry_maven disabled' do
        before do
          stub_feature_flags(virtual_registry_maven: false)
        end

        it_behaves_like 'returning response status', :not_found
      end

      context 'with a web browser' do
        described_class::MAJOR_BROWSERS.each do |browser|
          context "when accessing with a #{browser} browser" do
            before do
              allow_next_instance_of(::Browser) do |b|
                allow(b).to receive("#{browser}?").and_return(true)
              end
            end

            it 'returns a bad request response' do
              request

              expect(response).to have_gitlab_http_status(:bad_request)
              expect(response.body).to include(described_class::WEB_BROWSER_ERROR_MESSAGE)
            end
          end
        end
      end
    end

    context 'with the dependency proxy disabled' do
      before do
        stub_config(dependency_proxy: { enabled: false })
      end

      it_behaves_like 'returning response status', :not_found
    end

    context 'as anonymous' do
      let(:headers) { {} }

      it_behaves_like 'returning response status', :unauthorized
    end
  end
end
@@ -31,4 +31,73 @@ RSpec.describe Groups::GroupMembersController, feature_category: :groups_and_pro

   it_behaves_like 'request_accessable'
 end
+
+describe 'GET /groups/*group_id/-/group_members/bulk_reassignment_file' do
+  let_it_be(:membershipable) do
+    create(:group, :public).tap do |group|
+      group.add_owner(user)
+    end
+  end
+
+  subject(:request) do
+    get bulk_reassignment_file_group_group_members_path(group_id: membershipable)
+  end
+
+  context 'when not signed in' do
+    it 'forbids access to the endpoint' do
+      request
+
+      expect(response).to have_gitlab_http_status(:forbidden)
+    end
+  end
+
+  context 'when signed in' do
+    before do
+      sign_in(user)
+    end
+
+    it 'responds with CSV data' do
+      request
+
+      expect(response).to have_gitlab_http_status(:success)
+    end
+
+    context 'and the user is not a group owner' do
+      let_it_be(:membershipable) { create(:group, :public) }
+
+      it 'forbids access to the endpoint' do
+        request
+
+        expect(response).to have_gitlab_http_status(:forbidden)
+      end
+    end
+
+    context 'and the CSV is not generated properly' do
+      before do
+        allow_next_instance_of(Import::SourceUsers::GenerateCsvService) do |service|
+          allow(service).to receive(:execute).and_return(ServiceResponse.error(message: 'my error message'))
+        end
+      end
+
+      it 'redirects with an error' do
+        request
+
+        expect(response).to be_redirect
+        expect(flash[:alert]).to eq('my error message')
+      end
+    end
+
+    context 'when :importer_user_mapping_reassignment_csv is disabled' do
+      before do
+        stub_feature_flags(importer_user_mapping_reassignment_csv: false)
+      end
+
+      it 'responds with 404' do
+        request
+
+        expect(response).to have_gitlab_http_status(:not_found)
+      end
+    end
+  end
+end
+end
@@ -223,20 +223,6 @@ RSpec.describe Ci::ClickHouse::DataIngestion::FinishedPipelinesSyncService,
   end
 end

-context 'when the ci_pipelines_data_ingestion_to_click_house feature flag is off' do
-  before do
-    stub_feature_flags(ci_pipelines_data_ingestion_to_click_house: false)
-  end
-
-  it 'skips execution' do
-    is_expected.to have_attributes({
-      status: :error,
-      message: 'Feature ci_pipelines_data_ingestion_to_click_house is disabled',
-      reason: :disabled
-    })
-  end
-end
-
 def create_sync_events(*pipelines)
   pipelines.each do |pipeline|
     Ci::FinishedPipelineChSyncEvent.new(
@@ -185,8 +185,8 @@ RSpec.describe Ci::PipelineTriggerService, feature_category: :continuous_integra
   end
 end

-context 'when params have an existsed job token' do
-  context 'when params have an existsed ref' do
+context 'when params have a valid job token' do
+  context 'when params have an existing ref' do
     let(:params) { { token: job.token, ref: 'master', variables: nil } }

     it 'triggers a pipeline' do
@@ -0,0 +1,87 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Import::SourceUsers::GenerateCsvService, feature_category: :importers do
  let_it_be(:namespace) { create(:namespace) }
  let_it_be(:current_user) { namespace.owner }

  let_it_be(:user_pending_assignment) { create(:import_source_user, :pending_reassignment, namespace: namespace) }
  let_it_be(:user_awaiting_approval) { create(:import_source_user, :awaiting_approval, namespace: namespace) }
  let_it_be(:rejected_user) { create(:import_source_user, :rejected, namespace: namespace) }

  subject(:service) { described_class.new(namespace, current_user: current_user) }

  describe '#execute' do
    context 'when the user is a namespace owner', :aggregate_failures do
      it 'returns spreadsheet data' do
        result = service.execute

        expect(result).to be_success

        csv_data = CSV.parse(result.payload)

        expect(csv_data.size).to eq(3)
        expect(csv_data[0]).to match_array(described_class::HEADERS)

        expect(csv_data).to include(an_array_matching([
          user_pending_assignment.source_hostname,
          user_pending_assignment.import_type,
          user_pending_assignment.source_user_identifier,
          user_pending_assignment.source_name,
          user_pending_assignment.source_username,
          '',
          ''
        ]))
      end

      it 'returns only data for this namespace' do
        other_source_user = create(:import_source_user)

        result = service.execute

        csv_data = CSV.parse(result.payload)
        source_user_identifiers = csv_data.pluck(2)

        expect(source_user_identifiers).not_to include(other_source_user.source_user_identifier)
      end

      it 'returns only data for Import::SourceUser records with a re-assignable status' do
        result = service.execute

        csv_data = CSV.parse(result.payload)

        source_user_identifiers = csv_data.pluck(2).drop(1)
        expect(source_user_identifiers).to match_array([
          user_pending_assignment.source_user_identifier,
          rejected_user.source_user_identifier
        ])
      end

      context 'and there is no data to return' do
        let(:namespace) { create(:namespace) }

        subject(:service) { described_class.new(namespace, current_user: namespace.owner) }

        it 'only returns the headers' do
          result = service.execute
          csv_data = CSV.parse(result.payload)

          expect(csv_data.size).to eq(1)
          expect(csv_data[0]).to match_array(described_class::HEADERS)
        end
      end
    end

    context 'when current user does not have permission' do
      subject(:service) { described_class.new(namespace, current_user: create(:user)) }

      it 'returns error no permissions' do
        result = service.execute

        expect(result).to be_error
        expect(result.message).to eq('You do not have permission to view import source users for this namespace')
      end
    end
  end
end
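For orientation, these examples assume a service that authorizes the caller and then builds a CSV string with a fixed header row plus one row per re-assignable source user. A minimal sketch under those assumptions; the permission check, scope name, and header strings below are guesses for illustration, not the merged implementation:

require 'csv'

# Hypothetical shape of Import::SourceUsers::GenerateCsvService.
class GenerateCsvService
  # Seven columns: five describing the source user, two left blank for the
  # reassignment target (the exact header strings are an assumption).
  HEADERS = [
    'Source host', 'Import type', 'Source user identifier',
    'Source user name', 'Source username', 'GitLab username', 'GitLab public ID'
  ].freeze

  def initialize(namespace, current_user:)
    @namespace = namespace
    @current_user = current_user
  end

  def execute
    unless @current_user.can?(:owner_access, @namespace) # assumed ability name
      return ServiceResponse.error(
        message: 'You do not have permission to view import source users for this namespace')
    end

    csv = CSV.generate do |rows|
      rows << HEADERS
      # `reassignable` is an assumed scope covering pending and rejected users.
      @namespace.import_source_users.reassignable.each do |source_user|
        rows << [source_user.source_hostname, source_user.import_type,
                 source_user.source_user_identifier, source_user.source_name,
                 source_user.source_username, '', '']
      end
    end

    ServiceResponse.success(payload: csv)
  end
end
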
@@ -0,0 +1,102 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe VirtualRegistries::Packages::Maven::HandleFileRequestService, :aggregate_failures, feature_category: :virtual_registry do
  let_it_be(:registry) { create(:virtual_registries_packages_maven_registry, :with_upstream) }
  let_it_be(:project) { create(:project, namespace: registry.group) }

  let(:user) { project.creator }
  let(:upstream) { registry.upstream }
  let(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' }
  let(:upstream_resource_url) { upstream.url_for(path) }
  let(:service) { described_class.new(registry: registry, current_user: user, params: { path: path }) }

  describe '#execute' do
    subject(:execute) { service.execute }

    shared_examples 'returning a service response error response with' do |message:, reason:|
      it 'returns an error' do
        expect(execute).to be_a(ServiceResponse)
        expect(execute).to be_error
        expect(execute.message).to eq(message)
        expect(execute.reason).to eq(reason)
      end
    end

    shared_examples 'returning a service response success response' do
      before do
        stub_external_registry_request
      end

      it 'returns a success service response' do
        expect(execute).to be_success
        expect(execute.payload).to eq(
          action: :workhorse_send_url,
          action_params: { url: upstream_resource_url, headers: upstream.headers }
        )
      end
    end

    context 'with a User' do
      it_behaves_like 'returning a service response success response'

      context 'with upstream returning an error' do
        before do
          stub_external_registry_request(status: 404)
        end

        it_behaves_like 'returning a service response error response with', message: 'File not found on any upstream',
          reason: :file_not_found_on_upstreams
      end

      context 'with upstream head raising an error' do
        before do
          stub_external_registry_request(raise_error: true)
        end

        it_behaves_like 'returning a service response error response with', message: 'Upstream not available',
          reason: :upstream_not_available
      end
    end

    context 'with a DeployToken' do
      let_it_be(:user) { create(:deploy_token, :group, groups: [registry.group], read_virtual_registry: true) }

      it_behaves_like 'returning a service response success response'
    end

    context 'with no path' do
      let(:path) { nil }

      it_behaves_like 'returning a service response error response with', message: 'Path not present',
        reason: :path_not_present
    end

    context 'with no user' do
      let(:user) { nil }

      it_behaves_like 'returning a service response error response with', message: 'Unauthorized', reason: :unauthorized
    end

    context 'with registry with no upstreams' do
      before do
        registry.upstream = nil
      end

      it_behaves_like 'returning a service response error response with', message: 'No upstreams set',
        reason: :no_upstreams
    end

    def stub_external_registry_request(status: 200, raise_error: false)
      request = stub_request(:head, upstream_resource_url)
        .with(headers: upstream.headers)

      if raise_error
        request.to_raise(Gitlab::HTTP::BlockedUrlError)
      else
        request.to_return(status: status, body: '')
      end
    end
  end
end
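The success payload above shows the Workhorse hand-off pattern: the Rails service only issues a HEAD request against the upstream and, if the file exists, returns a URL plus headers for Workhorse to stream, so the file body never passes through Rails. A rough sketch of that core logic, reusing the accessor names seen in the spec (`url_for`, `headers`) and treating the control flow itself as an assumption rather than the merged code:

# Hypothetical core of HandleFileRequestService#execute; the error messages
# and reasons mirror the spec, everything else is illustrative.
def execute
  return ServiceResponse.error(message: 'Path not present', reason: :path_not_present) if path.blank?
  return ServiceResponse.error(message: 'Unauthorized', reason: :unauthorized) unless user
  return ServiceResponse.error(message: 'No upstreams set', reason: :no_upstreams) unless registry.upstream

  upstream = registry.upstream
  url = upstream.url_for(path)
  response = Gitlab::HTTP.head(url, headers: upstream.headers)

  if response.success?
    # Hand the verified URL to Workhorse, which streams the file to the client.
    ServiceResponse.success(payload: {
      action: :workhorse_send_url,
      action_params: { url: url, headers: upstream.headers }
    })
  else
    ServiceResponse.error(message: 'File not found on any upstream', reason: :file_not_found_on_upstreams)
  end
rescue Gitlab::HTTP::BlockedUrlError
  ServiceResponse.error(message: 'Upstream not available', reason: :upstream_not_available)
end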