diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml index 35cdbaa940c..3971a073aa4 100644 --- a/.gitlab/ci/rails.gitlab-ci.yml +++ b/.gitlab/ci/rails.gitlab-ci.yml @@ -795,14 +795,6 @@ rspec-ee unit gitlab-duo-chat-qa-fast pg14: - !reference [.base-script, script] - rspec_parallelized_job "--tag fast_chat_qa_evaluation" -rspec-ee unit gitlab-duo pg14: - extends: - - .rspec-ee-base-gitlab-duo - - .rails:rules:ee-gitlab-duo-chat-always - script: - - !reference [.base-script, script] - - rspec_parallelized_job "--tag gitlab_duo" - rspec-ee unit gitlab-duo-chat-qa pg14: variables: QA_EVAL_REPORT_FILENAME: "qa_evaluation_report.md" diff --git a/.gitlab/ci/rails/shared.gitlab-ci.yml b/.gitlab/ci/rails/shared.gitlab-ci.yml index 9453bdd9b8d..0f0b44edfd0 100644 --- a/.gitlab/ci/rails/shared.gitlab-ci.yml +++ b/.gitlab/ci/rails/shared.gitlab-ci.yml @@ -103,7 +103,7 @@ include: # spec/lib, yet background migration tests are also sitting there, # and they should run on their own jobs so we don't need to run them # in unit tests again. 
- - rspec_section rspec_parallelized_job "--fail-fast=${RSPEC_FAIL_FAST_THRESHOLD} --tag ~quarantine --tag ~level:background_migration --tag ~click_house --tag ~real_ai_request" + - rspec_section rspec_parallelized_job "--fail-fast=${RSPEC_FAIL_FAST_THRESHOLD} --tag ~quarantine --tag ~level:background_migration --tag ~click_house" after_script: - source scripts/utils.sh - bundle exec gem list gitlab_quality-test_tooling diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml index 7faa7dde56f..9aad7d1baf7 100644 --- a/.gitlab/ci/rules.gitlab-ci.yml +++ b/.gitlab/ci/rules.gitlab-ci.yml @@ -2315,8 +2315,6 @@ .rails:rules:ee-gitlab-duo-chat-base: rules: - !reference [".strict-ee-only-rules", rules] - - if: '$REAL_AI_REQUEST == null' - when: never - if: '$ANTHROPIC_API_KEY == null' when: never - <<: *if-fork-merge-request diff --git a/.rubocop_todo/rails/strong_params.yml b/.rubocop_todo/rails/strong_params.yml index 8b4b0ece4bd..61a62f78b2e 100644 --- a/.rubocop_todo/rails/strong_params.yml +++ b/.rubocop_todo/rails/strong_params.yml @@ -6,25 +6,9 @@ Rails/StrongParams: - 'app/controllers/activity_pub/projects/application_controller.rb' - 'app/controllers/admin/application_settings_controller.rb' - 'app/controllers/admin/applications_controller.rb' - - 'app/controllers/admin/background_migrations_controller.rb' - - 'app/controllers/admin/batched_jobs_controller.rb' - - 'app/controllers/admin/broadcast_messages_controller.rb' - - 'app/controllers/admin/deploy_keys_controller.rb' - - 'app/controllers/admin/groups_controller.rb' - - 'app/controllers/admin/hook_logs_controller.rb' - - 'app/controllers/admin/hooks_controller.rb' - - 'app/controllers/admin/identities_controller.rb' - - 'app/controllers/admin/impersonation_tokens_controller.rb' - 'app/controllers/admin/keys_controller.rb' - - 'app/controllers/admin/labels_controller.rb' - 'app/controllers/admin/projects_controller.rb' - - 'app/controllers/admin/runner_projects_controller.rb' - 
'app/controllers/admin/runners_controller.rb' - - 'app/controllers/admin/sessions_controller.rb' - - 'app/controllers/admin/slacks_controller.rb' - - 'app/controllers/admin/spam_logs_controller.rb' - - 'app/controllers/admin/topics/avatars_controller.rb' - - 'app/controllers/admin/topics_controller.rb' - 'app/controllers/admin/users_controller.rb' - 'app/controllers/autocomplete_controller.rb' - 'app/controllers/banzai/uploads_controller.rb' diff --git a/app/assets/stylesheets/page_bundles/operations.scss b/app/assets/stylesheets/page_bundles/operations.scss index f1f2464aa68..c1c3b2c4b8a 100644 --- a/app/assets/stylesheets/page_bundles/operations.scss +++ b/app/assets/stylesheets/page_bundles/operations.scss @@ -1,10 +1,5 @@ @import 'mixins_and_variables_and_functions'; -.dashboard-cards { - margin-right: -$gl-padding-8; - margin-left: -$gl-padding-8; -} - .dashboard-card { cursor: grab; diff --git a/app/controllers/admin/background_migrations_controller.rb b/app/controllers/admin/background_migrations_controller.rb index d1b87e67800..a6c13d0094b 100644 --- a/app/controllers/admin/background_migrations_controller.rb +++ b/app/controllers/admin/background_migrations_controller.rb @@ -15,34 +15,34 @@ module Admin 'finished' => batched_migration_class.with_status(:finished).queue_order.reverse_order } - @current_tab = @relations_by_tab.key?(params[:tab]) ? params[:tab] : 'queued' - @migrations = @relations_by_tab[@current_tab].page(params[:page]) + @current_tab = @relations_by_tab.key?(safe_params[:tab]) ? 
safe_params[:tab] : 'queued' + @migrations = @relations_by_tab[@current_tab].page(pagination_params[:page]) @successful_rows_counts = batched_migration_class.successful_rows_counts(@migrations.map(&:id)) @databases = Gitlab::Database.db_config_names(with_schema: :gitlab_shared) end def show - @migration = batched_migration_class.find(params[:id]) + @migration = batched_migration_class.find(safe_params[:id]) - @failed_jobs = @migration.batched_jobs.with_status(:failed).page(params[:page]) + @failed_jobs = @migration.batched_jobs.with_status(:failed).page(pagination_params[:page]) end def pause - migration = batched_migration_class.find(params[:id]) + migration = batched_migration_class.find(safe_params[:id]) migration.pause! redirect_back fallback_location: { action: 'index' } end def resume - migration = batched_migration_class.find(params[:id]) + migration = batched_migration_class.find(safe_params[:id]) migration.execute! redirect_back fallback_location: { action: 'index' } end def retry - migration = batched_migration_class.find(params[:id]) + migration = batched_migration_class.find(safe_params[:id]) migration.retry_failed_jobs! if migration.failed? 
redirect_back fallback_location: { action: 'index' } @@ -57,7 +57,7 @@ module Admin end def base_model - @selected_database = params[:database] || Gitlab::Database::MAIN_DATABASE_NAME + @selected_database = safe_params[:database] || Gitlab::Database::MAIN_DATABASE_NAME Gitlab::Database.database_base_models[@selected_database] end @@ -65,5 +65,9 @@ module Admin def batched_migration_class @batched_migration_class ||= Gitlab::Database::BackgroundMigration::BatchedMigration end + + def safe_params + params.permit(:id, :database, :tab) + end end end diff --git a/app/controllers/admin/batched_jobs_controller.rb b/app/controllers/admin/batched_jobs_controller.rb index 10b5f68d630..83cfa52e0b1 100644 --- a/app/controllers/admin/batched_jobs_controller.rb +++ b/app/controllers/admin/batched_jobs_controller.rb @@ -8,7 +8,7 @@ module Admin around_action :support_multiple_databases def show - @job = Gitlab::Database::BackgroundMigration::BatchedJob.find(params[:id]) + @job = Gitlab::Database::BackgroundMigration::BatchedJob.find(safe_params[:id]) @transition_logs = @job.batched_job_transition_logs end @@ -22,9 +22,13 @@ module Admin end def base_model - @selected_database = params[:database] || Gitlab::Database::MAIN_DATABASE_NAME + @selected_database = safe_params[:database] || Gitlab::Database::MAIN_DATABASE_NAME Gitlab::Database.database_base_models[@selected_database] end + + def safe_params + params.permit(:id, :database) + end end end diff --git a/app/controllers/admin/broadcast_messages_controller.rb b/app/controllers/admin/broadcast_messages_controller.rb index 36ca4fdfa1e..38f48cc44b7 100644 --- a/app/controllers/admin/broadcast_messages_controller.rb +++ b/app/controllers/admin/broadcast_messages_controller.rb @@ -76,11 +76,11 @@ module Admin protected def find_broadcast_message - @broadcast_message = System::BroadcastMessage.find(params[:id]) + @broadcast_message = System::BroadcastMessage.find(params.permit(:id)[:id]) end def find_broadcast_messages - 
@broadcast_messages = System::BroadcastMessage.order(ends_at: :desc).page(params[:page]) # rubocop: disable CodeReuse/ActiveRecord + @broadcast_messages = System::BroadcastMessage.order(ends_at: :desc).page(pagination_params[:page]) # rubocop: disable CodeReuse/ActiveRecord end def broadcast_message_params diff --git a/app/controllers/admin/deploy_keys_controller.rb b/app/controllers/admin/deploy_keys_controller.rb index a9b34bf56f6..4db1ba958ed 100644 --- a/app/controllers/admin/deploy_keys_controller.rb +++ b/app/controllers/admin/deploy_keys_controller.rb @@ -47,7 +47,7 @@ class Admin::DeployKeysController < Admin::ApplicationController protected def deploy_key - @deploy_key ||= deploy_keys.find(params[:id]) + @deploy_key ||= deploy_keys.find(params.permit(:id)[:id]) end def deploy_keys diff --git a/app/controllers/admin/groups_controller.rb b/app/controllers/admin/groups_controller.rb index 150180022b3..8e486a1ac9f 100644 --- a/app/controllers/admin/groups_controller.rb +++ b/app/controllers/admin/groups_controller.rb @@ -8,9 +8,9 @@ class Admin::GroupsController < Admin::ApplicationController feature_category :groups_and_projects, [:create, :destroy, :edit, :index, :members_update, :new, :show, :update] def index - @groups = groups.sort_by_attribute(@sort = params[:sort]) - @groups = @groups.search(params[:name]) if params[:name].present? - @groups = @groups.page(params[:page]) + @groups = groups.sort_by_attribute(@sort = pagination_params[:sort]) + @groups = @groups.search(safe_params[:name]) if safe_params[:name].present? + @groups = @groups.page(pagination_params[:page]) end # rubocop: disable CodeReuse/ActiveRecord @@ -21,10 +21,10 @@ class Admin::GroupsController < Admin::ApplicationController # the Group with statistics). 
@group = Group.with_statistics.find(group&.id) @members = present_members( - group_members.order("access_level DESC").page(params[:members_page])) + group_members.order("access_level DESC").page(safe_params[:members_page])) @requesters = present_members( AccessRequestsFinder.new(@group).execute(current_user)) - @projects = @group.projects.with_statistics.page(params[:projects_page]) + @projects = @group.projects.with_statistics.page(safe_params[:projects_page]) end # rubocop: enable CodeReuse/ActiveRecord @@ -79,7 +79,7 @@ class Admin::GroupsController < Admin::ApplicationController end def group - @group ||= Group.find_by_full_path(params[:id]) + @group ||= Group.find_by_full_path(params.permit(:id)[:id]) end def group_members @@ -111,6 +111,10 @@ class Admin::GroupsController < Admin::ApplicationController ] } ] end + + def safe_params + params.permit(:name, :members_page, :projects_page) + end end Admin::GroupsController.prepend_mod_with('Admin::GroupsController') diff --git a/app/controllers/admin/hook_logs_controller.rb b/app/controllers/admin/hook_logs_controller.rb index a283d3abb0b..08c010498a3 100644 --- a/app/controllers/admin/hook_logs_controller.rb +++ b/app/controllers/admin/hook_logs_controller.rb @@ -7,7 +7,7 @@ module Admin private def hook - @hook ||= SystemHook.find(params[:hook_id]) + @hook ||= SystemHook.find(params.permit(:hook_id)[:hook_id]) end def after_retry_redirect_path diff --git a/app/controllers/admin/hooks_controller.rb b/app/controllers/admin/hooks_controller.rb index 71cd56cfa84..457f2763645 100644 --- a/app/controllers/admin/hooks_controller.rb +++ b/app/controllers/admin/hooks_controller.rb @@ -8,7 +8,7 @@ class Admin::HooksController < Admin::ApplicationController before_action :not_found, unless: -> { system_hooks? 
} def test - result = TestHooks::SystemService.new(hook, current_user, params[:trigger]).execute + result = TestHooks::SystemService.new(hook, current_user, params.permit(:trigger)[:trigger]).execute set_hook_execution_notice(result) @@ -22,7 +22,7 @@ class Admin::HooksController < Admin::ApplicationController end def hook - @hook ||= SystemHook.find(params[:id]) + @hook ||= SystemHook.find(params.permit(:id)[:id]) end def hook_param_names diff --git a/app/controllers/admin/identities_controller.rb b/app/controllers/admin/identities_controller.rb index 443a3661ed2..75c8a7110f6 100644 --- a/app/controllers/admin/identities_controller.rb +++ b/app/controllers/admin/identities_controller.rb @@ -60,12 +60,12 @@ class Admin::IdentitiesController < Admin::ApplicationController # rubocop: disable CodeReuse/ActiveRecord def user - @user ||= User.find_by!(username: params[:user_id]) + @user ||= User.find_by!(username: params.permit(:user_id)[:user_id]) end # rubocop: enable CodeReuse/ActiveRecord def identity - @identity ||= user.identities.find(params[:id]) + @identity ||= user.identities.find(params.permit(:id)[:id]) end def identity_params diff --git a/app/controllers/admin/impersonation_tokens_controller.rb b/app/controllers/admin/impersonation_tokens_controller.rb index 1f25dad3428..045593704d7 100644 --- a/app/controllers/admin/impersonation_tokens_controller.rb +++ b/app/controllers/admin/impersonation_tokens_controller.rb @@ -24,7 +24,7 @@ class Admin::ImpersonationTokensController < Admin::ApplicationController end def revoke - @impersonation_token = finder.find(params[:id]) + @impersonation_token = finder.find(params.permit(:id)[:id]) if @impersonation_token.revoke! 
flash[:notice] = format(_("Revoked impersonation token %{token_name}!"), token_name: @impersonation_token.name) @@ -39,7 +39,7 @@ class Admin::ImpersonationTokensController < Admin::ApplicationController # rubocop: disable CodeReuse/ActiveRecord def user - @user ||= User.find_by!(username: params[:user_id]) + @user ||= User.find_by!(username: params.permit(:user_id)[:user_id]) end # rubocop: enable CodeReuse/ActiveRecord diff --git a/app/controllers/admin/labels_controller.rb b/app/controllers/admin/labels_controller.rb index 10d060b8161..abed42a160e 100644 --- a/app/controllers/admin/labels_controller.rb +++ b/app/controllers/admin/labels_controller.rb @@ -7,7 +7,7 @@ class Admin::LabelsController < Admin::ApplicationController urgency :low def index - @labels = Label.templates.page(params[:page]) + @labels = Label.templates.page(pagination_params[:page]) end def show @@ -61,10 +61,10 @@ class Admin::LabelsController < Admin::ApplicationController private def set_label - @label = Label.find(params[:id]) + @label = Label.find(params.permit(:id)[:id]) end def label_params - params[:label].permit(:title, :description, :color) + params[:label].permit(:title, :description, :color) # rubocop:disable Rails/StrongParams -- hash access is safely followed by permit end end diff --git a/app/controllers/admin/runner_projects_controller.rb b/app/controllers/admin/runner_projects_controller.rb index 79a31331374..c26f9576b58 100644 --- a/app/controllers/admin/runner_projects_controller.rb +++ b/app/controllers/admin/runner_projects_controller.rb @@ -7,7 +7,7 @@ class Admin::RunnerProjectsController < Admin::ApplicationController urgency :low def create - @runner = Ci::Runner.find(params[:runner_project][:runner_id]) + @runner = Ci::Runner.find(safe_params[:runner_project][:runner_id]) if ::Ci::Runners::AssignRunnerService.new(@runner, @project, current_user).execute.success? 
flash[:success] = s_('Runners|Runner assigned to project.') @@ -18,7 +18,7 @@ class Admin::RunnerProjectsController < Admin::ApplicationController end def destroy - rp = Ci::RunnerProject.find(params[:id]) + rp = Ci::RunnerProject.find(safe_params[:id]) runner = rp.runner ::Ci::Runners::UnassignRunnerService.new(rp, current_user).execute @@ -31,8 +31,12 @@ class Admin::RunnerProjectsController < Admin::ApplicationController def project @project = Project.find_by_full_path( - [params[:namespace_id], '/', params[:project_id]].join('') + [safe_params[:namespace_id], '/', safe_params[:project_id]].join('') ) @project || render_404 end + + def safe_params + params.permit(:id, :namespace_id, :project_id, runner_project: [:runner_id]) + end end diff --git a/app/controllers/admin/sessions_controller.rb b/app/controllers/admin/sessions_controller.rb index bb275532170..2ac204bd9d0 100644 --- a/app/controllers/admin/sessions_controller.rb +++ b/app/controllers/admin/sessions_controller.rb @@ -63,7 +63,7 @@ class Admin::SessionsController < ApplicationController end def user_params - params.fetch(:user, {}).permit(:password, :otp_attempt, :device_response) + params.fetch(:user, {}).permit(:password, :otp_attempt, :device_response) # rubocop:disable Rails/StrongParams -- fetch is safely followed by permit end def valid_otp_attempt?(user) diff --git a/app/controllers/admin/slacks_controller.rb b/app/controllers/admin/slacks_controller.rb index 15e93d0b9ce..ede2b274470 100644 --- a/app/controllers/admin/slacks_controller.rb +++ b/app/controllers/admin/slacks_controller.rb @@ -21,7 +21,7 @@ module Admin end def installation_service - Integrations::SlackInstallation::InstanceService.new(current_user: current_user, params: params) + Integrations::SlackInstallation::InstanceService.new(current_user: current_user, params: params.permit(:code)) end end end diff --git a/app/controllers/admin/spam_logs_controller.rb b/app/controllers/admin/spam_logs_controller.rb index 
d7ed6aa33ef..66272d00c56 100644 --- a/app/controllers/admin/spam_logs_controller.rb +++ b/app/controllers/admin/spam_logs_controller.rb @@ -7,14 +7,14 @@ class Admin::SpamLogsController < Admin::ApplicationController def index @spam_logs = SpamLog.preload(user: [:trusted_with_spam_attribute]) .order(id: :desc) - .page(params[:page]).without_count + .page(pagination_params[:page]).without_count end # rubocop: enable CodeReuse/ActiveRecord def destroy - spam_log = SpamLog.find(params[:id]) + spam_log = SpamLog.find(safe_params[:id]) - if params[:remove_user] + if safe_params[:remove_user] spam_log.remove_user(deleted_by: current_user) redirect_to admin_spam_logs_path, status: :found, @@ -26,7 +26,7 @@ class Admin::SpamLogsController < Admin::ApplicationController end def mark_as_ham - spam_log = SpamLog.find(params[:id]) + spam_log = SpamLog.find(safe_params[:id]) if Spam::HamService.new(spam_log).execute redirect_to admin_spam_logs_path, notice: _('Spam log successfully submitted as ham.') @@ -34,4 +34,10 @@ class Admin::SpamLogsController < Admin::ApplicationController redirect_to admin_spam_logs_path, alert: _('Error with Akismet. Please check the logs for more info.') end end + + private + + def safe_params + params.permit(:id, :remove_user) + end end diff --git a/app/controllers/admin/topics/avatars_controller.rb b/app/controllers/admin/topics/avatars_controller.rb index ee0a7e68bb3..397a0c0d05f 100644 --- a/app/controllers/admin/topics/avatars_controller.rb +++ b/app/controllers/admin/topics/avatars_controller.rb @@ -4,7 +4,7 @@ class Admin::Topics::AvatarsController < Admin::ApplicationController feature_category :groups_and_projects def destroy - @topic = Projects::Topic.find(params[:topic_id]) + @topic = Projects::Topic.find(params.permit(:topic_id)[:topic_id]) @topic.remove_avatar! 
@topic.save diff --git a/app/controllers/admin/topics_controller.rb b/app/controllers/admin/topics_controller.rb index 40cad2d26f4..9578609dd2d 100644 --- a/app/controllers/admin/topics_controller.rb +++ b/app/controllers/admin/topics_controller.rb @@ -9,7 +9,7 @@ class Admin::TopicsController < Admin::ApplicationController feature_category :groups_and_projects def index - @topics = Projects::TopicsFinder.new(params: params.permit(:search)).execute.page(params[:page]).without_count + @topics = Projects::TopicsFinder.new(params: params.permit(:search)).execute.page(pagination_params[:page]).without_count end def new @@ -60,7 +60,7 @@ class Admin::TopicsController < Admin::ApplicationController private def topic - @topic ||= Projects::Topic.find(params[:id]) + @topic ||= Projects::Topic.find(params.permit(:id)[:id]) end def topic_params diff --git a/app/controllers/groups/application_controller.rb b/app/controllers/groups/application_controller.rb index 453214d57fd..71b54527e8d 100644 --- a/app/controllers/groups/application_controller.rb +++ b/app/controllers/groups/application_controller.rb @@ -51,6 +51,10 @@ class Groups::ApplicationController < ApplicationController render_403 unless can?(current_user, :admin_group_member, group) end + def authorize_owner_access! + render_403 unless can?(current_user, :owner_access, group) + end + def authorize_billings_page! 
render_404 unless can?(current_user, :read_billing, group) end diff --git a/app/controllers/groups/group_members_controller.rb b/app/controllers/groups/group_members_controller.rb index 1d85a8839b6..48b8629eb15 100644 --- a/app/controllers/groups/group_members_controller.rb +++ b/app/controllers/groups/group_members_controller.rb @@ -13,6 +13,7 @@ class Groups::GroupMembersController < Groups::ApplicationController end # Authorize + before_action :authorize_owner_access!, only: :bulk_reassignment_file before_action :authorize_admin_group_member!, except: admin_not_required_endpoints before_action :authorize_read_group_member!, only: :index @@ -52,6 +53,22 @@ class Groups::GroupMembersController < Groups::ApplicationController ) end + def bulk_reassignment_file + return render_404 unless Feature.enabled?(:importer_user_mapping_reassignment_csv, current_user) + + csv_response = Import::SourceUsers::GenerateCsvService.new(membershipable, current_user: current_user).execute + + if csv_response.success? 
+ send_data( + csv_response.payload, + filename: "bulk_reassignments_for_namespace_#{membershipable.id}_#{Time.current.to_i}.csv", + type: 'text/csv; charset=utf-8' + ) + else + redirect_back_or_default(options: { alert: csv_response.message }) + end + end + # MembershipActions concern alias_method :membershipable, :group diff --git a/app/models/import/source_user.rb b/app/models/import/source_user.rb index cd89f865638..ccaaa1b8565 100644 --- a/app/models/import/source_user.rb +++ b/app/models/import/source_user.rb @@ -3,6 +3,7 @@ module Import class SourceUser < ApplicationRecord include Gitlab::SQL::Pattern + include EachBatch self.table_name = 'import_source_users' @@ -27,25 +28,35 @@ module Import scope :awaiting_reassignment, -> { where(status: [0, 1, 2, 3, 4]) } scope :reassigned, -> { where(status: [5, 6]) } + STATUSES = { + pending_reassignment: 0, + awaiting_approval: 1, + reassignment_in_progress: 2, + rejected: 3, + failed: 4, + completed: 5, + keep_as_placeholder: 6 + }.freeze + + ACCEPTED_STATUSES = %i[reassignment_in_progress completed failed].freeze + REASSIGNABLE_STATUSES = %i[pending_reassignment rejected].freeze + CANCELABLE_STATUSES = %i[awaiting_approval rejected].freeze + state_machine :status, initial: :pending_reassignment do - state :pending_reassignment, value: 0 - state :awaiting_approval, value: 1 - state :reassignment_in_progress, value: 2 - state :rejected, value: 3 - state :failed, value: 4 - state :completed, value: 5 - state :keep_as_placeholder, value: 6 + STATUSES.each do |status_name, value| + state status_name, value: value + end event :reassign do - transition [:pending_reassignment, :rejected] => :awaiting_approval + transition REASSIGNABLE_STATUSES => :awaiting_approval end event :cancel_reassignment do - transition [:awaiting_approval, :rejected] => :pending_reassignment + transition CANCELABLE_STATUSES => :pending_reassignment end event :keep_as_placeholder do - transition [:pending_reassignment, :rejected] => 
:keep_as_placeholder + transition REASSIGNABLE_STATUSES => :keep_as_placeholder end event :accept do @@ -99,15 +110,15 @@ module Import end def accepted_status? - reassignment_in_progress? || completed? || failed? + STATUSES.slice(*ACCEPTED_STATUSES).value?(status) end def reassignable_status? - pending_reassignment? || rejected? + STATUSES.slice(*REASSIGNABLE_STATUSES).value?(status) end def cancelable_status? - awaiting_approval? || rejected? + STATUSES.slice(*CANCELABLE_STATUSES).value?(status) end end end diff --git a/app/models/pool_repository.rb b/app/models/pool_repository.rb index f655cd8ba73..3340b5076d1 100644 --- a/app/models/pool_repository.rb +++ b/app/models/pool_repository.rb @@ -77,11 +77,6 @@ class PoolRepository < ApplicationRecord object_pool.create rescue GRPC::AlreadyExists # The object pool already exists. Nothing to do here. - rescue GRPC::FailedPrecondition => e - # This rescue is temporary until gitaly returns the correct error code for - # "repo exists already". Gitaly error messages are not guaranteed to match - # and so should not typically be used to determine error type. - raise unless e.message.include?('repository exists already') end # The members of the pool should have fetched the missing objects to their own diff --git a/app/models/virtual_registries/packages/maven/upstream.rb b/app/models/virtual_registries/packages/maven/upstream.rb index db1e41b5f75..55f2d24a102 100644 --- a/app/models/virtual_registries/packages/maven/upstream.rb +++ b/app/models/virtual_registries/packages/maven/upstream.rb @@ -34,6 +34,19 @@ module VirtualRegistries after_validation :reset_credentials, if: -> { persisted? && url_changed? } before_save :write_credentials + def url_for(path) + full_url = File.join(url, path) + Addressable::URI.parse(full_url).to_s + end + + def headers + return {} unless username.present? && password.present? 
+ + authorization = ActionController::HttpAuthentication::Basic.encode_credentials(username, password) + + { Authorization: authorization } + end + private def read_credentials diff --git a/app/policies/virtual_registries/packages/maven/registry_policy.rb b/app/policies/virtual_registries/packages/maven/registry_policy.rb new file mode 100644 index 00000000000..68cf38f2b9b --- /dev/null +++ b/app/policies/virtual_registries/packages/maven/registry_policy.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +module VirtualRegistries + module Packages + module Maven + class RegistryPolicy < ::BasePolicy + delegate { ::VirtualRegistries::Packages::Policies::Group.new(@subject.group) } + end + end + end +end diff --git a/app/services/ci/click_house/data_ingestion/finished_pipelines_sync_service.rb b/app/services/ci/click_house/data_ingestion/finished_pipelines_sync_service.rb index 12c0acd155f..bb67d95d419 100644 --- a/app/services/ci/click_house/data_ingestion/finished_pipelines_sync_service.rb +++ b/app/services/ci/click_house/data_ingestion/finished_pipelines_sync_service.rb @@ -50,13 +50,6 @@ module Ci ) end - if Feature.disabled?(:ci_pipelines_data_ingestion_to_click_house, Feature.current_request) - return ServiceResponse.error( - message: 'Feature ci_pipelines_data_ingestion_to_click_house is disabled', - reason: :disabled - ) - end - # Prevent parallel jobs in_lock("#{self.class.name.underscore}/worker/#{@worker_index}", ttl: MAX_TTL, retries: 0) do ::Gitlab::Database::LoadBalancing::Session.without_sticky_writes do diff --git a/app/services/import/source_users/generate_csv_service.rb b/app/services/import/source_users/generate_csv_service.rb new file mode 100644 index 00000000000..115600200af --- /dev/null +++ b/app/services/import/source_users/generate_csv_service.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +module Import + module SourceUsers + # This class generates CSV data for `Import::SourceUser` records associated + # with a namespace. 
This spreadsheet is filled in and re-uploaded to + # facilitate the user mapping flow. + class GenerateCsvService + HEADERS = [ + 'Source host', + 'Import type', + 'Source user identifier', + 'Source user name', + 'Source username', + 'GitLab username', + 'GitLab public email' + ].freeze + + # @param namespace [Namespace, Group] The namespace where the import source users are associated + # @param current_user [User] The user performing the CSV export + def initialize(namespace, current_user:) + @namespace = namespace + @current_user = current_user + end + + def execute + # We use :owner_access here because it's shared between GroupPolicy and + # NamespacePolicy. + return error_invalid_permissions unless current_user.can?(:owner_access, namespace) + + ServiceResponse.success(payload: csv_data) + end + + private + + attr_reader :namespace, :current_user + + def csv_data + CSV.generate do |csv| + csv << HEADERS + + import_source_users.each_batch(of: 1000) do |batch| + batch.each do |source_user| + csv << [ + source_user.source_hostname, + source_user.import_type, + source_user.source_user_identifier, + source_user.source_name, + source_user.source_username, + '', + '' + ] + end + end + end + end + + def import_source_users + statuses = Import::SourceUser::STATUSES.slice(*Import::SourceUser::REASSIGNABLE_STATUSES).values + namespace.import_source_users.by_statuses(statuses) + end + + def error_invalid_permissions + ServiceResponse.error( + message: s_('Import|You do not have permission to view import source users for this namespace'), + reason: :forbidden + ) + end + end + end +end diff --git a/app/services/virtual_registries/packages/maven/handle_file_request_service.rb b/app/services/virtual_registries/packages/maven/handle_file_request_service.rb new file mode 100644 index 00000000000..b3917d13d12 --- /dev/null +++ b/app/services/virtual_registries/packages/maven/handle_file_request_service.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +module 
VirtualRegistries + module Packages + module Maven + class HandleFileRequestService < ::BaseContainerService + alias_method :registry, :container + + TIMEOUT = 5 + + def initialize(registry:, current_user: nil, params: {}) + super(container: registry, current_user: current_user, params: params) + end + + def execute + return ServiceResponse.error(message: 'Path not present', reason: :path_not_present) unless path.present? + return ServiceResponse.error(message: 'Unauthorized', reason: :unauthorized) unless allowed? + + unless registry.upstream.present? + return ServiceResponse.error(message: 'No upstreams set', reason: :no_upstreams) + end + + # TODO check cached responses here + # If one exists and can be used, return it. + # https://gitlab.com/gitlab-org/gitlab/-/issues/467983 + handle_upstream(registry.upstream) + end + + private + + def handle_upstream(upstream) + url = upstream.url_for(path) + headers = upstream.headers + response = head_upstream(url: url, headers: headers) + + if response.success? + workhorse_send_url_response(url: url, headers: headers) + else + ServiceResponse.error(message: 'File not found on any upstream', reason: :file_not_found_on_upstreams) + end + rescue *::Gitlab::HTTP::HTTP_ERRORS + ServiceResponse.error(message: 'Upstream not available', reason: :upstream_not_available) + end + + def head_upstream(url:, headers:) + ::Gitlab::HTTP.head(url, headers: headers, follow_redirects: true, timeout: TIMEOUT) + end + + def allowed? 
+ can?(current_user, :read_virtual_registry, registry) + end + + def path + params[:path] + end + + def workhorse_send_url_response(url:, headers:) + ServiceResponse.success( + payload: { action: :workhorse_send_url, action_params: { url: url, headers: headers } } + ) + end + end + end + end +end diff --git a/config/feature_flags/wip/ci_pipelines_data_ingestion_to_click_house.yml b/config/feature_flags/wip/ci_pipelines_data_ingestion_to_click_house.yml deleted file mode 100644 index 23ecbf1ea71..00000000000 --- a/config/feature_flags/wip/ci_pipelines_data_ingestion_to_click_house.yml +++ /dev/null @@ -1,9 +0,0 @@ ---- -name: ci_pipelines_data_ingestion_to_click_house -feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/470079 -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/158362 -rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/470495 -milestone: '17.2' -group: group::runner -type: wip -default_enabled: false diff --git a/config/feature_flags/wip/virtual_registry_maven.yml b/config/feature_flags/wip/virtual_registry_maven.yml new file mode 100644 index 00000000000..fb8b77ec954 --- /dev/null +++ b/config/feature_flags/wip/virtual_registry_maven.yml @@ -0,0 +1,9 @@ +--- +name: virtual_registry_maven +feature_issue_url: https://gitlab.com/groups/gitlab-org/-/epics/14137 +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/160891 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/474863 +milestone: '17.3' +group: group::package registry +type: wip +default_enabled: false diff --git a/config/routes/group.rb b/config/routes/group.rb index 1e34de30790..b0bef2363a6 100644 --- a/config/routes/group.rb +++ b/config/routes/group.rb @@ -113,7 +113,11 @@ constraints(::Constraints::GroupUrlConstrainer.new) do resources :group_members, only: [:index, :update, :destroy], concerns: :access_requestable do post :resend_invite, on: :member - delete :leave, on: :collection + + collection do + 
get :bulk_reassignment_file + delete :leave + end end resources :group_links, only: [:update, :destroy], constraints: { id: /\d+|:id/ } diff --git a/db/docs/feature_gates.yml b/db/docs/feature_gates.yml index e7166d710a8..642602f5f52 100644 --- a/db/docs/feature_gates.yml +++ b/db/docs/feature_gates.yml @@ -1,7 +1,6 @@ --- table_name: feature_gates classes: -- Feature::BypassLoadBalancer::FlipperGate - Feature::FlipperGate - Flipper::Adapters::ActiveRecord::Gate feature_categories: diff --git a/db/docs/features.yml b/db/docs/features.yml index d8e6c0ddd75..eca348578cb 100644 --- a/db/docs/features.yml +++ b/db/docs/features.yml @@ -1,7 +1,6 @@ --- table_name: features classes: -- Feature::BypassLoadBalancer::FlipperFeature - Feature::FlipperFeature - Flipper::Adapters::ActiveRecord::Feature feature_categories: diff --git a/db/migrate/20240806222106_add_dev_widget_to_tasks.rb b/db/migrate/20240806222106_add_dev_widget_to_tasks.rb new file mode 100644 index 00000000000..1e9b3245117 --- /dev/null +++ b/db/migrate/20240806222106_add_dev_widget_to_tasks.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +class AddDevWidgetToTasks < Gitlab::Database::Migration[2.2] + class WorkItemType < MigrationRecord + self.table_name = 'work_item_types' + end + + class WidgetDefinition < MigrationRecord + self.table_name = 'work_item_widget_definitions' + end + + restrict_gitlab_migration gitlab_schema: :gitlab_main + disable_ddl_transaction! + milestone '17.3' + + TASK_ENUM_VALUE = 4 + WIDGET_NAME = 'Development' + WIDGET_ENUM_VALUE = 23 + + class MigrationWorkItemType < MigrationRecord + self.table_name = 'work_item_types' + end + + class MigrationWidgetDefinition < MigrationRecord + self.table_name = 'work_item_widget_definitions' + end + + def up + task_work_item_type = MigrationWorkItemType.find_by(base_type: TASK_ENUM_VALUE) + + # Task type should exist in production applications, checking here to avoid failures + # if inconsistent data is present. 
+ return say('Task work item type does not exist, skipping widget creation') unless task_work_item_type + + widgets = [ + { + work_item_type_id: task_work_item_type.id, + name: WIDGET_NAME, + widget_type: WIDGET_ENUM_VALUE + } + ] + + MigrationWidgetDefinition.upsert_all( + widgets, + unique_by: :index_work_item_widget_definitions_on_default_witype_and_name + ) + end + + def down + task_work_item_type = MigrationWorkItemType.find_by(base_type: TASK_ENUM_VALUE) + + return say('Task work item type does not exist, skipping widget removal') unless task_work_item_type + + widget_definition = MigrationWidgetDefinition.find_by( + work_item_type_id: task_work_item_type.id, + widget_type: WIDGET_ENUM_VALUE, + name: WIDGET_NAME + ) + + return say('Widget definition not found, skipping widget removal') unless widget_definition + + widget_definition.destroy + end +end diff --git a/db/post_migrate/20240808160937_add_not_null_constraint_to_p_ci_builds.rb b/db/post_migrate/20240808160937_add_not_null_constraint_to_p_ci_builds.rb new file mode 100644 index 00000000000..ad87dadfaa5 --- /dev/null +++ b/db/post_migrate/20240808160937_add_not_null_constraint_to_p_ci_builds.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +class AddNotNullConstraintToPCiBuilds < Gitlab::Database::Migration[2.2] + disable_ddl_transaction! 
+ milestone '17.3' + + CONSTRAINT_NAME = 'check_9aa9432137' + + def up + Gitlab::Database::PostgresPartitionedTable.each_partition(:p_ci_builds) do |partition| + add_not_null_constraint partition.identifier, :project_id, constraint_name: CONSTRAINT_NAME, validate: false + end + end + + def down + Gitlab::Database::PostgresPartitionedTable.each_partition(:p_ci_builds) do |partition| + remove_not_null_constraint partition.identifier, :project_id, constraint_name: CONSTRAINT_NAME + end + end +end diff --git a/db/post_migrate/20240808172712_prepare_p_ci_builds_project_id_not_null_validation.rb b/db/post_migrate/20240808172712_prepare_p_ci_builds_project_id_not_null_validation.rb new file mode 100644 index 00000000000..53dcb969d91 --- /dev/null +++ b/db/post_migrate/20240808172712_prepare_p_ci_builds_project_id_not_null_validation.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class PreparePCiBuildsProjectIdNotNullValidation < Gitlab::Database::Migration[2.2] + milestone '17.3' + + CONSTRAINT_NAME = 'check_9aa9432137' + + def up + Gitlab::Database::PostgresPartitionedTable.each_partition(:p_ci_builds) do |partition| + prepare_async_check_constraint_validation partition.identifier, name: CONSTRAINT_NAME + end + end + + def down + Gitlab::Database::PostgresPartitionedTable.each_partition(:p_ci_builds) do |partition| + unprepare_async_check_constraint_validation partition.identifier, name: CONSTRAINT_NAME + end + end +end diff --git a/db/schema_migrations/20240806222106 b/db/schema_migrations/20240806222106 new file mode 100644 index 00000000000..c6483833f23 --- /dev/null +++ b/db/schema_migrations/20240806222106 @@ -0,0 +1 @@ +b4cec70cbcf124395b511ce3957dde3ee34b33856c47ca4ced569ffebf70e502 \ No newline at end of file diff --git a/db/schema_migrations/20240808160937 b/db/schema_migrations/20240808160937 new file mode 100644 index 00000000000..58a2216a863 --- /dev/null +++ b/db/schema_migrations/20240808160937 @@ -0,0 +1 @@ 
+07efb85a3ec3b76b6ba2d849f0daf9dac6ae4dbc3aac95b09530f782bff56179 \ No newline at end of file diff --git a/db/schema_migrations/20240808172712 b/db/schema_migrations/20240808172712 new file mode 100644 index 00000000000..79de2ae9bcf --- /dev/null +++ b/db/schema_migrations/20240808172712 @@ -0,0 +1 @@ +df7c930e70c324848db73c5f006d3edb6e8a353aa3f7b80144a08a46ae041ff8 \ No newline at end of file diff --git a/db/structure.sql b/db/structure.sql index 3c5aa3a35b8..6bd2c27cab1 100644 --- a/db/structure.sql +++ b/db/structure.sql @@ -22913,6 +22913,9 @@ ALTER TABLE ci_runners ALTER TABLE ci_runners ADD CONSTRAINT check_91230910ec CHECK ((char_length((name)::text) <= 256)) NOT VALID; +ALTER TABLE ci_builds + ADD CONSTRAINT check_9aa9432137 CHECK ((project_id IS NOT NULL)) NOT VALID; + ALTER TABLE sprints ADD CONSTRAINT check_ccd8a1eae0 CHECK ((start_date IS NOT NULL)) NOT VALID; diff --git a/doc/install/install_methods.md b/doc/install/install_methods.md index 7a46942b3d9..d9b1756e30b 100644 --- a/doc/install/install_methods.md +++ b/doc/install/install_methods.md @@ -14,14 +14,70 @@ DETAILS: You can install GitLab on several [cloud providers](cloud_providers.md), or use one of the following methods. -| Installation method | Description | When to choose | -|----------------------------------------------------------------|-------------|----------------| -| [Linux package](https://docs.gitlab.com/omnibus/installation/) (previously known as Omnibus GitLab) | The official `deb` and `rpm` packages. The Linux package has GitLab and dependent components, including PostgreSQL, Redis, and Sidekiq. | Use if you want the most mature, scalable method. This version is also used on GitLab.com.
- For additional flexibility and resilience, see the [reference architecture documentation](../administration/reference_architectures/index.md).
- Review the [system requirements](requirements.md).
- View the [list of supported Linux operating systems](../administration/package_information/supported_os.md#supported-operating-systems). | -| [Helm chart](https://docs.gitlab.com/charts/) | A chart for installing a cloud-native version of GitLab and its components on Kubernetes. | Use if your infrastructure is on Kubernetes and you're familiar with how it works. Management, observability, and some concepts are different than traditional deployments.
- Administration and troubleshooting requires Kubernetes knowledge.
- It can be more expensive for smaller installations. The default installation requires more resources than a single node Linux package deployment, because most services are deployed in a redundant fashion.

| -| [GitLab Operator](https://docs.gitlab.com/operator/) | An installation and management method that follows the [Kubernetes Operator pattern](https://kubernetes.io/docs/concepts/extend-kubernetes/operator/) for installing a cloud-native version of GitLab and its components in Kubernetes. | Use if your infrastructure is on Kubernetes or [OpenShift](openshift_and_gitlab/index.md) and you're familiar with how Operators work. Provides additional functionality beyond the Helm chart installation method, including automation of the [GitLab upgrade steps](https://docs.gitlab.com/operator/gitlab_upgrades.html).
- The considerations for the Helm chart also apply here.
- Consider the Helm chart instead if you are limited by the [GitLab Operator's known issues](https://docs.gitlab.com/operator#known-issues). | -| [Docker](docker.md) | The GitLab packages in a Docker container. | Use if you're familiar with Docker. | -| [Source](installation.md) | GitLab and its components from scratch. | Use if none of the previous methods are available for your platform. Can use for unsupported systems like \*BSD.| -| [GitLab Environment Toolkit (GET)](https://gitlab.com/gitlab-org/gitlab-environment-toolkit#documentation) | A set of opinionated Terraform and Ansible scripts. | Use to deploy a [reference architecture](../administration/reference_architectures/index.md) on selected major cloud providers. Has some [limitations](https://gitlab.com/gitlab-org/gitlab-environment-toolkit#missing-features-to-be-aware-of) and manual setup for production environments. | +## Linux package + +The Linux package includes the official `deb` and `rpm` packages. The package has GitLab and dependent components, including PostgreSQL, Redis, and Sidekiq. + +Use if you want the most mature, scalable method. This version is also used on GitLab.com. + +For more information, see: + +- [Linux package](https://docs.gitlab.com/omnibus/installation/) +- [Reference architectures](../administration/reference_architectures/index.md) +- [System requirements](requirements.md) +- [Supported Linux operating systems](../administration/package_information/supported_os.md#supported-operating-systems) + +## Helm chart + +Use a chart to install a cloud-native version of GitLab and its components on Kubernetes. + +Use if your infrastructure is on Kubernetes and you're familiar with how it works. + +Before you use this installation method, consider that: + +- Management, observability, and some other concepts are different than traditional deployments. +- Administration and troubleshooting requires Kubernetes knowledge. +- It can be more expensive for smaller installations. 
+- The default installation requires more resources than a single node Linux package deployment, because most services are deployed in a redundant fashion. + +For more information, see [Helm charts](https://docs.gitlab.com/charts/). + +## GitLab Operator + +To install a cloud-native version of GitLab and its components in Kubernetes, use GitLab Operator. +This installation and management method follows the [Kubernetes Operator pattern](https://kubernetes.io/docs/concepts/extend-kubernetes/operator/). + +Use if your infrastructure is on Kubernetes or [OpenShift](openshift_and_gitlab/index.md), and you're familiar with how Operators work. + +This installation method provides additional functionality beyond the Helm chart installation method, including automation of the [GitLab upgrade steps](https://docs.gitlab.com/operator/gitlab_upgrades.html). The considerations for the Helm chart also apply here. + +Consider the Helm chart installation method if you are limited by [GitLab Operator known issues](https://docs.gitlab.com/operator#known-issues). + +For more information, see [GitLab Operator](https://docs.gitlab.com/operator/). + +## Docker + +Installs the GitLab packages in a Docker container. + +Use if you're familiar with Docker. + +For more information, see [Docker](docker.md). + +## Source + +Installs GitLab and its components from scratch. + +Use if none of the previous methods are available for your platform. Can use for unsupported systems like \*BSD. + +For more information, see [Source](installation.md). + +## GitLab Environment Toolkit (GET) + +[GitLab Environment Toolkit (GET)](https://gitlab.com/gitlab-org/gitlab-environment-toolkit#documentation) is a set of opinionated Terraform and Ansible scripts. + +Use to deploy a [reference architecture](../administration/reference_architectures/index.md) on selected major cloud providers. 
+ +This installation method has some [limitations](https://gitlab.com/gitlab-org/gitlab-environment-toolkit#missing-features-to-be-aware-of), and requires manual setup for production environments. ## Unsupported Linux distributions and Unix-like operating systems @@ -32,10 +88,11 @@ or use one of the following methods. - macOS Installation of GitLab on these operating systems is possible, but not supported. -See the [installation guides](https://about.gitlab.com/install/) for more information. -See [OS versions that are no longer supported](../administration/package_information/supported_os.md#os-versions-that-are-no-longer-supported) -for a list of supported and unsupported OS versions for Linux package installations as well as the last support GitLab version for that OS. +For more information, see: + +- [Installation guides](https://about.gitlab.com/install/) +- [Supported and unsupported OS versions for Linux package installations](../administration/package_information/supported_os.md#os-versions-that-are-no-longer-supported) ## Microsoft Windows diff --git a/doc/operations/incident_management/index.md b/doc/operations/incident_management/index.md index 6637f282b91..be749d5074c 100644 --- a/doc/operations/incident_management/index.md +++ b/doc/operations/incident_management/index.md @@ -10,6 +10,10 @@ DETAILS: **Tier:** Free, Premium, Ultimate **Offering:** GitLab.com, Self-managed, GitLab Dedicated +NOTE: +This feature is not under active development. For more information, see [issue 468607](https://gitlab.com/gitlab-org/gitlab/-/issues/468607#note_1967939452). +To determine if the feature meets your needs, see the [open bug issues](https://gitlab.com/gitlab-org/gitlab/-/issues/?sort=updated_desc&state=opened&label_name%5B%5D=Category%3AIncident%20Management&label_name%5B%5D=type%3A%3Abug&first_page_size=20). + Incident Management enables developers to easily triage and view the alerts and incidents generated by their application. 
By surfacing alerts and incidents where the code is being developed, efficiency and awareness can be increased. Check out the following sections for more information: diff --git a/doc/user/application_security/secret_detection/secret_push_protection/index.md b/doc/user/application_security/secret_detection/secret_push_protection/index.md index 59e621a5699..dd0d4700326 100644 --- a/doc/user/application_security/secret_detection/secret_push_protection/index.md +++ b/doc/user/application_security/secret_detection/secret_push_protection/index.md @@ -93,13 +93,13 @@ To enable secret push protection in a project: ## Coverage -Secret push protection checks the content of each commit diff when it is pushed to GitLab. +Secret push protection checks the content of each commit when it is pushed to GitLab. However, the following exclusions apply. Secret push protection does not check a file in a commit when: - The file is a binary file. -- The diff patch for the file is larger than 1 MiB. +- The file is larger than 1 MiB. - The file was renamed, deleted, or moved without changes to the content. - The content of the file is identical to the content of another file in the source code. - The file is contained in the initial push that created the repository. @@ -186,3 +186,20 @@ To skip secret push protection when using any Git client: For example, you are using the GitLab Web IDE and have several commits that are blocked from being pushed because one of them contains a secret. To skip secret push protection, edit the latest commit message and add `[skip secret push protection]`, then push the commits. + +## Troubleshooting + +When working with secret push protection, you may encounter the following situations. + +### Push blocked unexpectedly + +Secret Push Protection scans all contents of modified files. This can cause a push to be +unexpectedly blocked if a modified file contains a secret, even if the secret is not part of the diff. 
+ +To push a change to a file that contains a secret, you need to [skip secret push protection](#skip-secret-push-protection). + +[Issue 469161](https://gitlab.com/gitlab-org/gitlab/-/issues/469161) proposes to change the scanning logic to scan only diffs. + +### File was not scanned + +Some files are excluded from scanning. For a list of exclusions, see [coverage](#coverage). diff --git a/doc/user/gitlab_duo_chat/index.md b/doc/user/gitlab_duo_chat/index.md index b412ec63969..f929e8f9a1e 100644 --- a/doc/user/gitlab_duo_chat/index.md +++ b/doc/user/gitlab_duo_chat/index.md @@ -55,7 +55,6 @@ In the GitLab UI, GitLab Duo Chat knows about these areas: |---------------|------------------------------------------------------------------------------------------------------------------| | Epics | From the epic, ask about `this epic`, `this`, or the URL. From any UI area, ask about the URL. | | Issues | From the issue, ask about `this issue`, `this`, or the URL. From any UI area, ask about the URL. | -| Merge request | From the merge request, ask about `this merge request`, `this`, or the URL. From any UI area, ask about the URL. | | Code files | From the single file, ask about `this code` or `this file`. From any UI area, ask about the URL. | In the IDEs, GitLab Duo Chat knows about these areas: diff --git a/doc/user/group/roadmap/index.md b/doc/user/group/roadmap/index.md index de207579197..2c4a3fce046 100644 --- a/doc/user/group/roadmap/index.md +++ b/doc/user/group/roadmap/index.md @@ -70,7 +70,7 @@ In the Roadmap view, you can sort epics by: - Last updated date Each option contains a button that toggles the sort order between **ascending** -and **descending**. The sort option and order persist when browsing Epics, including +and **descending**. The sort option and order persist when browsing epics, including the [epics list view](../epics/index.md). 
In the Roadmap view, you can also filter by the epics': @@ -87,6 +87,18 @@ In the Roadmap view, you can also filter by the epics': You can also [visualize roadmaps inside of an epic](../epics/index.md#roadmap-in-epics). +### Improve the performance of the roadmap + +If your group contains a lot of epics, using filters can reduce the time your roadmap takes to load. +Filtering the roadmap reduces the amount of data the roadmap contains. Reducing +the data in the roadmap can also make it easier for you to find the information you're looking for. + +In particular, filtering based on labels can result in a significant performance improvement. + +After you set the filters you want to apply, you can save the URL as a bookmark in your +browser. +In the future, you can quickly load the filtered roadmap using the bookmark. + ### Roadmap settings > - Labels visible on roadmaps [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/385231) in GitLab 15.9. diff --git a/gems/gitlab-secret_detection/README.md b/gems/gitlab-secret_detection/README.md index ede38a5a01a..9fc770a8bf5 100644 --- a/gems/gitlab-secret_detection/README.md +++ b/gems/gitlab-secret_detection/README.md @@ -1,6 +1,6 @@ # Gitlab::SecretDetection -The gitlab-secret_detection gem performs keyword and regex matching on git diffs that may include secrets. The gem accepts one or more git diffs, matches them against a defined ruleset of regular expressions, and returns scan results. +The gitlab-secret_detection gem performs keyword and regex matching on git blobs that may include secrets. The gem accepts one or more git blobs, matches them against a defined ruleset of regular expressions, and returns scan results. 
##### Scan parameters @@ -10,10 +10,10 @@ accepts the following parameters: | Parameter | Type | Required | Default | Description | |----------------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `diffs` | Array | Yes | NA | Array of diffs. Each diff has attributes: left_blob_id, right_blob_id, patch, status, binary, and over_patch_bytes_limit. | +| `blobs` | Array | Yes | NA | Array of blobs with each blob to have `id` and `data` properties. `id` represents the uniqueness of the blob in the given array and `data` is the content of the blob to scan. | | `timeout` | Number | No | [`60s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L22) | The maximum duration allowed for the scan to run on a commit request comprising multiple blobs. If the specified timeout elapses, the scan is automatically terminated. The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. | -| `diff_timeout` | Number | No | [`5s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L24) | The maximum duration allowed for the scan to run on an individual diff. Upon expiration of the specified timeout, the scan is interrupted for the current diff and advances to the next diff in the request. 
The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. | -| `subprocess` | Boolean | No | [`true`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L34) | Runs the scan operation within a subprocess rather than the main process. This design aims to mitigate memory overconsumption issues that may arise from scanning multiple large diffs within a single subprocess. Check [here](https://docs.gitlab.com/ee/architecture/blueprints/secret_detection/decisions/002_run_scan_within_subprocess.html) for more details. | +| `blob_timeout` | Number | No | [`5s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L24) | The maximum duration allowed for the scan to run on an individual blob. Upon expiration of the specified timeout, the scan is interrupted for the current blob and advances to the next blob in the request. The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. | +| `subprocess` | Boolean | No | [`true`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L34) | Runs the scan operation within a subprocess rather than the main process. This design aims to mitigate memory overconsumption issues that may arise from scanning multiple large blobs within a single subprocess. Check [here](https://docs.gitlab.com/ee/architecture/blueprints/secret_detection/decisions/002_run_scan_within_subprocess.html) for more details. 
| ##### Scan Constraints diff --git a/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb b/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb index 0b58bc4213e..3918d584ccd 100644 --- a/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb +++ b/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb @@ -20,14 +20,14 @@ module Gitlab # default time limit(in seconds) for running the scan operation per invocation DEFAULT_SCAN_TIMEOUT_SECS = 60 - # default time limit(in seconds) for running the scan operation on a single diff - DEFAULT_DIFF_TIMEOUT_SECS = 5 + # default time limit(in seconds) for running the scan operation on a single blob + DEFAULT_BLOB_TIMEOUT_SECS = 5 # file path where the secrets ruleset file is located RULESET_FILE_PATH = File.expand_path('../../gitleaks.toml', __dir__) # Max no of child processes to spawn per request # ref: https://gitlab.com/gitlab-org/gitlab/-/issues/430160 MAX_PROCS_PER_REQUEST = 5 - # Minimum cumulative size of the diffs required to spawn and + # Minimum cumulative size of the blobs required to spawn and # run the scan within a new subprocess. MIN_CHUNK_SIZE_PER_PROC_BYTES = 2_097_152 # 2MiB # Whether to run scan in subprocesses or not. Default is true. @@ -46,24 +46,23 @@ module Gitlab @pattern_matcher = build_pattern_matcher(rules) end - # Runs Secret Detection scan on the list of given diffs. Both the total scan duration and - # the duration for each diff is time bound via +timeout+ and +diff_timeout+ respectively. + # Runs Secret Detection scan on the list of given blobs. Both the total scan duration and + # the duration for each blob is time bound via +timeout+ and +blob_timeout+ respectively. # - # +diffs+:: Array of diffs between diff pairs. Each diff has attributes: left_blob_id, right_blob_id, - # patch, status, binary, and over_patch_bytes_limit. + # +blobs+:: Array of blobs with each blob to have `id` and `data` properties. 
# +timeout+:: No of seconds(accepts floating point for smaller time values) to limit the total scan duration - # +diff_timeout+:: No of seconds(accepts floating point for smaller time values) to limit - # the scan duration on each diff + # +blob_timeout+:: No of seconds(accepts floating point for smaller time values) to limit + # the scan duration on each blob # +subprocess+:: If passed true, the scan is performed within subprocess instead of main process. - # To avoid over-consuming memory by running scan on multiple large diffs within a single subprocess, - # it instead groups the diffs into smaller array where each array contains diffs with cumulative size of + # To avoid over-consuming memory by running scan on multiple large blobs within a single subprocess, + # it instead groups the blobs into smaller array where each array contains blobs with cumulative size of # +MIN_CHUNK_SIZE_PER_PROC_BYTES+ bytes and each group runs in a separate sub-process. Default value # is true. # # NOTE: # Running the scan in fork mode primarily focuses on reducing the memory consumption of the scan by - # offloading regex operations on large diffs to sub-processes. However, it does not assure the improvement - # in the overall latency of the scan, specifically in the case of smaller diff sizes, where the overhead of + # offloading regex operations on large blobs to sub-processes. However, it does not assure the improvement + # in the overall latency of the scan, specifically in the case of smaller blob sizes, where the overhead of # forking a new process adds to the overall latency of the scan instead. More reference on Subprocess-based # execution is found here: https://gitlab.com/gitlab-org/gitlab/-/issues/430160. 
# @@ -74,25 +73,23 @@ module Gitlab # } # def secrets_scan( - diffs, + blobs, timeout: DEFAULT_SCAN_TIMEOUT_SECS, - diff_timeout: DEFAULT_DIFF_TIMEOUT_SECS, + blob_timeout: DEFAULT_BLOB_TIMEOUT_SECS, subprocess: RUN_IN_SUBPROCESS ) - - return SecretDetection::Response.new(SecretDetection::Status::INPUT_ERROR) unless validate_scan_input(diffs) + return SecretDetection::Response.new(SecretDetection::Status::INPUT_ERROR) unless validate_scan_input(blobs) Timeout.timeout(timeout) do - matched_diffs = filter_by_keywords(diffs) + matched_blobs = filter_by_keywords(blobs) - next SecretDetection::Response.new(SecretDetection::Status::NOT_FOUND) if matched_diffs.empty? + next SecretDetection::Response.new(SecretDetection::Status::NOT_FOUND) if matched_blobs.empty? - secrets = - if subprocess - run_scan_within_subprocess(matched_diffs, diff_timeout) - else - run_scan(matched_diffs, diff_timeout) - end + secrets = if subprocess + run_scan_within_subprocess(matched_blobs, blob_timeout) + else + run_scan(matched_blobs, blob_timeout) + end scan_status = overall_scan_status(secrets) @@ -146,137 +143,104 @@ module Gitlab secrets_keywords.flatten.compact.to_set end - # returns only those diffs that contain at least one of the keywords + # returns only those blobs that contain at least one of the keywords # from the keywords list - def filter_by_keywords(diffs) - matched_diffs = [] + def filter_by_keywords(blobs) + matched_blobs = [] - diffs.each do |diff| - matched_diffs << diff if keywords.any? { |keyword| diff.patch.include?(keyword) } + blobs.each do |blob| + matched_blobs << blob if keywords.any? 
{ |keyword| blob.data.include?(keyword) } end - matched_diffs.freeze + matched_blobs.freeze end - def run_scan(diffs, diff_timeout) - found_secrets = diffs.flat_map do |diff| - Timeout.timeout(diff_timeout) do - find_secrets(diff) + def run_scan(blobs, blob_timeout) + found_secrets = blobs.flat_map do |blob| + Timeout.timeout(blob_timeout) do + find_secrets(blob) end rescue Timeout::Error => e - logger.error "Secret Detection scan timed out on the diff(id:#{diff.right_blob_id}): #{e}" - SecretDetection::Finding.new(diff.right_blob_id, - SecretDetection::Status::DIFF_TIMEOUT) + logger.error "Secret Detection scan timed out on the blob(id:#{blob.id}): #{e}" + SecretDetection::Finding.new(blob.id, + SecretDetection::Status::BLOB_TIMEOUT) end found_secrets.freeze end - def run_scan_within_subprocess(diffs, diff_timeout) - diff_sizes = diffs.map { |diff| diff.patch.length } - grouped_diff_indicies = group_by_chunk_size(diff_sizes) + def run_scan_within_subprocess(blobs, blob_timeout) + blob_sizes = blobs.map(&:size) + grouped_blob_indicies = group_by_chunk_size(blob_sizes) - grouped_diffs = grouped_diff_indicies.map { |idx_arr| idx_arr.map { |i| diffs[i] } } + grouped_blobs = grouped_blob_indicies.map { |idx_arr| idx_arr.map { |i| blobs[i] } } found_secrets = Parallel.flat_map( - grouped_diffs, + grouped_blobs, in_processes: MAX_PROCS_PER_REQUEST, isolation: true # do not reuse sub-processes - ) do |grouped_diff| - grouped_diff.flat_map do |diff| - Timeout.timeout(diff_timeout) do - find_secrets(diff) + ) do |grouped_blob| + grouped_blob.flat_map do |blob| + Timeout.timeout(blob_timeout) do + find_secrets(blob) end rescue Timeout::Error => e - logger.error "Secret Detection scan timed out on the diff(id:#{diff.right_blob_id}): #{e}" - SecretDetection::Finding.new(diff.right_blob_id, - SecretDetection::Status::DIFF_TIMEOUT) + logger.error "Secret Detection scan timed out on the blob(id:#{blob.id}): #{e}" + SecretDetection::Finding.new(blob.id, + 
SecretDetection::Status::BLOB_TIMEOUT) end end found_secrets.freeze end - # finds secrets in the given diff with a timeout circuit breaker - def find_secrets(diff) - line_number_offset = 0 + # finds secrets in the given blob with a timeout circuit breaker + def find_secrets(blob) secrets = [] - lines = diff.patch.split("\n") + blob.data.each_line.with_index do |line, index| + patterns = pattern_matcher.match(line, exception: false) - # The following section parses the diff patch. - # - # If the line starts with @@, it is the hunk header, used to calculate the line number. - # If the line starts with +, it is newly added in this diff, and we - # scan the line for newly added secrets. Also increment line number. - # If the line starts with -, it is removed in this diff, do not increment line number. - # If the line starts with \\, it is the no newline marker, do not increment line number. - # If the line starts with a space character, it is a context line, just increment the line number. - # - # A context line that starts with an important character would still be treated - # like a context line, as shown below: - # @@ -1,5 +1,5 @@ - # context line - # -removed line - # +added line - # @@this context line has a @@ but starts with a space so isnt a header - # +this context line has a + but starts with a space so isnt an addition - # -this context line has a - but starts with a space so isnt a removal - lines.each do |line| - # Parse hunk header for start line - if line.start_with?("@@") - hunk_info = line.match(/@@ -\d+(,\d+)? \+(\d+)(,\d+)? @@/) - start_line = hunk_info[2].to_i - line_number_offset = start_line - 1 - # Line added in this commit - elsif line.start_with?('+') - line_number_offset += 1 - # Remove leading + - line_content = line[1..] + next unless patterns.any? - patterns = pattern_matcher.match(line_content, exception: false) - next unless patterns.any? 
+ line_number = index + 1 + patterns.each do |pattern| + type = rules[pattern]["id"] + description = rules[pattern]["description"] - patterns.each do |pattern| - type = rules[pattern]["id"] - description = rules[pattern]["description"] - - secrets << SecretDetection::Finding.new( - diff.right_blob_id, - SecretDetection::Status::FOUND, - line_number_offset, - type, - description - ) - end - # Line not added in this commit, just increment line number - elsif line.start_with?(' ') - line_number_offset += 1 - # Line removed in this commit or no newline marker, do not increment line number - elsif line.start_with?('-', '\\') - # No increment + secrets << SecretDetection::Finding.new( + blob.id, + SecretDetection::Status::FOUND, + line_number, + type, + description + ) end end secrets rescue StandardError => e - logger.error "Secret Detection scan failed on the diff(id:#{diff.right_blob_id}): #{e}" + logger.error "Secret Detection scan failed on the blob(id:#{blob.id}): #{e}" - SecretDetection::Finding.new(diff.right_blob_id, SecretDetection::Status::SCAN_ERROR) + SecretDetection::Finding.new(blob.id, SecretDetection::Status::SCAN_ERROR) end - def validate_scan_input(diffs) - return false if diffs.nil? || !diffs.instance_of?(Array) + def validate_scan_input(blobs) + return false if blobs.nil? || !blobs.instance_of?(Array) - diffs.each { |diff| diff.patch.freeze } + blobs.all? do |blob| + next false unless blob.respond_to?(:id) || blob.respond_to?(:data) + + blob.data.freeze # freeze blobs to avoid additional object allocations on strings + end end def overall_scan_status(found_secrets) return SecretDetection::Status::NOT_FOUND if found_secrets.empty? 
- timed_out_diffs = found_secrets.count { |el| el.status == SecretDetection::Status::DIFF_TIMEOUT } + timed_out_blobs = found_secrets.count { |el| el.status == SecretDetection::Status::BLOB_TIMEOUT } - case timed_out_diffs + case timed_out_blobs when 0 SecretDetection::Status::FOUND when found_secrets.length @@ -286,15 +250,15 @@ module Gitlab end end - # This method accepts an array of diff sizes(in bytes) and groups them into an array + # This method accepts an array of blob sizes(in bytes) and groups them into an array # of arrays structure where each element is the group of indicies of the input - # array whose cumulative diff sizes has at least +MIN_CHUNK_SIZE_PER_PROC_BYTES+ - def group_by_chunk_size(diff_size_arr) + # array whose cumulative blob sizes has at least +MIN_CHUNK_SIZE_PER_PROC_BYTES+ + def group_by_chunk_size(blob_size_arr) cumulative_size = 0 chunk_indexes = [] chunk_idx_start = 0 - diff_size_arr.each_with_index do |size, index| + blob_size_arr.each_with_index do |size, index| cumulative_size += size next unless cumulative_size >= MIN_CHUNK_SIZE_PER_PROC_BYTES @@ -304,11 +268,11 @@ module Gitlab cumulative_size = 0 end - if cumulative_size.positive? && (chunk_idx_start < diff_size_arr.length) - chunk_indexes << if chunk_idx_start == diff_size_arr.length - 1 + if cumulative_size.positive? 
&& (chunk_idx_start < blob_size_arr.length) + chunk_indexes << if chunk_idx_start == blob_size_arr.length - 1 [chunk_idx_start] else - (chunk_idx_start..diff_size_arr.length - 1).to_a + (chunk_idx_start..blob_size_arr.length - 1).to_a end end diff --git a/gems/gitlab-secret_detection/lib/gitlab/secret_detection/status.rb b/gems/gitlab-secret_detection/lib/gitlab/secret_detection/status.rb index 85d60b86009..200294fc2e7 100644 --- a/gems/gitlab-secret_detection/lib/gitlab/secret_detection/status.rb +++ b/gems/gitlab-secret_detection/lib/gitlab/secret_detection/status.rb @@ -8,7 +8,7 @@ module Gitlab FOUND = 1 # When scan operation completes with one or more findings FOUND_WITH_ERRORS = 2 # When scan operation completes with one or more findings along with some errors SCAN_TIMEOUT = 3 # When the scan operation runs beyond given time out - DIFF_TIMEOUT = 4 # When the scan operation on a diff runs beyond given time out + BLOB_TIMEOUT = 4 # When the scan operation on a blob runs beyond given time out SCAN_ERROR = 5 # When the scan operation fails due to regex error INPUT_ERROR = 6 # When the scan operation fails due to invalid input end diff --git a/gems/gitlab-secret_detection/spec/lib/gitlab/secret_detection/scan_spec.rb b/gems/gitlab-secret_detection/spec/lib/gitlab/secret_detection/scan_spec.rb index 8cba55865d1..71899cd77a1 100644 --- a/gems/gitlab-secret_detection/spec/lib/gitlab/secret_detection/scan_spec.rb +++ b/gems/gitlab-secret_detection/spec/lib/gitlab/secret_detection/scan_spec.rb @@ -5,13 +5,10 @@ require 'spec_helper' RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detection do subject(:scan) { described_class.new } - let(:diff_blob) do - Struct.new(:left_blob_id, :right_blob_id, :patch, :status, :binary, :over_patch_bytes_limit, keyword_init: true) + def new_blob(id:, data:) + Struct.new(:id, :data).new(id, data) end - let(:sha1_blank_sha) { ('0' * 40).freeze } - let(:sample_blob_id) { 'fe29d93da4843da433e62711ace82db601eb4f8f' } 
- let(:ruleset) do { "title" => "gitleaks config", @@ -67,108 +64,47 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio allow(scan).to receive(:parse_ruleset).and_return(ruleset) end - context 'when the diff does not contain a secret' do - let(:diffs) do + context 'when the blob does not contain a secret' do + let(:blobs) do [ - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1 @@\n+BASE_URL=https://foo.bar\n\\ No newline at end of file\n", - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ) + new_blob(id: 1234, data: "no secrets") ] end it "does not match" do expected_response = Gitlab::SecretDetection::Response.new(Gitlab::SecretDetection::Status::NOT_FOUND) - expect(scan.secrets_scan(diffs)).to eq(expected_response) + expect(scan.secrets_scan(blobs)).to eq(expected_response) end - it "attempts to keyword match returning no diffs for further scan" do + it "attempts to keyword match returning no blobs for further scan" do expect(scan).to receive(:filter_by_keywords) - .with(diffs) + .with(blobs) .and_return([]) - scan.secrets_scan(diffs) + scan.secrets_scan(blobs) end it "does not attempt to regex match" do expect(scan).not_to receive(:match_rules_bulk) - scan.secrets_scan(diffs) + scan.secrets_scan(blobs) end end - context "when multiple diffs contains secrets" do - let(:diffs) do + context "when multiple blobs contains secrets" do + let(:blobs) do [ - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1 @@\n+glpat-12312312312312312312\n", # gitleaks:allow - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,3 @@\n+\n+\n+glptt-1231231231231231231212312312312312312312\n", # gitleaks:allow - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false 
- ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1 @@\n+data with no secret\n", - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,2 @@\n+GR134894112312312312312312312\n+glft-12312312312312312312\n", # gitleaks:allow - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1 @@\n+data with no secret\n", - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1 @@\n+data with no secret\n", - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1 @@\n+glptt-1231231231231231231212312312312312312312\n", # gitleaks:allow - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,2 @@\n+glpat-12312312312312312312\n+GR134894112312312312312312312\n", # gitleaks:allow - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ) + new_blob(id: 111, data: "glpat-12312312312312312312"), # gitleaks:allow + new_blob(id: 222, data: "\n\nglptt-1231231231231231231212312312312312312312"), # gitleaks:allow + new_blob(id: 333, data: "data with no secret"), + new_blob(id: 444, + data: "GR134894112312312312312312312\nglft-12312312312312312312"), # gitleaks:allow + new_blob(id: 555, data: "data with no secret"), + new_blob(id: 666, data: "data with no secret"), + new_blob(id: 777, data: "\nglptt-1231231231231231231212312312312312312312"), # gitleaks:allow + 
new_blob(id: 888, + data: "glpat-12312312312312312312;GR134894112312312312312312312") # gitleaks:allow ] end @@ -177,51 +113,51 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio Gitlab::SecretDetection::Status::FOUND, [ Gitlab::SecretDetection::Finding.new( - diffs[0].right_blob_id, + blobs[0].id, Gitlab::SecretDetection::Status::FOUND, 1, ruleset['rules'][0]['id'], ruleset['rules'][0]['description'] ), Gitlab::SecretDetection::Finding.new( - diffs[1].right_blob_id, + blobs[1].id, Gitlab::SecretDetection::Status::FOUND, 3, ruleset['rules'][1]['id'], ruleset['rules'][1]['description'] ), Gitlab::SecretDetection::Finding.new( - diffs[3].right_blob_id, + blobs[3].id, Gitlab::SecretDetection::Status::FOUND, 1, ruleset['rules'][2]['id'], ruleset['rules'][2]['description'] ), Gitlab::SecretDetection::Finding.new( - diffs[3].right_blob_id, + blobs[3].id, Gitlab::SecretDetection::Status::FOUND, 2, ruleset['rules'][3]['id'], ruleset['rules'][3]['description'] ), Gitlab::SecretDetection::Finding.new( - diffs[6].right_blob_id, + blobs[6].id, Gitlab::SecretDetection::Status::FOUND, - 1, + 2, ruleset['rules'][1]['id'], ruleset['rules'][1]['description'] ), Gitlab::SecretDetection::Finding.new( - diffs[7].right_blob_id, + blobs[7].id, Gitlab::SecretDetection::Status::FOUND, 1, ruleset['rules'][0]['id'], ruleset['rules'][0]['description'] ), Gitlab::SecretDetection::Finding.new( - diffs[7].right_blob_id, + blobs[7].id, Gitlab::SecretDetection::Status::FOUND, - 2, + 1, ruleset['rules'][2]['id'], ruleset['rules'][2]['description'] ) @@ -229,93 +165,90 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio ) end - it "attempts to keyword match returning only filtered diffs for further scan" do - expected = diffs.reject { |d| d.patch.include?("data with no secret") } + it "attempts to keyword match returning only filtered blobs for further scan" do + expected = blobs.filter { |b| b.data != "data with no secret" } 
expect(scan).to receive(:filter_by_keywords) - .with(diffs) + .with(blobs) .and_return(expected) - scan.secrets_scan(diffs) + scan.secrets_scan(blobs) end it "matches multiple rules when running in main process" do - expect(scan.secrets_scan(diffs, subprocess: false)).to eq(expected_response) + expect(scan.secrets_scan(blobs, subprocess: false)).to eq(expected_response) + end + + context "in subprocess" do + let(:dummy_lines) do + 10_000 + end + + let(:large_blobs) do + dummy_data = "\nrandom data" * dummy_lines + [ + new_blob(id: 111, data: "glpat-12312312312312312312#{dummy_data}"), # gitleaks:allow + new_blob(id: 222, data: "\n\nglptt-1231231231231231231212312312312312312312#{dummy_data}"), # gitleaks:allow + new_blob(id: 333, data: "data with no secret#{dummy_data}"), + new_blob(id: 444, + data: "GR134894112312312312312312312\nglft-12312312312312312312#{dummy_data}"), # gitleaks:allow + new_blob(id: 555, data: "data with no secret#{dummy_data}"), + new_blob(id: 666, data: "data with no secret#{dummy_data}"), + new_blob(id: 777, data: "#{dummy_data}\nglptt-1231231231231231231212312312312312312312") # gitleaks:allow + ] + end + + it "matches multiple rules" do + expect(scan.secrets_scan(blobs, subprocess: true)).to eq(expected_response) + end + + it "allocates less memory than when running in main process" do + forked_stats = Benchmark::Malloc.new.run { scan.secrets_scan(large_blobs, subprocess: true) } + non_forked_stats = Benchmark::Malloc.new.run { scan.secrets_scan(large_blobs, subprocess: false) } + + max_processes = Gitlab::SecretDetection::Scan::MAX_PROCS_PER_REQUEST + + forked_memory = forked_stats.allocated.total_memory + non_forked_memory = non_forked_stats.allocated.total_memory + forked_obj_allocs = forked_stats.allocated.total_objects + non_forked_obj_allocs = non_forked_stats.allocated.total_objects + + expect(non_forked_memory).to be >= forked_memory * max_processes + expect(non_forked_obj_allocs).to be >= forked_obj_allocs * max_processes + end 
end end context "when configured with time out" do - let(:each_diff_timeout_secs) { 0.000_001 } # 1 micro-sec to intentionally timeout large diff + let(:each_blob_timeout_secs) { 0.000_001 } # 1 micro-sec to intentionally timeout large blob let(:large_data) do - ("\n+large data with a secret glpat-12312312312312312312" * 10_000_000).freeze # gitleaks:allow + ("large data with a secret glpat-12312312312312312312\n" * 10_000_000).freeze # gitleaks:allow end - let(:diffs) do + let(:blobs) do [ - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,2 @@\n+GR134894112312312312312312312\n", # gitleaks:allow - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,2 @@\n+data with no secret\n", - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n", - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ) + new_blob(id: 111, data: "GR134894112312312312312312312"), # gitleaks:allow + new_blob(id: 333, data: "data with no secret"), + new_blob(id: 333, data: large_data) ] end - let(:all_large_diffs) do + let(:all_large_blobs) do [ - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n", - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n", - status: :STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ), - diff_blob.new( - left_blob_id: sha1_blank_sha, - right_blob_id: sample_blob_id, - patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n", - status: 
:STATUS_END_OF_PATCH, - binary: false, - over_patch_bytes_limit: false - ) + new_blob(id: 111, data: large_data), + new_blob(id: 222, data: large_data), + new_blob(id: 333, data: large_data) ] end it "whole secret detection scan operation times out" do - scan_timeout_secs = 0.000_001 # 1 micro-sec to intentionally timeout large diff + scan_timeout_secs = 0.000_001 # 1 micro-sec to intentionally timeout large blob expected_response = Gitlab::SecretDetection::Response.new(Gitlab::SecretDetection::Status::SCAN_TIMEOUT) begin - response = scan.secrets_scan(diffs, timeout: scan_timeout_secs) + response = scan.secrets_scan(blobs, timeout: scan_timeout_secs) expect(response).to eq(expected_response) rescue ArgumentError # When RSpec's main process terminates and attempts to clean up child processes upon completion, it terminates @@ -331,50 +264,50 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio end end - it "one of the diffs times out while others continue to get scanned" do + it "one of the blobs times out while others continue to get scanned" do expected_response = Gitlab::SecretDetection::Response.new( Gitlab::SecretDetection::Status::FOUND_WITH_ERRORS, [ Gitlab::SecretDetection::Finding.new( - diffs[0].right_blob_id, + blobs[0].id, Gitlab::SecretDetection::Status::FOUND, 1, ruleset['rules'][2]['id'], ruleset['rules'][2]['description'] ), Gitlab::SecretDetection::Finding.new( - diffs[2].right_blob_id, - Gitlab::SecretDetection::Status::DIFF_TIMEOUT + blobs[2].id, + Gitlab::SecretDetection::Status::BLOB_TIMEOUT ) ] ) - expect(scan.secrets_scan(diffs, diff_timeout: each_diff_timeout_secs)).to eq(expected_response) + expect(scan.secrets_scan(blobs, blob_timeout: each_blob_timeout_secs)).to eq(expected_response) end - it "all the diffs time out" do - # scan status changes to SCAN_TIMEOUT when *all* the diffs time out + it "all the blobs time out" do + # scan status changes to SCAN_TIMEOUT when *all* the blobs time out 
expected_scan_status = Gitlab::SecretDetection::Status::SCAN_TIMEOUT expected_response = Gitlab::SecretDetection::Response.new( expected_scan_status, [ Gitlab::SecretDetection::Finding.new( - all_large_diffs[0].right_blob_id, - Gitlab::SecretDetection::Status::DIFF_TIMEOUT + all_large_blobs[0].id, + Gitlab::SecretDetection::Status::BLOB_TIMEOUT ), Gitlab::SecretDetection::Finding.new( - all_large_diffs[1].right_blob_id, - Gitlab::SecretDetection::Status::DIFF_TIMEOUT + all_large_blobs[1].id, + Gitlab::SecretDetection::Status::BLOB_TIMEOUT ), Gitlab::SecretDetection::Finding.new( - all_large_diffs[2].right_blob_id, - Gitlab::SecretDetection::Status::DIFF_TIMEOUT + all_large_blobs[2].id, + Gitlab::SecretDetection::Status::BLOB_TIMEOUT ) ] ) - expect(scan.secrets_scan(all_large_diffs, diff_timeout: each_diff_timeout_secs)).to eq(expected_response) + expect(scan.secrets_scan(all_large_blobs, blob_timeout: each_blob_timeout_secs)).to eq(expected_response) end end end diff --git a/lib/api/api.rb b/lib/api/api.rb index 73ee3787825..d2e848550bd 100644 --- a/lib/api/api.rb +++ b/lib/api/api.rb @@ -353,6 +353,7 @@ module API mount ::API::Users mount ::API::UserCounts mount ::API::UserRunners + mount ::API::VirtualRegistries::Packages::Maven mount ::API::Wikis add_open_api_documentation! diff --git a/lib/api/concerns/virtual_registries/packages/endpoint.rb b/lib/api/concerns/virtual_registries/packages/endpoint.rb new file mode 100644 index 00000000000..8a58ac1a8d3 --- /dev/null +++ b/lib/api/concerns/virtual_registries/packages/endpoint.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +module API + module Concerns + module VirtualRegistries + module Packages + module Endpoint + extend ActiveSupport::Concern + + NO_BROWSER_EXECUTION_RESPONSE_HEADERS = { 'Content-Security-Policy' => "default-src 'none'" }.freeze + MAJOR_BROWSERS = %i[webkit firefox ie edge opera chrome].freeze + WEB_BROWSER_ERROR_MESSAGE = 'This endpoint is not meant to be accessed by a web browser.' 
+ + TIMEOUTS = { + open: 10, + read: 10 + }.freeze + + RESPONSE_STATUSES = { + error: :bad_gateway, + timeout: :gateway_timeout + }.freeze + + included do + include ::API::Helpers::Authentication + + feature_category :virtual_registry + urgency :low + + helpers do + def require_non_web_browser! + browser = ::Browser.new(request.user_agent) + bad_request!(WEB_BROWSER_ERROR_MESSAGE) if MAJOR_BROWSERS.any? { |b| browser.method(:"#{b}?").call } + end + + def require_dependency_proxy_enabled! + not_found! unless ::Gitlab.config.dependency_proxy.enabled + end + + def send_successful_response_from(service_response:) + action, action_params = service_response.to_h.values_at(:action, :action_params) + case action + when :workhorse_send_url + workhorse_send_url(url: action_params[:url], headers: action_params[:headers]) + end + end + + def send_error_response_from!(service_response:) + case service_response.reason + when :unauthorized + unauthorized! + when :file_not_found_on_upstreams + not_found!(service_response.message) + else + bad_request!(service_response.message) + end + end + + def workhorse_send_url(url:, headers: {}) + send_workhorse_headers( + Gitlab::Workhorse.send_url( + url, + headers: headers, + allow_redirects: true, + timeouts: TIMEOUTS, + response_statuses: RESPONSE_STATUSES, + response_headers: NO_BROWSER_EXECUTION_RESPONSE_HEADERS + ) + ) + end + + def send_workhorse_headers(headers) + header(*headers) + env['api.format'] = :binary + content_type 'application/octet-stream' + status :ok + body '' + end + end + + after_validation do + not_found! unless Feature.enabled?(:virtual_registry_maven, current_user) + + require_non_web_browser! + require_dependency_proxy_enabled! + + authenticate! 
+ end + end + end + end + end + end +end diff --git a/lib/api/virtual_registries/packages/maven.rb b/lib/api/virtual_registries/packages/maven.rb new file mode 100644 index 00000000000..3cd6b3be428 --- /dev/null +++ b/lib/api/virtual_registries/packages/maven.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +module API + module VirtualRegistries + module Packages + class Maven < ::API::Base + include ::API::Concerns::VirtualRegistries::Packages::Endpoint + + authenticate_with do |accept| + accept.token_types(:personal_access_token).sent_through(:http_private_token_header) + accept.token_types(:deploy_token).sent_through(:http_deploy_token_header) + accept.token_types(:job_token).sent_through(:http_job_token_header) + + accept.token_types( + :personal_access_token_with_username, + :deploy_token_with_username, + :job_token_with_username + ).sent_through(:http_basic_auth) + end + + helpers do + include ::Gitlab::Utils::StrongMemoize + + def registry + ::VirtualRegistries::Packages::Maven::Registry.find(declared_params[:id]) + end + strong_memoize_attr :registry + end + + desc 'Download endpoint of the Maven virtual registry.' do + detail 'This feature was introduced in GitLab 17.3. \ + This feature is currently in experiment state. \ + This feature is behind the `virtual_registry_maven` feature flag.' 
+ success [ + { code: 200 } + ] + failure [ + { code: 400, message: 'Bad request' }, + { code: 401, message: 'Unauthorized' }, + { code: 403, message: 'Forbidden' }, + { code: 404, message: 'Not Found' } + ] + tags %w[maven_virtual_registries] + hidden true + end + params do + requires :id, + type: Integer, + desc: 'The ID of the Maven virtual registry' + requires :path, + type: String, + file_path: true, + desc: 'Package path', + documentation: { example: 'foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar' } + end + get 'virtual_registries/packages/maven/:id/*path', format: false do + service_response = ::VirtualRegistries::Packages::Maven::HandleFileRequestService.new( + registry: registry, + current_user: current_user, + params: { path: declared_params[:path] } + ).execute + + send_error_response_from!(service_response: service_response) if service_response.error? + send_successful_response_from(service_response: service_response) + end + end + end + end +end diff --git a/lib/feature.rb b/lib/feature.rb index 15e84e21797..96caf357ad5 100644 --- a/lib/feature.rb +++ b/lib/feature.rb @@ -4,43 +4,24 @@ require 'flipper/adapters/active_record' require 'flipper/adapters/active_support_cache_store' module Feature - module BypassLoadBalancer - FLAG = 'FEATURE_FLAGS_BYPASS_LOAD_BALANCER' - class FlipperRecord < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord -- This class perfectly replaces - # Flipper::Adapters::ActiveRecord::Model, which inherits ActiveRecord::Base - include DatabaseReflection - self.abstract_class = true + class FlipperRecord < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord -- This class perfectly replaces + # Flipper::Adapters::ActiveRecord::Model, which inherits ActiveRecord::Base + include DatabaseReflection + self.abstract_class = true - # Bypass the load balancer by restoring the default behavior of `connection` - # before the load balancer patches ActiveRecord::Base - def self.connection - retrieve_connection - end - 
end - - class FlipperFeature < FlipperRecord - self.table_name = 'features' - end - - class FlipperGate < FlipperRecord - self.table_name = 'feature_gates' - end - - def self.enabled? - Gitlab::Utils.to_boolean(ENV[FLAG], default: false) + # Bypass the load balancer by restoring the default behavior of `connection` + # before the load balancer patches ActiveRecord::Base + def self.connection + retrieve_connection end end - # Classes to override flipper table names - class FlipperFeature < Flipper::Adapters::ActiveRecord::Feature - include DatabaseReflection + class FlipperFeature < FlipperRecord + self.table_name = 'features' + end - # Using `self.table_name` won't work. ActiveRecord bug? - superclass.table_name = 'features' - - def self.feature_names - pluck(:key) - end + class FlipperGate < FlipperRecord + self.table_name = 'feature_gates' end class OptOut @@ -53,10 +34,6 @@ module Feature end end - class FlipperGate < Flipper::Adapters::ActiveRecord::Gate - superclass.table_name = 'feature_gates' - end - # Generates the same flipper_id when in a request # If not in a request, it generates a unique flipper_id every time class FlipperRequest @@ -97,8 +74,7 @@ module Feature end def persisted_names - model = BypassLoadBalancer.enabled? ? BypassLoadBalancer::FlipperRecord : ApplicationRecord - return [] unless model.database.exists? + return [] unless FlipperRecord.database.exists? # This loads names of all stored feature flags # and returns a stable Set in the following order: @@ -344,8 +320,7 @@ module Feature # During setup the database does not exist yet. So we haven't stored a value # for the feature yet and return the default. - model = BypassLoadBalancer.enabled? ? BypassLoadBalancer::FlipperRecord : ApplicationRecord - return unless model.database.exists? + return unless FlipperRecord.database.exists? 
flag_stack = ::Thread.current[:feature_flag_recursion_check] || [] Thread.current[:feature_flag_recursion_check] = flag_stack @@ -379,15 +354,9 @@ module Feature end def build_flipper_instance(memoize: false) - active_record_adapter = if BypassLoadBalancer.enabled? - Flipper::Adapters::ActiveRecord.new( - feature_class: BypassLoadBalancer::FlipperFeature, - gate_class: BypassLoadBalancer::FlipperGate) - else - Flipper::Adapters::ActiveRecord.new( - feature_class: FlipperFeature, - gate_class: FlipperGate) - end + active_record_adapter = Flipper::Adapters::ActiveRecord.new( + feature_class: FlipperFeature, + gate_class: FlipperGate) # Redis L2 cache redis_cache_adapter = ActiveSupportCacheStoreAdapter.new( diff --git a/lib/gitlab/database/load_balancing/host.rb b/lib/gitlab/database/load_balancing/host.rb index 0d1baf5ebe0..5e4ad301370 100644 --- a/lib/gitlab/database/load_balancing/host.rb +++ b/lib/gitlab/database/load_balancing/host.rb @@ -279,11 +279,11 @@ module Gitlab end def ignore_replication_lag_time? - Feature::BypassLoadBalancer.enabled? && Feature.enabled?(:load_balancer_ignore_replication_lag_time, type: :ops) + Feature.enabled?(:load_balancer_ignore_replication_lag_time, type: :ops) end def double_replication_lag_time? - Feature::BypassLoadBalancer.enabled? 
&& Feature.enabled?(:load_balancer_double_replication_lag_time, type: :ops) + Feature.enabled?(:load_balancer_double_replication_lag_time, type: :ops) end end end diff --git a/lib/gitlab/database_importers/work_items/base_type_importer.rb b/lib/gitlab/database_importers/work_items/base_type_importer.rb index ce2e845a449..451d97ba951 100644 --- a/lib/gitlab/database_importers/work_items/base_type_importer.rb +++ b/lib/gitlab/database_importers/work_items/base_type_importer.rb @@ -97,6 +97,7 @@ module Gitlab :crm_contacts, :current_user_todos, :description, + :development, :hierarchy, :iteration, :labels, diff --git a/lib/web_ide/extensions_marketplace.rb b/lib/web_ide/extensions_marketplace.rb index c407c5c8867..94a6797572f 100644 --- a/lib/web_ide/extensions_marketplace.rb +++ b/lib/web_ide/extensions_marketplace.rb @@ -53,13 +53,18 @@ module WebIde result = { enabled: false, reason: disabled_reason, help_url: help_url } - if disabled_reason == :opt_in_unset || disabled_reason == :opt_in_disabled - result[:user_preferences_url] = user_preferences_url - end - - result + result.merge(gallery_disabled_extra_attributes(disabled_reason: disabled_reason, user: user)) end + # rubocop:disable Lint/UnusedMethodArgument -- `user:` param is used in EE + def self.gallery_disabled_extra_attributes(disabled_reason:, user:) + return { user_preferences_url: user_preferences_url } if disabled_reason == :opt_in_unset + return { user_preferences_url: user_preferences_url } if disabled_reason == :opt_in_disabled + + {} + end + # rubocop:enable Lint/UnusedMethodArgument + def self.help_url ::Gitlab::Routing.url_helpers.help_page_url('user/project/web_ide/index', anchor: 'extension-marketplace') end @@ -72,3 +77,5 @@ module WebIde private_class_method :help_url, :user_preferences_url end end + +WebIde::ExtensionsMarketplace.prepend_mod diff --git a/lib/web_ide/settings/extensions_gallery_metadata_generator.rb b/lib/web_ide/settings/extensions_gallery_metadata_generator.rb index 
57cbd28d634..19fee10c6e2 100644 --- a/lib/web_ide/settings/extensions_gallery_metadata_generator.rb +++ b/lib/web_ide/settings/extensions_gallery_metadata_generator.rb @@ -9,14 +9,14 @@ module WebIde # the "gitlab-web-ide" and "gitlab-web-ide-vscode-fork" projects # (https://gitlab.com/gitlab-org/gitlab-web-ide & https://gitlab.com/gitlab-org/gitlab-web-ide-vscode-fork), # so we must ensure that any changes made here are also reflected in those projects. - DISABLED_REASONS = - %i[ - no_user - no_flag - instance_disabled - opt_in_unset - opt_in_disabled - ].to_h { |reason| [reason, reason] }.freeze + # Please also see EE_DISABLED_REASONS in the relevant EE module. + DISABLED_REASONS = %i[ + no_user + no_flag + instance_disabled + opt_in_unset + opt_in_disabled + ].to_h { |reason| [reason, reason] }.freeze # @param [Hash] context # @return [Hash] @@ -31,7 +31,7 @@ module WebIde extensions_marketplace_feature_flag_enabled } - extensions_gallery_metadata = metadata_for_user( + extensions_gallery_metadata = build_metadata( user: user, flag_enabled: extensions_marketplace_feature_flag_enabled ) @@ -43,11 +43,23 @@ module WebIde # @param [User, nil] user # @param [Boolean, nil] flag_enabled # @return [Hash] - def self.metadata_for_user(user:, flag_enabled:) + def self.build_metadata(user:, flag_enabled:) return metadata_disabled(:no_user) unless user return metadata_disabled(:no_flag) if flag_enabled.nil? 
return metadata_disabled(:instance_disabled) unless flag_enabled + build_metadata_for_user(user) + end + + def self.disabled_reasons + DISABLED_REASONS + end + + # note: This is overridden in EE + # + # @param [User] user + # @return [Hash] + def self.build_metadata_for_user(user) # noinspection RubyNilAnalysis -- RubyMine doesn't realize user can't be nil because of guard clause above opt_in_status = user.extensions_marketplace_opt_in_status.to_sym @@ -73,10 +85,13 @@ module WebIde # @param [symbol] reason # @return [Hash] def self.metadata_disabled(reason) - { enabled: false, disabled_reason: DISABLED_REASONS.fetch(reason) } + { enabled: false, disabled_reason: disabled_reasons.fetch(reason) } end - private_class_method :metadata_for_user, :metadata_enabled, :metadata_disabled + private_class_method :build_metadata, :build_metadata_for_user, :disabled_reasons, :metadata_enabled, + :metadata_disabled end end end + +WebIde::Settings::ExtensionsGalleryMetadataGenerator.prepend_mod diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 8417bf5aa54..54db409ae4f 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -867,6 +867,9 @@ msgstr "" msgid "%{group_name}&%{epic_iid} · created %{epic_created} by %{author}" msgstr "" +msgid "%{host} connection failed: %{error}." +msgstr "" + msgid "%{host} could not be reached. %{cta}" msgstr "" @@ -27595,6 +27598,9 @@ msgstr "" msgid "Import|You can import a Subversion repository by using third-party tools. %{svn_link}." 
msgstr "" +msgid "Import|You do not have permission to view import source users for this namespace" +msgstr "" + msgid "Import|You have insufficient permissions to update the import source user" msgstr "" diff --git a/spec/factories/import_source_users.rb b/spec/factories/import_source_users.rb index 079b661cd75..2943d1a8355 100644 --- a/spec/factories/import_source_users.rb +++ b/spec/factories/import_source_users.rb @@ -31,6 +31,10 @@ FactoryBot.define do status { 2 } end + trait :rejected do + status { 3 } + end + trait :completed do with_reassign_to_user status { 5 } diff --git a/spec/factories/virtual_registries/packages/maven/registries.rb b/spec/factories/virtual_registries/packages/maven/registries.rb index cdfa5d1b012..524cc076155 100644 --- a/spec/factories/virtual_registries/packages/maven/registries.rb +++ b/spec/factories/virtual_registries/packages/maven/registries.rb @@ -4,5 +4,9 @@ FactoryBot.define do factory :virtual_registries_packages_maven_registry, class: 'VirtualRegistries::Packages::Maven::Registry' do group cache_validity_hours { 1 } + + trait :with_upstream do + upstream { association(:virtual_registries_packages_maven_upstream, group: group) } + end end end diff --git a/spec/factories/virtual_registries/packages/maven/upstreams.rb b/spec/factories/virtual_registries/packages/maven/upstreams.rb index bb43effa890..16599344ea4 100644 --- a/spec/factories/virtual_registries/packages/maven/upstreams.rb +++ b/spec/factories/virtual_registries/packages/maven/upstreams.rb @@ -2,7 +2,7 @@ FactoryBot.define do factory :virtual_registries_packages_maven_upstream, class: 'VirtualRegistries::Packages::Maven::Upstream' do - url { 'http://local.test/maven' } + sequence(:url) { |n| "http://local.test/maven/#{n}" } username { 'user' } password { 'password' } registry { association(:virtual_registries_packages_maven_registry) } diff --git a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb 
b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb index c668bae4c77..70364097b27 100644 --- a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb +++ b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb @@ -20,8 +20,9 @@ RSpec.describe 'Merge request > User toggles whitespace changes', :js, feature_c end describe 'clicking "Hide whitespace changes" button' do - it 'toggles the "Hide whitespace changes" button', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/333793' do + it 'toggles the "Hide whitespace changes" button' do find_by_testid('show-whitespace').click + wait_for_requests visit diffs_project_merge_request_path(project, merge_request) diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb index fad586527fe..acf39fbbe53 100644 --- a/spec/lib/feature_spec.rb +++ b/spec/lib/feature_spec.rb @@ -8,1173 +8,1166 @@ RSpec.describe Feature, :clean_gitlab_redis_feature_flag, stub_feature_flags: fa # Pick a long-lasting real feature flag to test that we can check feature flags in the load balancer let(:load_balancer_test_feature_flag) { :require_email_verification } - where(:bypass_load_balancer) { [true, false] } - - with_them do - def wrap_all_methods_with_flag_check(lb, flag) - lb.methods(false).each do |meth| - allow(lb).to receive(meth).and_wrap_original do |m, *args, **kwargs, &block| - Feature.enabled?(flag) - m.call(*args, **kwargs, &block) - end + def wrap_all_methods_with_flag_check(lb, flag) + lb.methods(false).each do |meth| + allow(lb).to receive(meth).and_wrap_original do |m, *args, **kwargs, &block| + Feature.enabled?(flag) + m.call(*args, **kwargs, &block) end end - before do - if bypass_load_balancer - stub_env(Feature::BypassLoadBalancer::FLAG, 'true') - wrap_all_methods_with_flag_check(ApplicationRecord.load_balancer, load_balancer_test_feature_flag) - end + end + before do + wrap_all_methods_with_flag_check(ApplicationRecord.load_balancer, load_balancer_test_feature_flag) - 
# reset Flipper AR-engine - Feature.reset + # reset Flipper AR-engine + Feature.reset # rubocop:disable RSpec/DescribedClass -- in a nested group described_class becomes Feature::Target + end + + describe '.current_request' do + it 'returns a FlipperRequest with a flipper_id' do + flipper_request = described_class.current_request + + expect(flipper_request.flipper_id).to include("FlipperRequest:") end - describe '.current_request' do - it 'returns a FlipperRequest with a flipper_id' do - flipper_request = described_class.current_request + context 'when request store is inactive' do + it 'does not cache flipper_id' do + previous_id = described_class.current_request.flipper_id - expect(flipper_request.flipper_id).to include("FlipperRequest:") - end - - context 'when request store is inactive' do - it 'does not cache flipper_id' do - previous_id = described_class.current_request.flipper_id - - expect(described_class.current_request.flipper_id).not_to eq(previous_id) - end - end - - context 'when request store is active', :request_store do - it 'caches flipper_id when request store is active' do - previous_id = described_class.current_request.flipper_id - - expect(described_class.current_request.flipper_id).to eq(previous_id) - end - - it 'returns a new flipper_id when request ends' do - previous_id = described_class.current_request.flipper_id - - RequestStore.end! 
- - expect(described_class.current_request.flipper_id).not_to eq(previous_id) - end + expect(described_class.current_request.flipper_id).not_to eq(previous_id) end end - describe '.gitlab_instance' do - it 'returns a FlipperGitlabInstance with a flipper_id' do - flipper_request = described_class.gitlab_instance + context 'when request store is active', :request_store do + it 'caches flipper_id when request store is active' do + previous_id = described_class.current_request.flipper_id - expect(flipper_request.flipper_id).to include("FlipperGitlabInstance:") + expect(described_class.current_request.flipper_id).to eq(previous_id) end - it 'caches flipper_id' do - previous_id = described_class.gitlab_instance.flipper_id + it 'returns a new flipper_id when request ends' do + previous_id = described_class.current_request.flipper_id - expect(described_class.gitlab_instance.flipper_id).to eq(previous_id) + RequestStore.end! + + expect(described_class.current_request.flipper_id).not_to eq(previous_id) end end + end - describe '.get' do - let(:feature) { double(:feature) } - let(:key) { 'my_feature' } + describe '.gitlab_instance' do + it 'returns a FlipperGitlabInstance with a flipper_id' do + flipper_request = described_class.gitlab_instance - it 'returns the Flipper feature' do - expect_any_instance_of(Flipper::DSL).to receive(:feature).with(key) - .and_return(feature) - - expect(described_class.get(key)).to eq(feature) - end + expect(flipper_request.flipper_id).to include("FlipperGitlabInstance:") end - describe '.persisted_names' do - it 'returns the names of the persisted features' do - Feature.enable('foo') + it 'caches flipper_id' do + previous_id = described_class.gitlab_instance.flipper_id + expect(described_class.gitlab_instance.flipper_id).to eq(previous_id) + end + end + + describe '.get' do + let(:feature) { double(:feature) } + let(:key) { 'my_feature' } + + it 'returns the Flipper feature' do + expect_any_instance_of(Flipper::DSL).to 
receive(:feature).with(key) + .and_return(feature) + + expect(described_class.get(key)).to eq(feature) + end + end + + describe '.persisted_names' do + it 'returns the names of the persisted features' do + described_class.enable('foo') + + expect(described_class.persisted_names).to contain_exactly('foo') + end + + it 'returns an empty Array when no features are presisted' do + expect(described_class.persisted_names).to be_empty + end + + it 'caches the feature names when request store is active', + :request_store, :use_clean_rails_memory_store_caching do + described_class.enable('foo') + + expect(Gitlab::ProcessMemoryCache.cache_backend) + .to receive(:fetch) + .once + .with('flipper/v1/features', { expires_in: 1.minute }) + .and_call_original + + 2.times do expect(described_class.persisted_names).to contain_exactly('foo') end + end - it 'returns an empty Array when no features are presisted' do - expect(described_class.persisted_names).to be_empty + it 'fetches all flags once in a single query', :request_store do + described_class.enable('foo1') + described_class.enable('foo2') + + queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do + expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2') + + RequestStore.clear! + + expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2') end - it 'caches the feature names when request store is active', - :request_store, :use_clean_rails_memory_store_caching do - Feature.enable('foo') + expect(queries.count).to eq(1) + end + end - expect(Gitlab::ProcessMemoryCache.cache_backend) - .to receive(:fetch) - .once - .with('flipper/v1/features', { expires_in: 1.minute }) - .and_call_original + describe '.persisted_name?' 
do + context 'when the feature is persisted' do + it 'returns true when feature name is a string' do + described_class.enable('foo') + + expect(described_class.persisted_name?('foo')).to eq(true) + end + + it 'returns true when feature name is a symbol' do + described_class.enable('foo') + + expect(described_class.persisted_name?(:foo)).to eq(true) + end + end + + context 'when the feature is not persisted' do + it 'returns false when feature name is a string' do + expect(described_class.persisted_name?('foo')).to eq(false) + end + + it 'returns false when feature name is a symbol' do + expect(described_class.persisted_name?(:bar)).to eq(false) + end + end + end + + describe '.all' do + let(:features) { Set.new } + + it 'returns the Flipper features as an array' do + expect_any_instance_of(Flipper::DSL).to receive(:features) + .and_return(features) + + expect(described_class.all).to eq(features.to_a) + end + end + + describe '.flipper' do + context 'when request store is inactive' do + it 'memoizes the Flipper instance but does not not enable Flipper memoization' do + expect(Flipper).to receive(:new).once.and_call_original 2.times do - expect(described_class.persisted_names).to contain_exactly('foo') - end - end - - it 'fetches all flags once in a single query', :request_store do - Feature.enable('foo1') - Feature.enable('foo2') - - queries = ActiveRecord::QueryRecorder.new(skip_cached: false) do - expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2') - - RequestStore.clear! - - expect(described_class.persisted_names).to contain_exactly('foo1', 'foo2') - end - - expect(queries.count).to eq(1) - end - end - - describe '.persisted_name?' 
do - context 'when the feature is persisted' do - it 'returns true when feature name is a string' do - Feature.enable('foo') - - expect(described_class.persisted_name?('foo')).to eq(true) - end - - it 'returns true when feature name is a symbol' do - Feature.enable('foo') - - expect(described_class.persisted_name?(:foo)).to eq(true) - end - end - - context 'when the feature is not persisted' do - it 'returns false when feature name is a string' do - expect(described_class.persisted_name?('foo')).to eq(false) - end - - it 'returns false when feature name is a symbol' do - expect(described_class.persisted_name?(:bar)).to eq(false) - end - end - end - - describe '.all' do - let(:features) { Set.new } - - it 'returns the Flipper features as an array' do - expect_any_instance_of(Flipper::DSL).to receive(:features) - .and_return(features) - - expect(described_class.all).to eq(features.to_a) - end - end - - describe '.flipper' do - context 'when request store is inactive' do - it 'memoizes the Flipper instance but does not not enable Flipper memoization' do - expect(Flipper).to receive(:new).once.and_call_original - - 2.times do - described_class.flipper - end - - expect(described_class.flipper.adapter.memoizing?).to eq(false) - end - end - - context 'when request store is active', :request_store do - it 'memoizes the Flipper instance' do - expect(Flipper).to receive(:new).once.and_call_original - described_class.flipper - described_class.instance_variable_set(:@flipper, nil) - described_class.flipper - - expect(described_class.flipper.adapter.memoizing?).to eq(true) end + + expect(described_class.flipper.adapter.memoizing?).to eq(false) end end - describe '.enabled?' 
do + context 'when request store is active', :request_store do + it 'memoizes the Flipper instance' do + expect(Flipper).to receive(:new).once.and_call_original + + described_class.flipper + described_class.instance_variable_set(:@flipper, nil) + described_class.flipper + + expect(described_class.flipper.adapter.memoizing?).to eq(true) + end + end + end + + describe '.enabled?' do + before do + allow(described_class).to receive(:log_feature_flag_states?).and_return(false) + + stub_feature_flag_definition(:disabled_feature_flag) + stub_feature_flag_definition(:enabled_feature_flag, default_enabled: true) + end + + context 'when using redis cache', :use_clean_rails_redis_caching do + it 'does not make recursive feature-flag calls' do + expect(described_class).to receive(:enabled?).once.and_call_original + described_class.enabled?(:disabled_feature_flag) + end + end + + context 'when self-recursive' do before do - allow(Feature).to receive(:log_feature_flag_states?).and_return(false) - - stub_feature_flag_definition(:disabled_feature_flag) - stub_feature_flag_definition(:enabled_feature_flag, default_enabled: true) - end - - context 'when using redis cache', :use_clean_rails_redis_caching do - it 'does not make recursive feature-flag calls' do - expect(described_class).to receive(:enabled?).once.and_call_original - described_class.enabled?(:disabled_feature_flag) - end - end - - context 'when self-recursive' do - before do - allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block| - original.call(name) do |ff| - Feature.enabled?(name) - block.call(ff) - end + allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block| + original.call(name) do |ff| + Feature.enabled?(name) + block.call(ff) end end - - it 'returns the default value' do - expect(described_class.enabled?(:enabled_feature_flag)).to eq true - end - - it 'detects self recursion' do - expect(Gitlab::ErrorTracking) - .to receive(:track_exception) - 
.with(have_attributes(message: 'self recursion'), { stack: [:enabled_feature_flag] }) - - described_class.enabled?(:enabled_feature_flag) - end end - context 'when deeply recursive' do - before do - allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block| - original.call(name) do |ff| - Feature.enabled?(:"deeper_#{name}", type: :undefined, default_enabled_if_undefined: true) - block.call(ff) - end + it 'returns the default value' do + expect(described_class.enabled?(:enabled_feature_flag)).to eq true + end + + it 'detects self recursion' do + expect(Gitlab::ErrorTracking) + .to receive(:track_exception) + .with(have_attributes(message: 'self recursion'), { stack: [:enabled_feature_flag] }) + + described_class.enabled?(:enabled_feature_flag) + end + end + + context 'when deeply recursive' do + before do + allow(Feature).to receive(:with_feature).and_wrap_original do |original, name, &block| + original.call(name) do |ff| + Feature.enabled?(:"deeper_#{name}", type: :undefined, default_enabled_if_undefined: true) + block.call(ff) end end - - it 'detects deep recursion' do - expect(Gitlab::ErrorTracking) - .to receive(:track_exception) - .with(have_attributes(message: 'deep recursion'), stack: have_attributes(size: be > 10)) - - described_class.enabled?(:enabled_feature_flag) - end end - it 'returns false (and tracks / raises exception for dev) for undefined feature' do - expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + it 'detects deep recursion' do + expect(Gitlab::ErrorTracking) + .to receive(:track_exception) + .with(have_attributes(message: 'deep recursion'), stack: have_attributes(size: be > 10)) - expect(described_class.enabled?(:some_random_feature_fla, type: :undefined)).to be_falsey + described_class.enabled?(:enabled_feature_flag) end + end - it 'returns false for undefined feature with default_enabled_if_undefined: false' do - expect(described_class.enabled?(:some_random_feature_flag, type: 
:undefined, default_enabled_if_undefined: false)).to be_falsey + it 'returns false (and tracks / raises exception for dev) for undefined feature' do + expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + + expect(described_class.enabled?(:some_random_feature_fla, type: :undefined)).to be_falsey + end + + it 'returns false for undefined feature with default_enabled_if_undefined: false' do + expect(described_class.enabled?(:some_random_feature_flag, type: :undefined, default_enabled_if_undefined: false)).to be_falsey + end + + it 'returns true for undefined feature with default_enabled_if_undefined: true' do + expect(described_class.enabled?(:some_random_feature_flag, type: :undefined, default_enabled_if_undefined: true)).to be_truthy + end + + it 'returns false for existing disabled feature in the database' do + described_class.disable(:disabled_feature_flag) + + expect(described_class.enabled?(:disabled_feature_flag)).to be_falsey + end + + it 'returns true for existing enabled feature in the database' do + described_class.enable(:enabled_feature_flag) + + expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy + end + + it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) } + it { expect(described_class.send(:l2_cache_backend)).to eq(Gitlab::Redis::FeatureFlag.cache_store) } + + it 'caches the status in L1 and L2 caches', + :request_store, :use_clean_rails_memory_store_caching do + described_class.enable(:disabled_feature_flag) + flipper_key = "flipper/v1/feature/disabled_feature_flag" + + expect(described_class.send(:l2_cache_backend)) + .to receive(:fetch) + .once + .with(flipper_key, { expires_in: 1.hour }) + .and_call_original + + expect(described_class.send(:l1_cache_backend)) + .to receive(:fetch) + .once + .with(flipper_key, { expires_in: 1.minute }) + .and_call_original + + 2.times do + expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy end + end - it 
'returns true for undefined feature with default_enabled_if_undefined: true' do - expect(described_class.enabled?(:some_random_feature_flag, type: :undefined, default_enabled_if_undefined: true)).to be_truthy - end + it 'returns the default value when the database does not exist' do + fake_default = double('fake default') - it 'returns false for existing disabled feature in the database' do - described_class.disable(:disabled_feature_flag) + base_class = Feature::FlipperRecord + expect(base_class).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" } - expect(described_class.enabled?(:disabled_feature_flag)).to be_falsey - end + expect(described_class.enabled?(:a_feature, type: :undefined, default_enabled_if_undefined: fake_default)).to eq(fake_default) + end - it 'returns true for existing enabled feature in the database' do + context 'logging is enabled', :request_store do + before do + allow(described_class).to receive(:log_feature_flag_states?).and_call_original + + stub_feature_flag_definition(:enabled_feature_flag, log_state_changes: true) + + described_class.enable(:feature_flag_state_logs) described_class.enable(:enabled_feature_flag) - - expect(described_class.enabled?(:enabled_feature_flag)).to be_truthy + described_class.enabled?(:enabled_feature_flag) end - it { expect(described_class.send(:l1_cache_backend)).to eq(Gitlab::ProcessMemoryCache.cache_backend) } - it { expect(described_class.send(:l2_cache_backend)).to eq(Gitlab::Redis::FeatureFlag.cache_store) } + it 'does not log feature_flag_state_logs' do + expect(described_class.logged_states).not_to have_key("feature_flag_state_logs") + end - it 'caches the status in L1 and L2 caches', - :request_store, :use_clean_rails_memory_store_caching do - described_class.enable(:disabled_feature_flag) - flipper_key = "flipper/v1/feature/disabled_feature_flag" + it 'logs other feature flags' do + expect(described_class.logged_states).to have_key(:enabled_feature_flag) + 
expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy + end + end - expect(described_class.send(:l2_cache_backend)) - .to receive(:fetch) - .once - .with(flipper_key, { expires_in: 1.hour }) - .and_call_original + context 'cached feature flag', :request_store do + before do + described_class.send(:flipper).memoize = false + described_class.enabled?(:disabled_feature_flag) + end - expect(described_class.send(:l1_cache_backend)) - .to receive(:fetch) - .once - .with(flipper_key, { expires_in: 1.minute }) - .and_call_original - - 2.times do + it 'caches the status in L1 cache for the first minute' do + expect do + expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original + expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch) expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy - end + end.not_to exceed_query_limit(0) end - it 'returns the default value when the database does not exist' do - fake_default = double('fake default') - - base_class = Feature::BypassLoadBalancer.enabled? ? 
Feature::BypassLoadBalancer::FlipperRecord : ActiveRecord::Base - expect(base_class).to receive(:connection) { raise ActiveRecord::NoDatabaseError, "No database" } - - expect(described_class.enabled?(:a_feature, type: :undefined, default_enabled_if_undefined: fake_default)).to eq(fake_default) - end - - context 'logging is enabled', :request_store do - before do - allow(Feature).to receive(:log_feature_flag_states?).and_call_original - - stub_feature_flag_definition(:enabled_feature_flag, log_state_changes: true) - - described_class.enable(:feature_flag_state_logs) - described_class.enable(:enabled_feature_flag) - described_class.enabled?(:enabled_feature_flag) - end - - it 'does not log feature_flag_state_logs' do - expect(described_class.logged_states).not_to have_key("feature_flag_state_logs") - end - - it 'logs other feature flags' do - expect(described_class.logged_states).to have_key(:enabled_feature_flag) - expect(described_class.logged_states[:enabled_feature_flag]).to be_truthy - end - end - - context 'cached feature flag', :request_store do - before do - described_class.send(:flipper).memoize = false - described_class.enabled?(:disabled_feature_flag) - end - - it 'caches the status in L1 cache for the first minute' do + it 'caches the status in L2 cache after 2 minutes' do + travel_to 2.minutes.from_now do expect do expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original - expect(described_class.send(:l2_cache_backend)).not_to receive(:fetch) + expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy end.not_to exceed_query_limit(0) end - - it 'caches the status in L2 cache after 2 minutes' do - travel_to 2.minutes.from_now do - expect do - expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original - expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original - 
expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy - end.not_to exceed_query_limit(0) - end - end - - it 'fetches the status after an hour' do - travel_to 61.minutes.from_now do - expect do - expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original - expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original - expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy - end.not_to exceed_query_limit(1) - end - end end - [:current_request, :request, described_class.current_request].each do |thing| - context "with #{thing} actor" do - context 'when request store is inactive' do - it 'returns the approximate percentage set' do - number_of_times = 1_000 - percentage = 50 - described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage) + it 'fetches the status after an hour' do + travel_to 61.minutes.from_now do + expect do + expect(described_class.send(:l1_cache_backend)).to receive(:fetch).once.and_call_original + expect(described_class.send(:l2_cache_backend)).to receive(:fetch).once.and_call_original + expect(described_class.enabled?(:disabled_feature_flag)).to be_truthy + end.not_to exceed_query_limit(1) + end + end + end - gate_values = Array.new(number_of_times) do - described_class.enabled?(:enabled_feature_flag, thing) - end + [:current_request, :request, described_class.current_request].each do |thing| + context "with #{thing} actor" do + context 'when request store is inactive' do + it 'returns the approximate percentage set' do + number_of_times = 1_000 + percentage = 50 + described_class.enable_percentage_of_actors(:enabled_feature_flag, percentage) - margin_of_error = 0.05 * number_of_times - expected_size = number_of_times * percentage / 100 - expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size) + gate_values = Array.new(number_of_times) do + described_class.enabled?(:enabled_feature_flag, thing) end + + 
margin_of_error = 0.05 * number_of_times + expected_size = number_of_times * percentage / 100 + expect(gate_values.count { |v| v }).to be_within(margin_of_error).of(expected_size) end + end - context 'when request store is active', :request_store do - it 'always returns the same gate value' do - described_class.enable_percentage_of_actors(:enabled_feature_flag, 50) + context 'when request store is active', :request_store do + it 'always returns the same gate value' do + described_class.enable_percentage_of_actors(:enabled_feature_flag, 50) - previous_gate_value = described_class.enabled?(:enabled_feature_flag, thing) + previous_gate_value = described_class.enabled?(:enabled_feature_flag, thing) - 1_000.times do - expect(described_class.enabled?(:enabled_feature_flag, thing)).to eq(previous_gate_value) - end + 1_000.times do + expect(described_class.enabled?(:enabled_feature_flag, thing)).to eq(previous_gate_value) end end end end + end - context 'with gitlab_instance actor' do - it 'always returns the same gate value' do - described_class.enable(:enabled_feature_flag, described_class.gitlab_instance) + context 'with gitlab_instance actor' do + it 'always returns the same gate value' do + described_class.enable(:enabled_feature_flag, described_class.gitlab_instance) - expect(described_class.enabled?(:enabled_feature_flag, described_class.gitlab_instance)).to be_truthy + expect(described_class.enabled?(:enabled_feature_flag, described_class.gitlab_instance)).to be_truthy + end + end + + context 'with :instance actor' do + it 'always returns the same gate value' do + described_class.enable(:enabled_feature_flag, :instance) + + expect(described_class.enabled?(:enabled_feature_flag, :instance)).to be_truthy + end + end + + context 'with a group member' do + let(:key) { :awesome_feature } + let(:guinea_pigs) { create_list(:user, 3) } + + before do + described_class.reset + stub_feature_flag_definition(key) + Flipper.unregister_groups + Flipper.register(:guinea_pigs) do 
|actor| + guinea_pigs.include?(actor.thing) + end + described_class.enable(key, described_class.group(:guinea_pigs)) + end + + it 'is true for all group members' do + expect(described_class.enabled?(key, guinea_pigs[0])).to be_truthy + expect(described_class.enabled?(key, guinea_pigs[1])).to be_truthy + expect(described_class.enabled?(key, guinea_pigs[2])).to be_truthy + end + + it 'is false for any other actor' do + expect(described_class.enabled?(key, create(:user))).to be_falsey + end + end + + context 'with an individual actor' do + let(:actor) { stub_feature_flag_gate('CustomActor:5') } + let(:another_actor) { stub_feature_flag_gate('CustomActor:10') } + + before do + described_class.enable(:enabled_feature_flag, actor) + end + + it 'returns true when same actor is informed' do + expect(described_class.enabled?(:enabled_feature_flag, actor)).to be_truthy + end + + it 'returns false when different actor is informed' do + expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be_falsey + end + + it 'returns false when no actor is informed' do + expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey + end + end + + context 'with invalid actor' do + let(:actor) { double('invalid actor') } + + context 'when is dev_or_test_env' do + it 'does raise exception' do + expect { described_class.enabled?(:enabled_feature_flag, actor) } + .to raise_error(/needs to include `FeatureGate` or implement `flipper_id`/) + end + end + end + + context 'validates usage of feature flag with YAML definition' do + let(:definition) do + Feature::Definition.new('development/my_feature_flag.yml', + name: 'my_feature_flag', + type: 'development', + default_enabled: default_enabled + ).tap(&:validate!) 
+ end + + let(:default_enabled) { false } + + before do + stub_env('LAZILY_CREATE_FEATURE_FLAG', '0') + lb_ff_definition = Feature::Definition.get(load_balancer_test_feature_flag) + allow(Feature::Definition).to receive(:valid_usage!).and_call_original + allow(Feature::Definition).to receive(:definitions) do + { definition.key => definition, lb_ff_definition.key => lb_ff_definition } end end - context 'with :instance actor' do - it 'always returns the same gate value' do - described_class.enable(:enabled_feature_flag, :instance) + it 'when usage is correct' do + expect { described_class.enabled?(:my_feature_flag) }.not_to raise_error + end - expect(described_class.enabled?(:enabled_feature_flag, :instance)).to be_truthy + it 'when invalid type is used' do + expect { described_class.enabled?(:my_feature_flag, type: :ops) } + .to raise_error(/The given `type: :ops`/) + end + + context 'when default_enabled: is false in the YAML definition' do + it 'reads the default from the YAML definition' do + expect(described_class.enabled?(:my_feature_flag)).to eq(default_enabled) end end - context 'with a group member' do - let(:key) { :awesome_feature } - let(:guinea_pigs) { create_list(:user, 3) } + context 'when default_enabled: is true in the YAML definition' do + let(:default_enabled) { true } - before do - described_class.reset - stub_feature_flag_definition(key) - Flipper.unregister_groups - Flipper.register(:guinea_pigs) do |actor| - guinea_pigs.include?(actor.thing) + it 'reads the default from the YAML definition' do + expect(described_class.enabled?(:my_feature_flag)).to eq(true) + end + + context 'and feature has been disabled' do + before do + described_class.disable(:my_feature_flag) end - described_class.enable(key, described_class.group(:guinea_pigs)) - end - it 'is true for all group members' do - expect(described_class.enabled?(key, guinea_pigs[0])).to be_truthy - expect(described_class.enabled?(key, guinea_pigs[1])).to be_truthy - 
expect(described_class.enabled?(key, guinea_pigs[2])).to be_truthy - end - - it 'is false for any other actor' do - expect(described_class.enabled?(key, create(:user))).to be_falsey - end - end - - context 'with an individual actor' do - let(:actor) { stub_feature_flag_gate('CustomActor:5') } - let(:another_actor) { stub_feature_flag_gate('CustomActor:10') } - - before do - described_class.enable(:enabled_feature_flag, actor) - end - - it 'returns true when same actor is informed' do - expect(described_class.enabled?(:enabled_feature_flag, actor)).to be_truthy - end - - it 'returns false when different actor is informed' do - expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be_falsey - end - - it 'returns false when no actor is informed' do - expect(described_class.enabled?(:enabled_feature_flag)).to be_falsey - end - end - - context 'with invalid actor' do - let(:actor) { double('invalid actor') } - - context 'when is dev_or_test_env' do - it 'does raise exception' do - expect { described_class.enabled?(:enabled_feature_flag, actor) } - .to raise_error(/needs to include `FeatureGate` or implement `flipper_id`/) - end - end - end - - context 'validates usage of feature flag with YAML definition' do - let(:definition) do - Feature::Definition.new('development/my_feature_flag.yml', - name: 'my_feature_flag', - type: 'development', - default_enabled: default_enabled - ).tap(&:validate!) 
- end - - let(:default_enabled) { false } - - before do - stub_env('LAZILY_CREATE_FEATURE_FLAG', '0') - lb_ff_definition = Feature::Definition.get(load_balancer_test_feature_flag) - allow(Feature::Definition).to receive(:valid_usage!).and_call_original - allow(Feature::Definition).to receive(:definitions) do - { definition.key => definition, lb_ff_definition.key => lb_ff_definition } + it 'is not enabled' do + expect(described_class.enabled?(:my_feature_flag)).to eq(false) end end - it 'when usage is correct' do - expect { described_class.enabled?(:my_feature_flag) }.not_to raise_error - end - - it 'when invalid type is used' do - expect { described_class.enabled?(:my_feature_flag, type: :ops) } - .to raise_error(/The given `type: :ops`/) - end - - context 'when default_enabled: is false in the YAML definition' do - it 'reads the default from the YAML definition' do - expect(described_class.enabled?(:my_feature_flag)).to eq(default_enabled) + context 'with a cached value and the YAML definition is changed thereafter' do + before do + described_class.enabled?(:my_feature_flag) end - end - context 'when default_enabled: is true in the YAML definition' do - let(:default_enabled) { true } + it 'reads new default value' do + allow(definition).to receive(:default_enabled).and_return(true) - it 'reads the default from the YAML definition' do expect(described_class.enabled?(:my_feature_flag)).to eq(true) end + end - context 'and feature has been disabled' do - before do - described_class.disable(:my_feature_flag) - end + context 'when YAML definition does not exist for an optional type' do + let(:optional_type) { described_class::Shared::TYPES.find { |name, attrs| attrs[:optional] }.first } - it 'is not enabled' do - expect(described_class.enabled?(:my_feature_flag)).to eq(false) + context 'when in dev or test environment' do + it 'raises an error for dev' do + expect { described_class.enabled?(:non_existent_flag, type: optional_type) } + .to raise_error( + 
Feature::InvalidFeatureFlagError, + "The feature flag YAML definition for 'non_existent_flag' does not exist") end end - context 'with a cached value and the YAML definition is changed thereafter' do + context 'when in production' do before do - described_class.enabled?(:my_feature_flag) + allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false) end - it 'reads new default value' do - allow(definition).to receive(:default_enabled).and_return(true) - - expect(described_class.enabled?(:my_feature_flag)).to eq(true) - end - end - - context 'when YAML definition does not exist for an optional type' do - let(:optional_type) { described_class::Shared::TYPES.find { |name, attrs| attrs[:optional] }.first } - - context 'when in dev or test environment' do - it 'raises an error for dev' do - expect { described_class.enabled?(:non_existent_flag, type: optional_type) } - .to raise_error( - Feature::InvalidFeatureFlagError, - "The feature flag YAML definition for 'non_existent_flag' does not exist") - end - end - - context 'when in production' do + context 'when database exists' do before do - allow(Gitlab::ErrorTracking).to receive(:should_raise_for_dev?).and_return(false) + allow(ApplicationRecord.database).to receive(:exists?).and_return(true) end - context 'when database exists' do - before do - allow(ApplicationRecord.database).to receive(:exists?).and_return(true) - end + it 'checks the persisted status and returns false' do + expect(described_class).to receive(:with_feature).with(:non_existent_flag).and_call_original - it 'checks the persisted status and returns false' do - expect(described_class).to receive(:with_feature).with(:non_existent_flag).and_call_original + expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false) + end + end - expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false) - end + context 'when database does not exist' do + before do + 
allow(ApplicationRecord.database).to receive(:exists?).and_return(false) end - context 'when database does not exist' do - before do - allow(ApplicationRecord.database).to receive(:exists?).and_return(false) - end + it 'returns false without checking the status in the database' do + expect(described_class).not_to receive(:get) - it 'returns false without checking the status in the database' do - expect(described_class).not_to receive(:get) - - expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false) - end + expect(described_class.enabled?(:non_existent_flag, type: optional_type)).to eq(false) end end end end end end + end - describe '.disabled?' do - it 'returns true (and tracks / raises exception for dev) for undefined feature' do - expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + describe '.disabled?' do + it 'returns true (and tracks / raises exception for dev) for undefined feature' do + expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) - expect(described_class.disabled?(:some_random_feature_flag, type: :undefined)).to be_truthy - end - - it 'returns true for undefined feature with default_enabled_if_undefined: false' do - expect(described_class.disabled?(:some_random_feature_flag, type: :undefined, default_enabled_if_undefined: false)).to be_truthy - end - - it 'returns false for undefined feature with default_enabled_if_undefined: true' do - expect(described_class.disabled?(:some_random_feature_flag, type: :undefined, default_enabled_if_undefined: true)).to be_falsey - end - - it 'returns true for existing disabled feature in the database' do - stub_feature_flag_definition(:disabled_feature_flag) - described_class.disable(:disabled_feature_flag) - - expect(described_class.disabled?(:disabled_feature_flag)).to be_truthy - end - - it 'returns false for existing enabled feature in the database' do - stub_feature_flag_definition(:enabled_feature_flag) - 
described_class.enable(:enabled_feature_flag) - - expect(described_class.disabled?(:enabled_feature_flag)).to be_falsey - end + expect(described_class.disabled?(:some_random_feature_flag, type: :undefined)).to be_truthy end - shared_examples_for 'logging' do - let(:expected_action) {} - let(:expected_extra) {} - - it 'logs the event' do - expect(Feature.logger).to receive(:info).at_least(:once).with(key: key, action: expected_action, **expected_extra) - - subject - end + it 'returns true for undefined feature with default_enabled_if_undefined: false' do + expect(described_class.disabled?(:some_random_feature_flag, type: :undefined, default_enabled_if_undefined: false)).to be_truthy end - describe '.enable' do - subject { described_class.enable(key, thing) } + it 'returns false for undefined feature with default_enabled_if_undefined: true' do + expect(described_class.disabled?(:some_random_feature_flag, type: :undefined, default_enabled_if_undefined: true)).to be_falsey + end - let(:key) { :awesome_feature } - let(:thing) { true } + it 'returns true for existing disabled feature in the database' do + stub_feature_flag_definition(:disabled_feature_flag) + described_class.disable(:disabled_feature_flag) + + expect(described_class.disabled?(:disabled_feature_flag)).to be_truthy + end + + it 'returns false for existing enabled feature in the database' do + stub_feature_flag_definition(:enabled_feature_flag) + described_class.enable(:enabled_feature_flag) + + expect(described_class.disabled?(:enabled_feature_flag)).to be_falsey + end + end + + shared_examples_for 'logging' do + let(:expected_action) {} + let(:expected_extra) {} + + it 'logs the event' do + expect(described_class.logger).to receive(:info).at_least(:once).with(key: key, action: expected_action, **expected_extra) + + subject + end + end + + describe '.enable' do + subject { described_class.enable(key, thing) } + + let(:key) { :awesome_feature } + let(:thing) { true } + + it_behaves_like 'logging' do + 
let(:expected_action) { :enable } + let(:expected_extra) { { "extra.thing" => "true" } } + end + + # This is documented to return true, modify doc/administration/feature_flags.md if it changes + it 'returns true' do + expect(subject).to be true + end + + context 'when thing is an actor' do + let(:thing) { create(:user) } it_behaves_like 'logging' do - let(:expected_action) { :enable } - let(:expected_extra) { { "extra.thing" => "true" } } - end - - # This is documented to return true, modify doc/administration/feature_flags.md if it changes - it 'returns true' do - expect(subject).to be true - end - - context 'when thing is an actor' do - let(:thing) { create(:user) } - - it_behaves_like 'logging' do - let(:expected_action) { eq(:enable) | eq(:remove_opt_out) } - let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } } - end + let(:expected_action) { eq(:enable) | eq(:remove_opt_out) } + let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } } end end + end - describe '.disable' do - subject { described_class.disable(key, thing) } + describe '.disable' do + subject { described_class.disable(key, thing) } - let(:key) { :awesome_feature } - let(:thing) { false } + let(:key) { :awesome_feature } + let(:thing) { false } + + it_behaves_like 'logging' do + let(:expected_action) { :disable } + let(:expected_extra) { { "extra.thing" => "false" } } + end + + # This is documented to return true, modify doc/administration/feature_flags.md if it changes + it 'returns true' do + expect(subject).to be true + end + + context 'when thing is an actor' do + let(:thing) { create(:user) } + let(:flag_opts) { {} } it_behaves_like 'logging' do let(:expected_action) { :disable } - let(:expected_extra) { { "extra.thing" => "false" } } + let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } } end - # This is documented to return true, modify doc/administration/feature_flags.md if it changes - it 'returns true' do - expect(subject).to be true - end - - 
context 'when thing is an actor' do - let(:thing) { create(:user) } - let(:flag_opts) { {} } - - it_behaves_like 'logging' do - let(:expected_action) { :disable } - let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } } - end - - before do - stub_feature_flag_definition(key, flag_opts) - end - - context 'when the feature flag was enabled for this actor' do - before do - described_class.enable(key, thing) - end - - it 'marks this thing as disabled' do - expect { subject }.to change { thing_enabled? }.from(true).to(false) - end - - it 'does not change the global value' do - expect { subject }.not_to change { described_class.enabled?(key) }.from(false) - end - - it 'is possible to re-enable the feature' do - subject - - expect { described_class.enable(key, thing) } - .to change { thing_enabled? }.from(false).to(true) - end - end - - context 'when the feature flag is enabled globally' do - before do - described_class.enable(key) - end - - it 'does not mark this thing as disabled' do - expect { subject }.not_to change { thing_enabled? 
}.from(true) - end - - it 'does not change the global value' do - expect { subject }.not_to change { described_class.enabled?(key) }.from(true) - end - end - end - end - - describe '.group_ids_for' do - subject { described_class.group_ids_for(key) } - - let_it_be(:group) { create(:group) } - let_it_be(:project) { create(:project) } - let_it_be(:user) { create(:user) } - - let(:key) { :awesome_feature } - - it 'returns empty array' do - expect(subject).to be_empty - end - - context 'when group actor is enabled' do - before do - described_class.enable(key, group) - end - - it 'returns the group id' do - expect(subject).to eq([group.id.to_s]) - end - end - - context 'when global flag is enabled' do - before do - described_class.enable(key) - end - - it 'returns empty array' do - expect(subject).to be_empty - end - end - - context 'when project actor is enabled' do - before do - described_class.enable(key, project) - end - - it 'returns empty array' do - expect(subject).to be_empty - end - end - - context 'when user actor is enabled' do - before do - described_class.enable(key, user) - end - - it 'returns empty array' do - expect(subject).to be_empty - end - end - end - - describe 'opt_out' do - subject { described_class.opt_out(key, thing) } - - let(:key) { :awesome_feature } - before do - stub_feature_flag_definition(key) - described_class.enable(key) + stub_feature_flag_definition(key, flag_opts) end - context 'when thing is an actor' do - let_it_be(:thing) { create(:project) } + context 'when the feature flag was enabled for this actor' do + before do + described_class.enable(key, thing) + end it 'marks this thing as disabled' do expect { subject }.to change { thing_enabled? 
}.from(true).to(false) end it 'does not change the global value' do - expect { subject }.not_to change { described_class.enabled?(key) }.from(true) + expect { subject }.not_to change { described_class.enabled?(key) }.from(false) end - it_behaves_like 'logging' do - let(:expected_action) { eq(:opt_out) } - let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } } - end - - it 'stores the opt-out information as a gate' do + it 'is possible to re-enable the feature' do subject - flag = described_class.get(key) - - expect(flag.actors_value).to include(include(thing.flipper_id)) - expect(flag.actors_value).not_to include(thing.flipper_id) + expect { described_class.enable(key, thing) } + .to change { thing_enabled? }.from(false).to(true) end end - context 'when thing is a group' do - let(:thing) { Feature.group(:guinea_pigs) } - let(:guinea_pigs) { create_list(:user, 3) } - + context 'when the feature flag is enabled globally' do before do - Feature.reset - Flipper.unregister_groups - Flipper.register(:guinea_pigs) do |actor| - guinea_pigs.include?(actor.thing) - end + described_class.enable(key) end - it 'has no effect' do - expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true) - end - end - end - - describe 'remove_opt_out' do - subject { described_class.remove_opt_out(key, thing) } - - let(:key) { :awesome_feature } - - before do - stub_feature_flag_definition(key) - described_class.enable(key) - described_class.opt_out(key, thing) - end - - context 'when thing is an actor' do - let_it_be(:thing) { create(:project) } - - it 're-enables this thing' do - expect { subject }.to change { thing_enabled? }.from(false).to(true) + it 'does not mark this thing as disabled' do + expect { subject }.not_to change { thing_enabled? 
}.from(true) end it 'does not change the global value' do expect { subject }.not_to change { described_class.enabled?(key) }.from(true) end + end + end + end - it_behaves_like 'logging' do - let(:expected_action) { eq(:remove_opt_out) } - let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } } - end + describe '.group_ids_for' do + subject { described_class.group_ids_for(key) } - it 'removes the opt-out information' do - subject + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project) } + let_it_be(:user) { create(:user) } - flag = described_class.get(key) + let(:key) { :awesome_feature } - expect(flag.actors_value).to be_empty - end + it 'returns empty array' do + expect(subject).to be_empty + end + + context 'when group actor is enabled' do + before do + described_class.enable(key, group) end - context 'when thing is a group' do - let(:thing) { Feature.group(:guinea_pigs) } - let(:guinea_pigs) { create_list(:user, 3) } - - before do - Feature.reset - Flipper.unregister_groups - Flipper.register(:guinea_pigs) do |actor| - guinea_pigs.include?(actor.thing) - end - end - - it 'has no effect' do - expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true) - end + it 'returns the group id' do + expect(subject).to eq([group.id.to_s]) end end - describe '.enable_percentage_of_time' do - subject { described_class.enable_percentage_of_time(key, percentage) } - - let(:key) { :awesome_feature } - let(:percentage) { 50 } - - it_behaves_like 'logging' do - let(:expected_action) { :enable_percentage_of_time } - let(:expected_extra) { { "extra.percentage" => percentage.to_s } } - end - - context 'when the flag is on' do - before do - described_class.enable(key) - end - - it 'fails with InvalidOperation' do - expect { subject }.to raise_error(described_class::InvalidOperation) - end - end - end - - describe '.disable_percentage_of_time' do - subject { described_class.disable_percentage_of_time(key) } - - 
let(:key) { :awesome_feature } - - it_behaves_like 'logging' do - let(:expected_action) { :disable_percentage_of_time } - let(:expected_extra) { {} } - end - end - - describe '.enable_percentage_of_actors' do - subject { described_class.enable_percentage_of_actors(key, percentage) } - - let(:key) { :awesome_feature } - let(:percentage) { 50 } - - it_behaves_like 'logging' do - let(:expected_action) { :enable_percentage_of_actors } - let(:expected_extra) { { "extra.percentage" => percentage.to_s } } - end - - context 'when the flag is on' do - before do - described_class.enable(key) - end - - it 'fails with InvalidOperation' do - expect { subject }.to raise_error(described_class::InvalidOperation) - end - end - end - - describe '.disable_percentage_of_actors' do - subject { described_class.disable_percentage_of_actors(key) } - - let(:key) { :awesome_feature } - - it_behaves_like 'logging' do - let(:expected_action) { :disable_percentage_of_actors } - let(:expected_extra) { {} } - end - end - - describe '.remove' do - subject { described_class.remove(key) } - - let(:key) { :awesome_feature } - let(:actor) { create(:user) } - + context 'when global flag is enabled' do before do described_class.enable(key) end + it 'returns empty array' do + expect(subject).to be_empty + end + end + + context 'when project actor is enabled' do + before do + described_class.enable(key, project) + end + + it 'returns empty array' do + expect(subject).to be_empty + end + end + + context 'when user actor is enabled' do + before do + described_class.enable(key, user) + end + + it 'returns empty array' do + expect(subject).to be_empty + end + end + end + + describe 'opt_out' do + subject { described_class.opt_out(key, thing) } + + let(:key) { :awesome_feature } + + before do + stub_feature_flag_definition(key) + described_class.enable(key) + end + + context 'when thing is an actor' do + let_it_be(:thing) { create(:project) } + + it 'marks this thing as disabled' do + expect { subject }.to 
change { thing_enabled? }.from(true).to(false) + end + + it 'does not change the global value' do + expect { subject }.not_to change { described_class.enabled?(key) }.from(true) + end + it_behaves_like 'logging' do - let(:expected_action) { :remove } - let(:expected_extra) { {} } + let(:expected_action) { eq(:opt_out) } + let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } } end - context 'for a non-persisted feature' do - it 'returns nil' do - expect(described_class.remove(:non_persisted_feature_flag)).to be_nil - end + it 'stores the opt-out information as a gate' do + subject - it 'returns true, and cleans up' do - expect(subject).to be_truthy - expect(described_class.persisted_names).not_to include(key) - end + flag = described_class.get(key) + + expect(flag.actors_value).to include(include(thing.flipper_id)) + expect(flag.actors_value).not_to include(thing.flipper_id) end end - describe '.log_feature_flag_states?' do - let(:log_state_changes) { false } - let(:milestone) { "0.0" } - let(:flag_name) { :some_flag } - let(:flag_type) { 'development' } + context 'when thing is a group' do + let(:thing) { described_class.group(:guinea_pigs) } + let(:guinea_pigs) { create_list(:user, 3) } before do - Feature.enable(:feature_flag_state_logs) - Feature.enable(:some_flag) - - allow(Feature).to receive(:log_feature_flag_states?).and_return(false) - allow(Feature).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original - allow(Feature).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original - - stub_feature_flag_definition(flag_name, - type: flag_type, - milestone: milestone, - log_state_changes: log_state_changes) - end - - subject { described_class.log_feature_flag_states?(flag_name) } - - context 'when flag is feature_flag_state_logs' do - let(:milestone) { "14.6" } - let(:flag_name) { :feature_flag_state_logs } - let(:flag_type) { 'ops' } - let(:log_state_changes) { true } - - it { is_expected.to be_falsey 
} - end - - context 'when flag is old' do - it { is_expected.to be_falsey } - end - - context 'when flag is old while log_state_changes is not present ' do - let(:log_state_changes) { nil } - - it { is_expected.to be_falsey } - end - - context 'when flag is old but log_state_changes is true' do - let(:log_state_changes) { true } - - it { is_expected.to be_truthy } - end - - context 'when flag is new and not feature_flag_state_logs' do - let(:milestone) { "14.6" } - - before do - stub_version('14.5.123', 'deadbeef') + described_class.reset + Flipper.unregister_groups + Flipper.register(:guinea_pigs) do |actor| + guinea_pigs.include?(actor.thing) end - - it { is_expected.to be_truthy } end - context 'when milestone is nil' do - let(:milestone) { nil } + it 'has no effect' do + expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true) + end + end + end - it { is_expected.to be_falsey } + describe 'remove_opt_out' do + subject { described_class.remove_opt_out(key, thing) } + + let(:key) { :awesome_feature } + + before do + stub_feature_flag_definition(key) + described_class.enable(key) + described_class.opt_out(key, thing) + end + + context 'when thing is an actor' do + let_it_be(:thing) { create(:project) } + + it 're-enables this thing' do + expect { subject }.to change { thing_enabled? 
}.from(false).to(true) + end + + it 'does not change the global value' do + expect { subject }.not_to change { described_class.enabled?(key) }.from(true) + end + + it_behaves_like 'logging' do + let(:expected_action) { eq(:remove_opt_out) } + let(:expected_extra) { { "extra.thing" => thing.flipper_id.to_s } } + end + + it 'removes the opt-out information' do + subject + + flag = described_class.get(key) + + expect(flag.actors_value).to be_empty end end - context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do - let(:actor) { stub_feature_flag_gate('CustomActor:5') } - let(:another_actor) { stub_feature_flag_gate('CustomActor:10') } - - # This is a bit unpleasant. For these tests we want to simulate stale reads - # from the database (due to database load balancing). A simple way to do - # that is to stub the response on the adapter Flipper uses for reading from - # the database. However, there isn't a convenient API for this. We know that - # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can - # find it that way. 
- let(:active_record_adapter) do - adapter = described_class.flipper - - loop do - break adapter unless adapter.instance_variable_get(:@adapter) - - adapter = adapter.instance_variable_get(:@adapter) - end - end + context 'when thing is a group' do + let(:thing) { described_class.group(:guinea_pigs) } + let(:guinea_pigs) { create_list(:user, 3) } before do - stub_feature_flag_definition(:enabled_feature_flag) + described_class.reset + Flipper.unregister_groups + Flipper.register(:guinea_pigs) do |actor| + guinea_pigs.include?(actor.thing) + end end - it 'gives the correct value when enabling for an additional actor' do - described_class.enable(:enabled_feature_flag, actor) - initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag)) + it 'has no effect' do + expect { subject }.not_to change { described_class.enabled?(key, guinea_pigs.first) }.from(true) + end + end + end - # This should only be enabled for `actor` - expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) - expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false) - expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + describe '.enable_percentage_of_time' do + subject { described_class.enable_percentage_of_time(key, percentage) } - # Enable for `another_actor` and simulate a stale read - described_class.enable(:enabled_feature_flag, another_actor) - allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values) + let(:key) { :awesome_feature } + let(:percentage) { 50 } - # Should read from the cache and be enabled for both of these actors - expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) - expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true) - expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + it_behaves_like 'logging' do + let(:expected_action) { :enable_percentage_of_time } + let(:expected_extra) { { 
"extra.percentage" => percentage.to_s } } + end + + context 'when the flag is on' do + before do + described_class.enable(key) end - it 'gives the correct value when enabling for percentage of time' do - described_class.enable_percentage_of_time(:enabled_feature_flag, 10) - initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag)) + it 'fails with InvalidOperation' do + expect { subject }.to raise_error(described_class::InvalidOperation) + end + end + end - # Test against `gate_values` directly as otherwise it would be non-determistic - expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10) + describe '.disable_percentage_of_time' do + subject { described_class.disable_percentage_of_time(key) } - # Enable 50% of time and simulate a stale read - described_class.enable_percentage_of_time(:enabled_feature_flag, 50) - allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values) + let(:key) { :awesome_feature } - # Should read from the cache and be enabled 50% of the time - expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50) + it_behaves_like 'logging' do + let(:expected_action) { :disable_percentage_of_time } + let(:expected_extra) { {} } + end + end + + describe '.enable_percentage_of_actors' do + subject { described_class.enable_percentage_of_actors(key, percentage) } + + let(:key) { :awesome_feature } + let(:percentage) { 50 } + + it_behaves_like 'logging' do + let(:expected_action) { :enable_percentage_of_actors } + let(:expected_extra) { { "extra.percentage" => percentage.to_s } } + end + + context 'when the flag is on' do + before do + described_class.enable(key) end - it 'gives the correct value when disabling the flag' do - described_class.enable(:enabled_feature_flag, actor) - described_class.enable(:enabled_feature_flag, another_actor) - initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag)) 
+ it 'fails with InvalidOperation' do + expect { subject }.to raise_error(described_class::InvalidOperation) + end + end + end - # This be enabled for `actor` and `another_actor` - expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) - expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true) - expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + describe '.disable_percentage_of_actors' do + subject { described_class.disable_percentage_of_actors(key) } - # Disable for `another_actor` and simulate a stale read - described_class.disable(:enabled_feature_flag, another_actor) - allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values) + let(:key) { :awesome_feature } - # Should read from the cache and be enabled only for `actor` - expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) - expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false) - expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + it_behaves_like 'logging' do + let(:expected_action) { :disable_percentage_of_actors } + let(:expected_extra) { {} } + end + end + + describe '.remove' do + subject { described_class.remove(key) } + + let(:key) { :awesome_feature } + let(:actor) { create(:user) } + + before do + described_class.enable(key) + end + + it_behaves_like 'logging' do + let(:expected_action) { :remove } + let(:expected_extra) { {} } + end + + context 'for a non-persisted feature' do + it 'returns nil' do + expect(described_class.remove(:non_persisted_feature_flag)).to be_nil end - it 'gives the correct value when deleting the flag' do - described_class.enable(:enabled_feature_flag, actor) - initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag)) + it 'returns true, and cleans up' do + expect(subject).to be_truthy + expect(described_class.persisted_names).not_to include(key) + end + end + end - # This should only 
be enabled for `actor` - expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) - expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + describe '.log_feature_flag_states?' do + let(:log_state_changes) { false } + let(:milestone) { "0.0" } + let(:flag_name) { :some_flag } + let(:flag_type) { 'development' } - # Remove and simulate a stale read - described_class.remove(:enabled_feature_flag) - allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values) + before do + described_class.enable(:feature_flag_state_logs) + described_class.enable(:some_flag) - # Should read from the cache and be disabled everywhere - expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false) - expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + allow(described_class).to receive(:log_feature_flag_states?).and_return(false) + allow(described_class).to receive(:log_feature_flag_states?).with(:feature_flag_state_logs).and_call_original + allow(described_class).to receive(:log_feature_flag_states?).with(:some_flag).and_call_original + + stub_feature_flag_definition(flag_name, + type: flag_type, + milestone: milestone, + log_state_changes: log_state_changes) + end + + subject { described_class.log_feature_flag_states?(flag_name) } + + context 'when flag is feature_flag_state_logs' do + let(:milestone) { "14.6" } + let(:flag_name) { :feature_flag_state_logs } + let(:flag_type) { 'ops' } + let(:log_state_changes) { true } + + it { is_expected.to be_falsey } + end + + context 'when flag is old' do + it { is_expected.to be_falsey } + end + + context 'when flag is old while log_state_changes is not present ' do + let(:log_state_changes) { nil } + + it { is_expected.to be_falsey } + end + + context 'when flag is old but log_state_changes is true' do + let(:log_state_changes) { true } + + it { is_expected.to be_truthy } + end + + context 'when flag is new and not feature_flag_state_logs' do + let(:milestone) 
{ "14.6" } + + before do + stub_version('14.5.123', 'deadbeef') + end + + it { is_expected.to be_truthy } + end + + context 'when milestone is nil' do + let(:milestone) { nil } + + it { is_expected.to be_falsey } + end + end + + context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do + let(:actor) { stub_feature_flag_gate('CustomActor:5') } + let(:another_actor) { stub_feature_flag_gate('CustomActor:10') } + + # This is a bit unpleasant. For these tests we want to simulate stale reads + # from the database (due to database load balancing). A simple way to do + # that is to stub the response on the adapter Flipper uses for reading from + # the database. However, there isn't a convenient API for this. We know that + # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can + # find it that way. + let(:active_record_adapter) do + adapter = described_class.flipper + + loop do + break adapter unless adapter.instance_variable_get(:@adapter) + + adapter = adapter.instance_variable_get(:@adapter) end end - describe Feature::Target do - describe '#targets' do - let(:project) { create(:project) } - let(:group) { create(:group) } - let(:user_name) { project.first_owner.username } + before do + stub_feature_flag_definition(:enabled_feature_flag) + end + + it 'gives the correct value when enabling for an additional actor' do + described_class.enable(:enabled_feature_flag, actor) + initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag)) + + # This should only be enabled for `actor` + expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) + expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false) + expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + + # Enable for `another_actor` and simulate a stale read + described_class.enable(:enabled_feature_flag, another_actor) + 
allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values) + + # Should read from the cache and be enabled for both of these actors + expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) + expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true) + expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + end + + it 'gives the correct value when enabling for percentage of time' do + described_class.enable_percentage_of_time(:enabled_feature_flag, 10) + initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag)) + + # Test against `gate_values` directly as otherwise it would be non-deterministic + expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10) + + # Enable 50% of time and simulate a stale read + described_class.enable_percentage_of_time(:enabled_feature_flag, 50) + allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values) + + # Should read from the cache and be enabled 50% of the time + expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50) + end + + it 'gives the correct value when disabling the flag' do + described_class.enable(:enabled_feature_flag, actor) + described_class.enable(:enabled_feature_flag, another_actor) + initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag)) + + # This should be enabled for `actor` and `another_actor` + expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) + expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true) + expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + + # Disable for `another_actor` and simulate a stale read + described_class.disable(:enabled_feature_flag, another_actor) + allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values) + + # Should read from the cache and be enabled 
only for `actor` + expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) + expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false) + expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + end + + it 'gives the correct value when deleting the flag' do + described_class.enable(:enabled_feature_flag, actor) + initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag)) + + # This should only be enabled for `actor` + expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true) + expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + + # Remove and simulate a stale read + described_class.remove(:enabled_feature_flag) + allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values) + + # Should read from the cache and be disabled everywhere + expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false) + expect(described_class.enabled?(:enabled_feature_flag)).to be(false) + end + end + + describe Feature::Target do + describe '#targets' do + let(:project) { create(:project) } + let(:group) { create(:group) } + let(:user_name) { project.first_owner.username } + + subject do + described_class.new( + user: user_name, + project: project.full_path, + group: group.full_path, + repository: project.repository.full_path + ) + end + + it 'returns all found targets' do + expect(subject.targets).to be_an(Array) + expect(subject.targets).to eq([project.first_owner, project, group, project.repository]) + end + + context 'when repository target works with different types of repositories' do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, :wiki_repo, group: group) } + let_it_be(:project_in_user_namespace) { create(:project, namespace: create(:user).namespace) } + let(:personal_snippet) { create(:personal_snippet) } + let(:project_snippet) { create(:project_snippet, project: project) } + + 
let(:targets) do + [ + project, + project.wiki, + project_in_user_namespace, + personal_snippet, + project_snippet + ] + end subject do described_class.new( - user: user_name, - project: project.full_path, - group: group.full_path, - repository: project.repository.full_path + repository: targets.map { |t| t.repository.full_path }.join(",") ) end it 'returns all found targets' do expect(subject.targets).to be_an(Array) - expect(subject.targets).to eq([project.first_owner, project, group, project.repository]) - end - - context 'when repository target works with different types of repositories' do - let_it_be(:group) { create(:group) } - let_it_be(:project) { create(:project, :wiki_repo, group: group) } - let_it_be(:project_in_user_namespace) { create(:project, namespace: create(:user).namespace) } - let(:personal_snippet) { create(:personal_snippet) } - let(:project_snippet) { create(:project_snippet, project: project) } - - let(:targets) do - [ - project, - project.wiki, - project_in_user_namespace, - personal_snippet, - project_snippet - ] - end - - subject do - described_class.new( - repository: targets.map { |t| t.repository.full_path }.join(",") - ) - end - - it 'returns all found targets' do - expect(subject.targets).to be_an(Array) - expect(subject.targets).to eq(targets.map(&:repository)) - end + expect(subject.targets).to eq(targets.map(&:repository)) end end end diff --git a/spec/lib/gitlab/database/load_balancing/host_spec.rb b/spec/lib/gitlab/database/load_balancing/host_spec.rb index f87ceac0a89..fa2230176e2 100644 --- a/spec/lib/gitlab/database/load_balancing/host_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/host_spec.rb @@ -315,8 +315,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host, feature_category: :databas let(:load_balancer_ignore_replication_lag_time) { false } before do - # Enable feature flags in the load balancer. 
They are enabled in production and relate to behavior in these specs - stub_env(Feature::BypassLoadBalancer::FLAG, 'true') stub_feature_flags(load_balancer_double_replication_lag_time: load_balancer_double_replication_lag_time) stub_feature_flags(load_balancer_ignore_replication_lag_time: load_balancer_ignore_replication_lag_time) end diff --git a/spec/lib/web_ide/settings/extensions_gallery_metadata_generator_spec.rb b/spec/lib/web_ide/settings/extensions_gallery_metadata_generator_spec.rb index 4de5ce102a1..7a68f0a9998 100644 --- a/spec/lib/web_ide/settings/extensions_gallery_metadata_generator_spec.rb +++ b/spec/lib/web_ide/settings/extensions_gallery_metadata_generator_spec.rb @@ -77,6 +77,8 @@ RSpec.describe WebIde::Settings::ExtensionsGalleryMetadataGenerator, :web_ide_fa before do allow(user).to receive(:extensions_marketplace_opt_in_status) { opt_in_status.to_s } + # EE feature has to be stubbed since we run EE code through CE tests + allow(user).to receive(:enterprise_user?).and_return(false) allow(enums).to receive(:statuses).and_return({ unset: :unset, enabled: :enabled, disabled: :disabled }) end diff --git a/spec/migrations/20240806222106_add_dev_widget_to_tasks_spec.rb b/spec/migrations/20240806222106_add_dev_widget_to_tasks_spec.rb new file mode 100644 index 00000000000..f6c4f767bb4 --- /dev/null +++ b/spec/migrations/20240806222106_add_dev_widget_to_tasks_spec.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_migration! 
+ +RSpec.describe AddDevWidgetToTasks, :migration, feature_category: :team_planning do + it_behaves_like 'migration that adds a widget to a work item type' do + let(:target_type_enum_value) { described_class::TASK_ENUM_VALUE } + let(:target_type) { :task } + end +end diff --git a/spec/models/virtual_registries/packages/maven/upstream_spec.rb b/spec/models/virtual_registries/packages/maven/upstream_spec.rb index d94be8b01c6..fb220446d69 100644 --- a/spec/models/virtual_registries/packages/maven/upstream_spec.rb +++ b/spec/models/virtual_registries/packages/maven/upstream_spec.rb @@ -135,4 +135,43 @@ RSpec.describe VirtualRegistries::Packages::Maven::Upstream, type: :model, featu expect(upstream_read.password).to eq('test') end end + + describe '#url_for' do + subject { upstream.url_for(path) } + + where(:path, :expected_url) do + 'path' | 'http://test.maven/path' + '' | 'http://test.maven/' + '/path' | 'http://test.maven/path' + '/sub/path' | 'http://test.maven/sub/path' + end + + with_them do + before do + upstream.url = 'http://test.maven/' + end + + it { is_expected.to eq(expected_url) } + end + end + + describe '#headers' do + subject { upstream.headers } + + where(:username, :password, :expected_headers) do + 'user' | 'pass' | { Authorization: 'Basic dXNlcjpwYXNz' } + 'user' | '' | {} + '' | 'pass' | {} + '' | '' | {} + end + + with_them do + before do + upstream.username = username + upstream.password = password + end + + it { is_expected.to eq(expected_headers) } + end + end end diff --git a/spec/requests/api/virtual_registries/packages/maven_spec.rb b/spec/requests/api/virtual_registries/packages/maven_spec.rb new file mode 100644 index 00000000000..877c9253201 --- /dev/null +++ b/spec/requests/api/virtual_registries/packages/maven_spec.rb @@ -0,0 +1,197 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::VirtualRegistries::Packages::Maven, feature_category: :virtual_registry do + using RSpec::Parameterized::TableSyntax + 
include WorkhorseHelpers + include HttpBasicAuthHelpers + + let_it_be(:registry) { create(:virtual_registries_packages_maven_registry, :with_upstream) } + let_it_be(:project) { create(:project, namespace: registry.group) } + let_it_be(:user) { project.creator } + + let(:upstream) { registry.upstream } + + describe 'GET /api/v4/virtual_registries/packages/maven/:id/*path' do + let(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' } + let(:url) { "/virtual_registries/packages/maven/#{registry.id}/#{path}" } + let(:service_response) do + ServiceResponse.success( + payload: { action: :workhorse_send_url, + action_params: { url: upstream.url_for(path), headers: upstream.headers } } + ) + end + + let(:service_double) do + instance_double(::VirtualRegistries::Packages::Maven::HandleFileRequestService, execute: service_response) + end + + before do + allow(::VirtualRegistries::Packages::Maven::HandleFileRequestService) + .to receive(:new) + .with(registry: registry, current_user: user, params: { path: path }) + .and_return(service_double) + stub_config(dependency_proxy: { enabled: true }) # not enabled by default + end + + subject(:request) do + get api(url), headers: headers + end + + shared_examples 'returning the workhorse send_url response' do + it 'returns a workhorse send_url response' do + request + + expect(response).to have_gitlab_http_status(:ok) + expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with('send-url:') + expect(response.headers['Content-Type']).to eq('application/octet-stream') + expect(response.headers['Content-Length'].to_i).to eq(0) + expect(response.body).to eq('') + + send_data_type, send_data = workhorse_send_data + + expected_headers = upstream.headers.deep_stringify_keys.deep_transform_values do |value| + [value] + end + + expected_resp_headers = described_class::NO_BROWSER_EXECUTION_RESPONSE_HEADERS.deep_transform_values do |value| + [value] + end + + expect(send_data_type).to eq('send-url') + 
expect(send_data['URL']).to be_present + expect(send_data['AllowRedirects']).to be_truthy + expect(send_data['DialTimeout']).to eq('10s') + expect(send_data['ResponseHeaderTimeout']).to eq('10s') + expect(send_data['ErrorResponseStatus']).to eq(502) + expect(send_data['TimeoutResponseStatus']).to eq(504) + expect(send_data['Header']).to eq(expected_headers) + expect(send_data['ResponseHeaders']).to eq(expected_resp_headers) + end + end + + context 'for authentication' do + context 'with a personal access token' do + let_it_be(:personal_access_token) { create(:personal_access_token, user: user) } + + context 'when sent by headers' do + let(:headers) { { 'Private-Token' => personal_access_token.token } } + + it_behaves_like 'returning the workhorse send_url response' + end + + context 'when sent by basic auth' do + let(:headers) { basic_auth_header(user.username, personal_access_token.token) } + + it_behaves_like 'returning the workhorse send_url response' + end + end + + context 'with a deploy token' do + let_it_be(:deploy_token) do + create(:deploy_token, :group, groups: [registry.group], read_virtual_registry: true) + end + + let_it_be(:user) { deploy_token } + + context 'when sent by headers' do + let(:headers) { { 'Deploy-Token' => deploy_token.token } } + + it_behaves_like 'returning the workhorse send_url response' + end + + context 'when sent by basic auth' do + let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) } + + it_behaves_like 'returning the workhorse send_url response' + end + end + + context 'with ci job token' do + let_it_be(:job) { create(:ci_build, user: user, status: :running, project: project) } + + context 'when sent by headers' do + let(:headers) { { 'Job-Token' => job.token } } + + it_behaves_like 'returning the workhorse send_url response' + end + + context 'when sent by basic auth' do + let(:headers) { basic_auth_header(::Gitlab::Auth::CI_JOB_USER, job.token) } + + it_behaves_like 'returning the workhorse send_url 
response' + end + end + end + + context 'with a valid user' do + let_it_be(:personal_access_token) { create(:personal_access_token, user: user) } + let(:headers) { { 'Private-Token' => personal_access_token.token } } + + context 'with service response errors' do + where(:reason, :expected_status) do + :path_not_present | :bad_request + :unauthorized | :unauthorized + :no_upstreams | :bad_request + :file_not_found_on_upstreams | :not_found + :upstream_not_available | :bad_request + end + + with_them do + let(:service_response) do + ServiceResponse.error(message: 'error', reason: reason) + end + + it "returns a #{params[:expected_status]} response" do + request + + expect(response).to have_gitlab_http_status(expected_status) + expect(response.body).to include('error') unless expected_status == :unauthorized + end + end + end + + context 'with feature flag virtual_registry_maven disabled' do + before do + stub_feature_flags(virtual_registry_maven: false) + end + + it_behaves_like 'returning response status', :not_found + end + + context 'with a web browser' do + described_class::MAJOR_BROWSERS.each do |browser| + context "when accessing with a #{browser} browser" do + before do + allow_next_instance_of(::Browser) do |b| + allow(b).to receive("#{browser}?").and_return(true) + end + end + + it 'returns a bad request response' do + request + + expect(response).to have_gitlab_http_status(:bad_request) + expect(response.body).to include(described_class::WEB_BROWSER_ERROR_MESSAGE) + end + end + end + end + + context 'with the dependency proxy disabled' do + before do + stub_config(dependency_proxy: { enabled: false }) + end + + it_behaves_like 'returning response status', :not_found + end + + context 'as anonymous' do + let(:headers) { {} } + + it_behaves_like 'returning response status', :unauthorized + end + end + end +end diff --git a/spec/requests/groups/group_members_controller_spec.rb b/spec/requests/groups/group_members_controller_spec.rb index 
1c00a144af6..bb331ba7f71 100644 --- a/spec/requests/groups/group_members_controller_spec.rb +++ b/spec/requests/groups/group_members_controller_spec.rb @@ -31,4 +31,73 @@ RSpec.describe Groups::GroupMembersController, feature_category: :groups_and_pro it_behaves_like 'request_accessable' end + + describe 'GET /groups/*group_id/-/group_members/bulk_reassignment_file' do + let_it_be(:membershipable) do + create(:group, :public).tap do |group| + group.add_owner(user) + end + end + + subject(:request) do + get bulk_reassignment_file_group_group_members_path(group_id: membershipable) + end + + context 'when not signed in' do + it 'forbids access to the endpoint' do + request + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + + context 'when signed in' do + before do + sign_in(user) + end + + it 'responds with CSV data' do + request + + expect(response).to have_gitlab_http_status(:success) + end + + context 'and the user is not a group owner' do + let_it_be(:membershipable) { create(:group, :public) } + + it 'forbids access to the endpoint' do + request + + expect(response).to have_gitlab_http_status(:forbidden) + end + end + + context 'and the CSV is not generated properly' do + before do + allow_next_instance_of(Import::SourceUsers::GenerateCsvService) do |service| + allow(service).to receive(:execute).and_return(ServiceResponse.error(message: 'my error message')) + end + end + + it 'redirects with an error' do + request + + expect(response).to be_redirect + expect(flash[:alert]).to eq('my error message') + end + end + + context 'when :importer_user_mapping_reassignment_csv is disabled' do + before do + stub_feature_flags(importer_user_mapping_reassignment_csv: false) + end + + it 'responds with 404' do + request + + expect(response).to have_gitlab_http_status(:not_found) + end + end + end + end end diff --git a/spec/services/ci/click_house/data_ingestion/finished_pipelines_sync_service_spec.rb 
b/spec/services/ci/click_house/data_ingestion/finished_pipelines_sync_service_spec.rb index 29c9cac7d10..07a6208d557 100644 --- a/spec/services/ci/click_house/data_ingestion/finished_pipelines_sync_service_spec.rb +++ b/spec/services/ci/click_house/data_ingestion/finished_pipelines_sync_service_spec.rb @@ -223,20 +223,6 @@ RSpec.describe Ci::ClickHouse::DataIngestion::FinishedPipelinesSyncService, end end - context 'when the ci_pipelines_data_ingestion_to_click_house feature flag is off' do - before do - stub_feature_flags(ci_pipelines_data_ingestion_to_click_house: false) - end - - it 'skips execution' do - is_expected.to have_attributes({ - status: :error, - message: 'Feature ci_pipelines_data_ingestion_to_click_house is disabled', - reason: :disabled - }) - end - end - def create_sync_events(*pipelines) pipelines.each do |pipeline| Ci::FinishedPipelineChSyncEvent.new( diff --git a/spec/services/ci/pipeline_trigger_service_spec.rb b/spec/services/ci/pipeline_trigger_service_spec.rb index e51b106bcba..aa1b307af67 100644 --- a/spec/services/ci/pipeline_trigger_service_spec.rb +++ b/spec/services/ci/pipeline_trigger_service_spec.rb @@ -185,8 +185,8 @@ RSpec.describe Ci::PipelineTriggerService, feature_category: :continuous_integra end end - context 'when params have an existsed job token' do - context 'when params have an existsed ref' do + context 'when params have a valid job token' do + context 'when params have an existing ref' do let(:params) { { token: job.token, ref: 'master', variables: nil } } it 'triggers a pipeline' do diff --git a/spec/services/import/source_users/generate_csv_service_spec.rb b/spec/services/import/source_users/generate_csv_service_spec.rb new file mode 100644 index 00000000000..1775487c0ca --- /dev/null +++ b/spec/services/import/source_users/generate_csv_service_spec.rb @@ -0,0 +1,87 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Import::SourceUsers::GenerateCsvService, feature_category: :importers do + 
let_it_be(:namespace) { create(:namespace) } + let_it_be(:current_user) { namespace.owner } + + let_it_be(:user_pending_assignment) { create(:import_source_user, :pending_reassignment, namespace: namespace) } + let_it_be(:user_awaiting_approval) { create(:import_source_user, :awaiting_approval, namespace: namespace) } + let_it_be(:rejected_user) { create(:import_source_user, :rejected, namespace: namespace) } + + subject(:service) { described_class.new(namespace, current_user: current_user) } + + describe '#execute' do + context 'when the user is a namespace owner', :aggregate_failures do + it 'returns spreadsheet data' do + result = service.execute + + expect(result).to be_success + + csv_data = CSV.parse(result.payload) + + expect(csv_data.size).to eq(3) + expect(csv_data[0]).to match_array(described_class::HEADERS) + + expect(csv_data).to include(an_array_matching([ + user_pending_assignment.source_hostname, + user_pending_assignment.import_type, + user_pending_assignment.source_user_identifier, + user_pending_assignment.source_name, + user_pending_assignment.source_username, + '', + '' + ])) + end + + it 'returns only data for this namespace' do + other_source_user = create(:import_source_user) + + result = service.execute + + csv_data = CSV.parse(result.payload) + source_user_identifiers = csv_data.pluck(2) + + expect(source_user_identifiers).not_to include(other_source_user.source_user_identifier) + end + + it 'returns only data for Import::SourceUser records with a re-assignable status' do + result = service.execute + + csv_data = CSV.parse(result.payload) + + source_user_identifiers = csv_data.pluck(2).drop(1) + expect(source_user_identifiers).to match_array([ + user_pending_assignment.source_user_identifier, + rejected_user.source_user_identifier + ]) + end + + context 'and there is no data to return' do + let(:namespace) { create(:namespace) } + + subject(:service) { described_class.new(namespace, current_user: namespace.owner) } + + it 'only returns the 
headers' do + result = service.execute + csv_data = CSV.parse(result.payload) + + expect(csv_data.size).to eq(1) + expect(csv_data[0]).to match_array(described_class::HEADERS) + end + end + end + + context 'when current user does not have permission' do + subject(:service) { described_class.new(namespace, current_user: create(:user)) } + + it 'returns error no permissions' do + result = service.execute + + expect(result).to be_error + expect(result.message).to eq('You do not have permission to view import source users for this namespace') + end + end + end +end diff --git a/spec/services/virtual_registries/packages/maven/handle_file_request_service_spec.rb b/spec/services/virtual_registries/packages/maven/handle_file_request_service_spec.rb new file mode 100644 index 00000000000..bee0d9bb58a --- /dev/null +++ b/spec/services/virtual_registries/packages/maven/handle_file_request_service_spec.rb @@ -0,0 +1,102 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe VirtualRegistries::Packages::Maven::HandleFileRequestService, :aggregate_failures, feature_category: :virtual_registry do + let_it_be(:registry) { create(:virtual_registries_packages_maven_registry, :with_upstream) } + let_it_be(:project) { create(:project, namespace: registry.group) } + + let(:user) { project.creator } + let(:upstream) { registry.upstream } + let(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' } + let(:upstream_resource_url) { upstream.url_for(path) } + let(:service) { described_class.new(registry: registry, current_user: user, params: { path: path }) } + + describe '#execute' do + subject(:execute) { service.execute } + + shared_examples 'returning a service response error response with' do |message:, reason:| + it 'returns an error' do + expect(execute).to be_a(ServiceResponse) + expect(execute).to be_error + expect(execute.message).to eq(message) + expect(execute.reason).to eq(reason) + end + end + + shared_examples 'returning a service response success 
response' do + before do + stub_external_registry_request + end + + it 'returns a success service response' do + expect(execute).to be_success + expect(execute.payload).to eq( + action: :workhorse_send_url, + action_params: { url: upstream_resource_url, headers: upstream.headers } + ) + end + end + + context 'with a User' do + it_behaves_like 'returning a service response success response' + + context 'with upstream returning an error' do + before do + stub_external_registry_request(status: 404) + end + + it_behaves_like 'returning a service response error response with', message: 'File not found on any upstream', + reason: :file_not_found_on_upstreams + end + + context 'with upstream head raising an error' do + before do + stub_external_registry_request(raise_error: true) + end + + it_behaves_like 'returning a service response error response with', message: 'Upstream not available', + reason: :upstream_not_available + end + end + + context 'with a DeployToken' do + let_it_be(:user) { create(:deploy_token, :group, groups: [registry.group], read_virtual_registry: true) } + + it_behaves_like 'returning a service response success response' + end + + context 'with no path' do + let(:path) { nil } + + it_behaves_like 'returning a service response error response with', message: 'Path not present', + reason: :path_not_present + end + + context 'with no user' do + let(:user) { nil } + + it_behaves_like 'returning a service response error response with', message: 'Unauthorized', reason: :unauthorized + end + + context 'with registry with no upstreams' do + before do + registry.upstream = nil + end + + it_behaves_like 'returning a service response error response with', message: 'No upstreams set', + reason: :no_upstreams + end + + def stub_external_registry_request(status: 200, raise_error: false) + request = stub_request(:head, upstream_resource_url) + .with(headers: upstream.headers) + + if raise_error + request.to_raise(Gitlab::HTTP::BlockedUrlError) + else + 
request.to_return(status: status, body: '') + end + end + end +end