Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2022-08-09 15:11:31 +00:00
parent 1f229cdc22
commit 283318c205
79 changed files with 1397 additions and 272 deletions


@ -1,44 +1,9 @@
---
# Cop supports --auto-correct.
Naming/RescuedExceptionsVariableName:
# Offense count: 269
# Temporarily disabled due to too many offenses
Enabled: false
Exclude:
- 'app/controllers/admin/projects_controller.rb'
- 'app/controllers/projects/google_cloud/deployments_controller.rb'
- 'app/controllers/projects/google_cloud/service_accounts_controller.rb'
- 'app/controllers/projects/merge_requests/drafts_controller.rb'
- 'app/controllers/projects/milestones_controller.rb'
- 'app/controllers/projects/mirrors_controller.rb'
- 'app/controllers/projects/repositories_controller.rb'
- 'app/controllers/projects_controller.rb'
- 'app/finders/repositories/changelog_tag_finder.rb'
- 'app/graphql/mutations/issues/move.rb'
- 'app/graphql/resolvers/ci/config_resolver.rb'
- 'app/graphql/resolvers/environments_resolver.rb'
- 'app/helpers/application_helper.rb'
- 'app/models/application_setting.rb'
- 'app/models/blob_viewer/metrics_dashboard_yml.rb'
- 'app/models/ci/build.rb'
- 'app/models/ci/deleted_object.rb'
- 'app/models/concerns/prometheus_adapter.rb'
- 'app/models/concerns/repository_storage_movable.rb'
- 'app/models/concerns/x509_serial_number_attribute.rb'
- 'app/models/integrations/base_issue_tracker.rb'
- 'app/models/integrations/discord.rb'
- 'app/models/integrations/jenkins.rb'
- 'app/models/integrations/jira.rb'
- 'app/models/integrations/packagist.rb'
- 'app/models/integrations/pipelines_email.rb'
- 'app/models/integrations/prometheus.rb'
- 'app/models/performance_monitoring/prometheus_dashboard.rb'
- 'app/models/personal_access_token.rb'
- 'app/models/project.rb'
- 'app/models/repository.rb'
- 'app/models/snippet_repository.rb'
- 'app/models/u2f_registration.rb'
- 'app/models/wiki.rb'
- 'app/models/ci/pipeline_artifact.rb'
- 'app/models/concerns/database_event_tracking.rb'
- 'app/services/branches/delete_service.rb'
- 'app/services/branches/validate_new_service.rb'
- 'app/services/ci/job_artifacts/create_service.rb'
@ -52,6 +17,8 @@ Naming/RescuedExceptionsVariableName:
- 'app/services/dependency_proxy/request_token_service.rb'
- 'app/services/design_management/copy_design_collection/copy_service.rb'
- 'app/services/git/base_hooks_service.rb'
- 'app/services/google_cloud/create_cloudsql_instance_service.rb'
- 'app/services/google_cloud/setup_cloudsql_instance_service.rb'
- 'app/services/grafana/proxy_service.rb'
- 'app/services/groups/update_shared_runners_service.rb'
- 'app/services/issues/relative_position_rebalancing_service.rb'
@ -71,23 +38,29 @@ Naming/RescuedExceptionsVariableName:
- 'app/services/projects/hashed_storage/base_repository_service.rb'
- 'app/services/projects/transfer_service.rb'
- 'app/services/prometheus/proxy_service.rb'
- 'app/services/quick_actions/interpret_service.rb'
- 'app/services/resource_access_tokens/revoke_service.rb'
- 'app/services/tags/create_service.rb'
- 'app/services/tags/destroy_service.rb'
- 'app/services/users/validate_manual_otp_service.rb'
- 'app/services/users/validate_push_otp_service.rb'
- 'app/services/verify_pages_domain_service.rb'
- 'app/services/web_hooks/log_destroy_service.rb'
- 'app/validators/js_regex_validator.rb'
- 'app/workers/concerns/limited_capacity/worker.rb'
- 'app/workers/concerns/packages/cleanup_artifact_worker.rb'
- 'app/workers/gitlab/jira_import/import_issue_worker.rb'
- 'app/workers/gitlab_service_ping_worker.rb'
- 'app/workers/issuable_export_csv_worker.rb'
- 'app/workers/namespaces/root_statistics_worker.rb'
- 'app/workers/namespaces/schedule_aggregation_worker.rb'
- 'app/workers/packages/go/sync_packages_worker.rb'
- 'app/workers/project_destroy_worker.rb'
- 'app/workers/projects/git_garbage_collect_worker.rb'
- 'app/workers/projects/inactive_projects_deletion_notification_worker.rb'
- 'app/workers/remove_expired_members_worker.rb'
- 'app/workers/users/create_statistics_worker.rb'
- 'config/initializers/enumerator_next_patch.rb'
- 'config/initializers/rspec_profiling.rb'
- 'config/initializers/wikicloth_redos_patch.rb'
- 'db/post_migrate/20210606143426_add_index_for_container_registry_access_level.rb'
@ -113,11 +86,17 @@ Naming/RescuedExceptionsVariableName:
- 'ee/app/services/geo/framework_repository_sync_service.rb'
- 'ee/app/services/geo/move_repository_service.rb'
- 'ee/app/services/geo/repository_base_sync_service.rb'
- 'ee/app/services/gitlab_subscriptions/preview_billable_user_change_service.rb'
- 'ee/app/services/incident_management/oncall_rotations/create_service.rb'
- 'ee/app/services/incident_management/oncall_rotations/edit_service.rb'
- 'ee/app/services/namespaces/free_user_cap/deactivate_members_over_limit_service.rb'
- 'ee/app/services/namespaces/free_user_cap/remove_group_group_links_outside_hierarchy_service.rb'
- 'ee/app/services/namespaces/free_user_cap/remove_project_group_links_outside_hierarchy_service.rb'
- 'ee/app/services/namespaces/free_user_cap/update_prevent_sharing_outside_hierarchy_service.rb'
- 'ee/app/services/projects/licenses/create_policy_service.rb'
- 'ee/app/services/projects/licenses/update_policy_service.rb'
- 'ee/app/services/security/ingestion/ingest_report_service.rb'
- 'ee/app/services/security/ingestion/tasks/update_vulnerability_uuids.rb'
- 'ee/app/services/security/orchestration/assign_service.rb'
- 'ee/app/services/security/store_grouped_scans_service.rb'
- 'ee/app/services/security/store_scan_service.rb'
@ -128,6 +107,7 @@ Naming/RescuedExceptionsVariableName:
- 'ee/app/workers/geo/file_removal_worker.rb'
- 'ee/app/workers/geo/repositories_clean_up_worker.rb'
- 'ee/app/workers/geo/scheduler/scheduler_worker.rb'
- 'ee/app/workers/namespaces/free_user_cap/remediation_worker.rb'
- 'ee/app/workers/refresh_license_compliance_checks_worker.rb'
- 'ee/app/workers/repository_update_mirror_worker.rb'
- 'ee/app/workers/sync_seat_link_request_worker.rb'
@ -139,7 +119,6 @@ Naming/RescuedExceptionsVariableName:
- 'ee/lib/gitlab/elastic/bulk_indexer.rb'
- 'ee/lib/gitlab/spdx/catalogue_gateway.rb'
- 'ee/lib/tasks/gitlab/seed/metrics.rake'
- 'lib/api/environments.rb'
- 'lib/api/helpers.rb'
- 'lib/api/helpers/label_helpers.rb'
- 'lib/api/issues.rb'
@ -151,8 +130,8 @@ Naming/RescuedExceptionsVariableName:
- 'lib/gitlab/auth/ldap/adapter.rb'
- 'lib/gitlab/auth/otp/strategies/forti_authenticator/manual_otp.rb'
- 'lib/gitlab/auth/otp/strategies/forti_authenticator/push_otp.rb'
- 'lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans.rb'
- 'lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid.rb'
- 'lib/gitlab/background_task.rb'
- 'lib/gitlab/ci/config/external/file/artifact.rb'
- 'lib/gitlab/ci/pipeline/chain/config/process.rb'
- 'lib/gitlab/ci/pipeline/chain/validate/external.rb'
@ -162,6 +141,7 @@ Naming/RescuedExceptionsVariableName:
- 'lib/gitlab/database/batch_counter.rb'
- 'lib/gitlab/database/load_balancing/load_balancer.rb'
- 'lib/gitlab/database/load_balancing/service_discovery.rb'
- 'lib/gitlab/database/lock_writes_manager.rb'
- 'lib/gitlab/database/reindexing/grafana_notifier.rb'
- 'lib/gitlab/git/keep_around.rb'
- 'lib/gitlab/gitaly_client/call.rb'
@ -190,6 +170,7 @@ Naming/RescuedExceptionsVariableName:
- 'lib/gitlab/tracking.rb'
- 'lib/gitlab/url_blocker.rb'
- 'lib/gitlab/usage/metrics/aggregates/aggregate.rb'
- 'lib/gitlab/usage/service_ping/instrumented_payload.rb'
- 'lib/gitlab/usage_data.rb'
- 'lib/gitlab/utils/usage_data.rb'
- 'lib/gitlab/verify/batch_verifier.rb'


@ -451,6 +451,7 @@ function outdentLines(textArea) {
}
function handleIndentOutdent(e, textArea) {
if (e.altKey || e.ctrlKey || e.shiftKey) return;
if (!e.metaKey) return;
switch (e.key) {


@ -125,10 +125,6 @@
padding-right: 25px;
}
.fa {
color: $gray-darkest;
}
&:hover {
border-color: $gray-darkest;
}
@ -148,10 +144,6 @@
text-overflow: ellipsis;
width: 160px;
.fa {
position: absolute;
}
.gl-spinner {
position: absolute;
top: 9px;
@ -387,10 +379,6 @@
margin: 0;
text-align: left;
text-overflow: inherit;
&.btn .fa:not(:last-child) {
margin-left: 5px;
}
}
> button.dropdown-epic-button {
@ -651,14 +639,12 @@
border-color: $blue-300;
box-shadow: 0 0 4px $dropdown-input-focus-shadow;
~ .fa,
~ .dropdown-input-clear {
color: $gray-700;
}
}
&:hover {
~ .fa,
~ .dropdown-input-clear {
color: $gray-700;
}
@ -716,14 +702,6 @@
z-index: 9;
background-color: $dropdown-loading-bg;
font-size: 28px;
.fa {
position: absolute;
top: 50%;
left: 50%;
margin-top: -14px;
margin-left: -14px;
}
}
.dropdown-label-box {


@ -1676,7 +1676,7 @@ svg.s16 {
background-color: #232150;
}
.identicon.bg3 {
background-color: #f1f1ff;
background-color: #1a1a40;
}
.identicon.bg4 {
background-color: #033464;


@ -98,6 +98,8 @@ $white-light: #2b2b2b;
$white-normal: #333;
$white-dark: #444;
$theme-indigo-50: #1a1a40;
$border-color: #4f4f4f;
$nav-active-bg: rgba(255, 255, 255, 0.08);


@ -48,8 +48,8 @@ class Admin::ProjectsController < Admin::ApplicationController
flash[:notice] = _("Project '%{project_name}' is in the process of being deleted.") % { project_name: @project.full_name }
redirect_to admin_projects_path, status: :found
rescue Projects::DestroyService::DestroyError => ex
redirect_to admin_projects_path, status: :found, alert: ex.message
rescue Projects::DestroyService::DestroyError => e
redirect_to admin_projects_path, status: :found, alert: e.message
end
# rubocop: disable CodeReuse/ActiveRecord


@ -40,9 +40,9 @@ class Projects::GoogleCloud::DeploymentsController < Projects::GoogleCloud::Base
redirect_to project_new_merge_request_path(project, merge_request: cloud_run_mr_params)
end
end
rescue Google::Apis::ClientError, Google::Apis::ServerError, Google::Apis::AuthorizationError => error
track_event('deployments#cloud_run', 'error_gcp', error)
flash[:warning] = _('Google Cloud Error - %{error}') % { error: error }
rescue Google::Apis::ClientError, Google::Apis::ServerError, Google::Apis::AuthorizationError => e
track_event('deployments#cloud_run', 'error_gcp', e)
flash[:warning] = _('Google Cloud Error - %{error}') % { error: e }
redirect_to project_google_cloud_deployments_path(project)
end


@ -26,9 +26,9 @@ class Projects::GoogleCloud::ServiceAccountsController < Projects::GoogleCloud::
track_event('service_accounts#index', 'success', js_data)
end
rescue Google::Apis::ClientError, Google::Apis::ServerError, Google::Apis::AuthorizationError => error
track_event('service_accounts#index', 'error_gcp', error)
flash[:warning] = _('Google Cloud Error - %{error}') % { error: error }
rescue Google::Apis::ClientError, Google::Apis::ServerError, Google::Apis::AuthorizationError => e
track_event('service_accounts#index', 'error_gcp', e)
flash[:warning] = _('Google Cloud Error - %{error}') % { error: e }
redirect_to project_google_cloud_configuration_path(project)
end
@ -45,9 +45,9 @@ class Projects::GoogleCloud::ServiceAccountsController < Projects::GoogleCloud::
track_event('service_accounts#create', 'success', response)
redirect_to project_google_cloud_configuration_path(project), notice: response.message
rescue Google::Apis::ClientError, Google::Apis::ServerError, Google::Apis::AuthorizationError => error
track_event('service_accounts#create', 'error_gcp', error)
flash[:warning] = _('Google Cloud Error - %{error}') % { error: error }
rescue Google::Apis::ClientError, Google::Apis::ServerError, Google::Apis::AuthorizationError => e
track_event('service_accounts#create', 'error_gcp', e)
flash[:warning] = _('Google Cloud Error - %{error}') % { error: e }
redirect_to project_google_cloud_configuration_path(project)
end
end


@ -72,9 +72,9 @@ class Projects::MergeRequests::DraftsController < Projects::MergeRequests::Appli
strong_memoize(:draft_note) do
draft_notes.find(params[:id])
end
rescue ActiveRecord::RecordNotFound => ex
rescue ActiveRecord::RecordNotFound => e
# draft_note is allowed to be nil in #publish
raise ex unless allow_nil
raise e unless allow_nil
end
def draft_notes


@ -92,8 +92,8 @@ class Projects::MilestonesController < Projects::ApplicationController
render json: { url: project_milestones_path(project) }
end
end
rescue Milestones::PromoteService::PromoteMilestoneError => error
redirect_to milestone, alert: error.message
rescue Milestones::PromoteService::PromoteMilestoneError => e
redirect_to milestone, alert: e.message
end
def flash_notice_for(milestone, group)


@ -58,8 +58,8 @@ class Projects::MirrorsController < Projects::ApplicationController
else
render json: lookup
end
rescue ArgumentError => err
render json: { message: err.message }, status: :bad_request
rescue ArgumentError => e
render json: { message: e.message }, status: :bad_request
end
private


@ -35,8 +35,8 @@ class Projects::RepositoriesController < Projects::ApplicationController
return if archive_not_modified?
send_git_archive @repository, **repo_params
rescue StandardError => ex
logger.error("#{self.class.name}: #{ex}")
rescue StandardError => e
logger.error("#{self.class.name}: #{e}")
git_not_found!
end


@ -193,8 +193,8 @@ class ProjectsController < Projects::ApplicationController
flash[:notice] = _("Project '%{project_name}' is in the process of being deleted.") % { project_name: @project.full_name }
redirect_to dashboard_projects_path, status: :found
rescue Projects::DestroyService::DestroyError => ex
redirect_to edit_project_path(@project), status: :found, alert: ex.message
rescue Projects::DestroyService::DestroyError => e
redirect_to edit_project_path(@project), status: :found, alert: e.message
end
def new_issuable_address
@ -227,10 +227,10 @@ class ProjectsController < Projects::ApplicationController
project_path(@project),
notice: _("Housekeeping successfully started")
)
rescue ::Repositories::HousekeepingService::LeaseTaken => ex
rescue ::Repositories::HousekeepingService::LeaseTaken => e
redirect_to(
edit_project_path(@project, anchor: 'js-project-advanced-settings'),
alert: ex.to_s
alert: e.to_s
)
end
@ -241,10 +241,10 @@ class ProjectsController < Projects::ApplicationController
edit_project_path(@project, anchor: 'js-export-project'),
notice: _("Project export started. A download link will be sent by email and made available on this page.")
)
rescue Project::ExportLimitExceeded => ex
rescue Project::ExportLimitExceeded => e
redirect_to(
edit_project_path(@project, anchor: 'js-export-project'),
alert: ex.to_s
alert: e.to_s
)
end


@ -0,0 +1,43 @@
# frozen_string_literal: true
module Groups
class AcceptingProjectTransfersFinder
def initialize(current_user)
@current_user = current_user
end
def execute
if Feature.disabled?(:include_groups_from_group_shares_in_project_transfer_locations)
return current_user.manageable_groups
end
groups_accepting_project_transfers =
[
current_user.manageable_groups,
managable_groups_originating_from_group_shares
]
groups = ::Group.from_union(groups_accepting_project_transfers)
groups.project_creation_allowed
end
private
attr_reader :current_user
def managable_groups_originating_from_group_shares
GroupGroupLink
.with_owner_or_maintainer_access
.groups_accessible_via(
groups_that_user_has_owner_or_maintainer_access_via_direct_membership
.select(:id)
)
end
def groups_that_user_has_owner_or_maintainer_access_via_direct_membership
# Only maintainers or above in a group have access to transfer projects to that group
current_user.owned_or_maintainers_groups
end
end
end
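A minimal usage sketch for the new finder (hedged; `user` here is an arbitrary placeholder record):
```ruby
# Hypothetical Rails console session; `user` is any User record.
user = User.find_by(username: 'example-user')

# Groups the user can transfer projects into: groups where the user is a direct
# owner or maintainer, plus groups (and their descendants) shared with those
# groups at owner/maintainer level, limited to groups that allow project creation.
Groups::AcceptingProjectTransfersFinder.new(user).execute
# => ActiveRecord::Relation of Group records
```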


@ -48,7 +48,7 @@ module Groups
if permission_scope_create_projects?
target_user.manageable_groups(include_groups_with_developer_maintainer_access: true)
elsif permission_scope_transfer_projects?
target_user.manageable_groups(include_groups_with_developer_maintainer_access: false)
Groups::AcceptingProjectTransfersFinder.new(target_user).execute # rubocop: disable CodeReuse/Finder
else
target_user.groups
end


@ -37,14 +37,14 @@ module Repositories
begin
regex = Gitlab::UntrustedRegexp.new(@regex)
rescue RegexpError => ex
rescue RegexpError => e
# The error messages produced by default are not very helpful, so we
# raise a better one here. We raise the specific error here so its
# message is displayed in the API (where we catch this specific
# error).
raise(
Gitlab::Changelog::Error,
"The regular expression to use for finding the previous tag for a version is invalid: #{ex.message}"
"The regular expression to use for finding the previous tag for a version is invalid: #{e.message}"
)
end


@ -19,8 +19,8 @@ module Mutations
begin
moved_issue = ::Issues::MoveService.new(project: source_project, current_user: current_user).execute(issue, target_project)
rescue ::Issues::MoveService::MoveError => error
errors = error.message
rescue ::Issues::MoveService::MoveError => e
errors = e.message
end
{


@ -38,8 +38,8 @@ module Resolvers
.validate(content, dry_run: dry_run)
response(result)
rescue GRPC::InvalidArgument => error
Gitlab::ErrorTracking.track_and_raise_exception(error, sha: sha)
rescue GRPC::InvalidArgument => e
Gitlab::ErrorTracking.track_and_raise_exception(e, sha: sha)
end
private


@ -22,8 +22,8 @@ module Resolvers
return unless project.present?
Environments::EnvironmentsFinder.new(project, context[:current_user], args).execute
rescue Environments::EnvironmentsFinder::InvalidStatesError => exception
raise Gitlab::Graphql::Errors::ArgumentError, exception.message
rescue Environments::EnvironmentsFinder::InvalidStatesError => e
raise Gitlab::Graphql::Errors::ArgumentError, e.message
end
end
end


@ -19,23 +19,23 @@ module ApplicationHelper
def dispensable_render(...)
render(...)
rescue StandardError => error
rescue StandardError => e
if Feature.enabled?(:dispensable_render)
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
nil
else
raise error
raise e
end
end
def dispensable_render_if_exists(...)
render_if_exists(...)
rescue StandardError => error
rescue StandardError => e
if Feature.enabled?(:dispensable_render)
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
nil
else
raise error
raise e
end
end


@ -790,10 +790,10 @@ class ApplicationSetting < ApplicationRecord
def parsed_kroki_url
@parsed_kroki_url ||= Gitlab::UrlBlocker.validate!(kroki_url, schemes: %w(http https), enforce_sanitization: true)[0]
rescue Gitlab::UrlBlocker::BlockedUrlError => error
rescue Gitlab::UrlBlocker::BlockedUrlError => e
self.errors.add(
:kroki_url,
"is not valid. #{error}"
"is not valid. #{e}"
)
end


@ -86,6 +86,18 @@ class AuditEvent < ApplicationRecord
end
end
def target_type
super || details[:target_type]
end
def target_id
details[:target_id]
end
def target_details
super || details[:target_details]
end
private
def sanitize_message


@ -36,10 +36,10 @@ module BlobViewer
yaml = ::Gitlab::Config::Loader::Yaml.new(blob.data).load_raw!
::PerformanceMonitoring::PrometheusDashboard.from_json(yaml)
[]
rescue Gitlab::Config::Loader::FormatError => error
["YAML syntax: #{error.message}"]
rescue ActiveModel::ValidationError => invalid
invalid.model.errors.messages.map { |messages| messages.join(': ') }
rescue Gitlab::Config::Loader::FormatError => e
["YAML syntax: #{e.message}"]
rescue ActiveModel::ValidationError => e
e.model.errors.messages.map { |messages| messages.join(': ') }
end
def exhaustive_metrics_dashboard_validation
@ -47,8 +47,8 @@ module BlobViewer
Gitlab::Metrics::Dashboard::Validator
.errors(yaml, dashboard_path: blob.path, project: project)
.map(&:message)
rescue Gitlab::Config::Loader::FormatError => error
[error.message]
rescue Gitlab::Config::Loader::FormatError => e
[e.message]
end
end
end


@ -342,8 +342,8 @@ module Ci
# rubocop: disable CodeReuse/ServiceClass
Ci::RetryJobService.new(build.project, build.user).execute(build)
# rubocop: enable CodeReuse/ServiceClass
rescue Gitlab::Access::AccessDeniedError => ex
Gitlab::AppLogger.error "Unable to auto-retry job #{build.id}: #{ex}"
rescue Gitlab::Access::AccessDeniedError => e
Gitlab::AppLogger.error "Unable to auto-retry job #{build.id}: #{e}"
end
end
end


@ -27,8 +27,8 @@ module Ci
def delete_file_from_storage
file.remove!
true
rescue StandardError => exception
Gitlab::ErrorTracking.track_exception(exception)
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
false
end
end


@ -62,8 +62,8 @@ module PrometheusAdapter
data: data,
last_update: Time.current.utc
}
rescue Gitlab::PrometheusClient::Error => err
{ success: false, result: err.message }
rescue Gitlab::PrometheusClient::Error => e
{ success: false, result: e.message }
end
def query_klass_for(query_name)


@ -50,8 +50,8 @@ module RepositoryStorageMovable
begin
storage_move.container.set_repository_read_only!(skip_git_transfer_check: true)
rescue StandardError => err
storage_move.add_error(err.message)
rescue StandardError => e
storage_move.add_error(e.message)
next false
end


@ -33,8 +33,8 @@ module X509SerialNumberAttribute
unless column.type == :binary
raise ArgumentError, "x509_serial_number_attribute #{name.inspect} is invalid since the column type is not :binary"
end
rescue StandardError => error
Gitlab::AppLogger.error "X509SerialNumberAttribute initialization: #{error.message}"
rescue StandardError => e
Gitlab::AppLogger.error "X509SerialNumberAttribute initialization: #{e.message}"
raise
end


@ -176,6 +176,16 @@ class Group < Namespace
.where(project_authorizations: { user_id: user_ids })
end
scope :project_creation_allowed, -> do
permitted_levels = [
::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS,
::Gitlab::Access::MAINTAINER_PROJECT_ACCESS,
nil
]
where(project_creation_level: permitted_levels)
end
class << self
def sort_by_attribute(method)
if method == 'storage_size_desc'


@ -14,6 +14,23 @@ class GroupGroupLink < ApplicationRecord
presence: true
scope :non_guests, -> { where('group_access > ?', Gitlab::Access::GUEST) }
scope :with_owner_or_maintainer_access, -> do
where(group_access: [Gitlab::Access::OWNER, Gitlab::Access::MAINTAINER])
end
scope :groups_accessible_via, -> (shared_with_group_ids) do
links = where(shared_with_group_id: shared_with_group_ids)
# a group share also gives you access to the descendants of the group being shared,
# so we must include the descendants as well in the result.
Group.id_in(links.select(:shared_group_id)).self_and_descendants
end
scope :groups_having_access_to, -> (shared_group_ids) do
links = where(shared_group_id: shared_group_ids)
Group.id_in(links.select(:shared_with_group_id))
end
scope :preload_shared_with_groups, -> { preload(:shared_with_group) }
scope :distinct_on_shared_with_group_id_with_group_access, -> do


@ -100,8 +100,8 @@ module Integrations
message = "#{self.type} received response #{response.code} when attempting to connect to #{self.project_url}"
result = true
end
rescue Gitlab::HTTP::Error, Timeout::Error, SocketError, Errno::ECONNRESET, Errno::ECONNREFUSED, OpenSSL::SSL::SSLError => error
message = "#{self.type} had an error when trying to connect to #{self.project_url}: #{error.message}"
rescue Gitlab::HTTP::Error, Timeout::Error, SocketError, Errno::ECONNRESET, Errno::ECONNREFUSED, OpenSSL::SSL::SSLError => e
message = "#{self.type} had an error when trying to connect to #{self.project_url}: #{e.message}"
end
log_info(message)
result


@ -88,8 +88,8 @@ module Integrations
embed.timestamp = Time.now.utc
end
end
rescue RestClient::Exception => error
log_error(error.message)
rescue RestClient::Exception => e
log_error(e.message)
false
end


@ -53,8 +53,8 @@ module Integrations
begin
result = execute(data)
return { success: false, result: result[:message] } if result[:http_status] != 200
rescue StandardError => error
return { success: false, result: error }
rescue StandardError => e
return { success: false, result: e }
end
{ success: true, result: result[:message] }


@ -364,8 +364,8 @@ module Integrations
)
true
rescue StandardError => error
log_exception(error, message: 'Issue transition failed', client_url: client_url)
rescue StandardError => e
log_exception(e, message: 'Issue transition failed', client_url: client_url)
false
end
@ -560,9 +560,9 @@ module Integrations
# Handle errors when doing Jira API calls
def jira_request
yield
rescue StandardError => error
@error = error
log_exception(error, message: 'Error sending message', client_url: client_url)
rescue StandardError => e
@error = e
log_exception(e, message: 'Error sending message', client_url: client_url)
nil
end


@ -56,8 +56,8 @@ module Integrations
begin
result = execute(data)
return { success: false, result: result[:message] } if result[:http_status] != 202
rescue StandardError => error
return { success: false, result: error }
rescue StandardError => e
return { success: false, result: e }
end
{ success: true, result: result[:message] }


@ -84,8 +84,8 @@ module Integrations
result = execute(data, force: true)
{ success: true, result: result }
rescue StandardError => error
{ success: false, result: error }
rescue StandardError => e
{ success: false, result: e }
end
def should_pipeline_be_notified?(data)


@ -70,8 +70,8 @@ module Integrations
prometheus_client.ping
{ success: true, result: 'Checked API endpoint' }
rescue Gitlab::PrometheusClient::Error => err
{ success: false, result: err }
rescue Gitlab::PrometheusClient::Error => e
{ success: false, result: e }
end
def prometheus_client


@ -57,10 +57,10 @@ module PerformanceMonitoring
self.class.from_json(reload_schema)
[]
rescue Gitlab::Metrics::Dashboard::Errors::LayoutError => error
[error.message]
rescue ActiveModel::ValidationError => exception
exception.model.errors.map { |attr, error| "#{attr}: #{error}" }
rescue Gitlab::Metrics::Dashboard::Errors::LayoutError => e
[e.message]
rescue ActiveModel::ValidationError => e
e.model.errors.map { |attr, error| "#{attr}: #{error}" }
end
private


@ -58,8 +58,8 @@ class PersonalAccessToken < ApplicationRecord
begin
Gitlab::CryptoHelper.aes256_gcm_decrypt(encrypted_token)
rescue StandardError => ex
logger.warn "Failed to decrypt #{self.name} value stored in Redis for key ##{redis_key}: #{ex.class}"
rescue StandardError => e
logger.warn "Failed to decrypt #{self.name} value stored in Redis for key ##{redis_key}: #{e.class}"
encrypted_token
end
end


@ -1760,8 +1760,8 @@ class Project < ApplicationRecord
repository.after_create
true
rescue StandardError => err
Gitlab::ErrorTracking.track_exception(err, project: { id: id, full_path: full_path, disk_path: disk_path })
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, project: { id: id, full_path: full_path, disk_path: disk_path })
errors.add(:base, _('Failed to create repository'))
false
end


@ -646,8 +646,8 @@ class Repository
return if licensee_object.name.blank?
licensee_object
rescue Licensee::InvalidLicense => ex
Gitlab::ErrorTracking.track_exception(ex)
rescue Licensee::InvalidLicense => e
Gitlab::ErrorTracking.track_exception(e)
nil
end
memoize_method :license
@ -1072,9 +1072,9 @@ class Repository
) do |commit_id|
merge_request.update!(rebase_commit_sha: commit_id, merge_error: nil)
end
rescue StandardError => error
rescue StandardError => e
merge_request.update!(rebase_commit_sha: nil)
raise error
raise e
end
def squash(user, merge_request, message)


@ -44,11 +44,11 @@ class SnippetRepository < ApplicationRecord
Gitlab::Git::CommitError,
Gitlab::Git::PreReceiveError,
Gitlab::Git::CommandError,
ArgumentError => error
ArgumentError => e
logger.error(message: "Snippet git error. Reason: #{error.message}", snippet: snippet.id)
logger.error(message: "Snippet git error. Reason: #{e.message}", snippet: snippet.id)
raise commit_error_exception(error)
raise commit_error_exception(e)
end
def transform_file_entries(files)


@ -49,8 +49,8 @@ class U2fRegistration < ApplicationRecord
def create_webauthn_registration
converter = Gitlab::Auth::U2fWebauthnConverter.new(self)
WebauthnRegistration.create!(converter.convert)
rescue StandardError => ex
Gitlab::ErrorTracking.track_exception(ex, u2f_registration_id: self.id)
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, u2f_registration_id: self.id)
end
def update_webauthn_registration


@ -146,8 +146,8 @@ class Wiki
repository.create_if_not_exists(default_branch)
raise CouldNotCreateWikiError unless repository_exists?
rescue StandardError => err
Gitlab::ErrorTracking.track_exception(err, wiki: {
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, wiki: {
container_type: container.class.name,
container_id: container.id,
full_path: full_path,
@ -364,9 +364,9 @@ class Wiki
Gitlab::Git::CommitError,
Gitlab::Git::PreReceiveError,
Gitlab::Git::CommandError,
ArgumentError => error
ArgumentError => e
Gitlab::ErrorTracking.log_exception(error, action: action, wiki_id: id)
Gitlab::ErrorTracking.log_exception(e, action: action, wiki_id: id)
false
end


@ -0,0 +1,87 @@
# frozen_string_literal: true
module AuditEvents
class BuildService
# Handle missing attributes
MissingAttributeError = Class.new(StandardError)
# @raise [MissingAttributeError] when required attributes are blank
#
# @return [BuildService]
def initialize(
author:, scope:, target:, message:,
created_at: DateTime.current, additional_details: {}, ip_address: nil, target_details: nil)
raise MissingAttributeError if missing_attribute?(author, scope, target, message)
@author = build_author(author)
@scope = scope
@target = build_target(target)
@ip_address = ip_address || build_ip_address
@message = build_message(message)
@created_at = created_at
@additional_details = additional_details
@target_details = target_details
end
# Create an instance of AuditEvent
#
# @return [AuditEvent]
def execute
AuditEvent.new(payload)
end
private
def missing_attribute?(author, scope, target, message)
author.blank? || scope.blank? || target.blank? || message.blank?
end
def payload
base_payload.merge(details: base_details_payload)
end
def base_payload
{
author_id: @author.id,
author_name: @author.name,
entity_id: @scope.id,
entity_type: @scope.class.name,
created_at: @created_at
}
end
def base_details_payload
@additional_details.merge({
author_name: @author.name,
author_class: @author.class.name,
target_id: @target.id,
target_type: @target.type,
target_details: @target_details || @target.details,
custom_message: @message
})
end
def build_author(author)
author.id = -2 if author.instance_of? DeployToken
author.id = -3 if author.instance_of? DeployKey
author
end
def build_target(target)
return target if target.is_a? ::Gitlab::Audit::NullTarget
::Gitlab::Audit::Target.new(target)
end
def build_message(message)
message
end
def build_ip_address
Gitlab::RequestContext.instance.client_ip || @author.current_sign_in_ip
end
end
end
AuditEvents::BuildService.prepend_mod_with('AuditEvents::BuildService')
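A hedged sketch of how the new service might be invoked, based only on the signature above; `current_user`, `project`, and `approval_rule` are placeholders:
```ruby
# Illustrative only; the three objects below are placeholders.
audit_event = AuditEvents::BuildService.new(
  author: current_user,            # who performed the action (required)
  scope: project,                  # entity the event belongs to (required)
  target: approval_rule,           # object being audited (required)
  message: 'Changed approval rule' # stored as custom_message in details (required)
).execute

audit_event # => an unsaved AuditEvent; persisting it is left to the caller
```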


@ -1,19 +1,19 @@
- page_title _("Import repository")
- page_title _('Import repository')
%h1.page-title.gl-font-size-h-display
= _('Import repository')
%hr
- if @project.import_failed?
.card.border-danger
.card-header.bg-danger.text-white The repository could not be imported.
.card-body
%pre
:preserve
#{h(@project.import_state.last_error)}
= render Pajamas::AlertComponent.new(title: s_('Import|The repository could not be imported.'),
dismissible: false,
variant: :danger,
alert_options: { class: 'gl-mb-5' }) do |c|
= c.body do
= @project.import_state.last_error
= gitlab_ui_form_for @project, url: project_import_path(@project), method: :post, html: { class: 'js-project-import' } do |f|
= render "shared/import_form", f: f
= render 'shared/import_form', f: f
.form-actions
= f.submit 'Start import', class: "gl-button btn btn-confirm", data: { disable_with: false }
= f.submit 'Start import', class: 'gl-button btn btn-confirm', data: { disable_with: false }


@ -15,9 +15,13 @@ module Pages
.clear_cache
end
if event.data[:root_namespace_id]
event.data.values_at(
:root_namespace_id,
:old_root_namespace_id,
:new_root_namespace_id
).compact.uniq.each do |namespace_id|
::Gitlab::Pages::CacheControl
.for_namespace(event.data[:root_namespace_id])
.for_namespace(namespace_id)
.clear_cache
end
end


@ -0,0 +1,8 @@
---
name: include_groups_from_group_shares_in_project_transfer_locations
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/90127
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/366305
milestone: '15.2'
type: development
group: group::workspace
default_enabled: false


@ -264,6 +264,28 @@ Users with the Owner role for a group can list streaming destinations and see th
1. On the main area, select **Streams**.
1. View the verification token on the right side of each item.
## Payload schema
> Documentation for an audit event streaming schema was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/358149) in GitLab 15.3.
Streamed audit events have a predictable schema in the body of the response.
| Field | Description | Notes |
|------------------|------------------------------------------------------------|-----------------------------------------------------------------------------------|
| `author_id` | User ID of the user who triggered the event | |
| `author_name` | Human-readable name of the author that triggered the event | Helpful when the author no longer exists |
| `created_at` | Timestamp when event was triggered | |
| `details` | JSON object containing additional metadata | Has no defined schema but often contains additional information about an event |
| `entity_id` | ID of the audit event's entity | |
| `entity_path` | Full path of the entity affected by the auditable event | |
| `entity_type` | String representation of the type of entity | Acceptable values include `User`, `Group`, and `Key`. This list is not exhaustive |
| `event_type` | String representation of the type of audit event | |
| `id` | Unique identifier for the audit event | Can be used for deduplication if required |
| `ip_address` | IP address of the host used to trigger the event | |
| `target_details` | Additional details about the target | |
| `target_id` | ID of the audit event's target | |
| `target_type` | String representation of the target's type | |
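For illustration only, a streamed event body that uses these fields might look like the following (shown as a Ruby hash to match the codebase language; every value is made up):
```ruby
# Hypothetical streamed audit event body; all values are illustrative.
payload = {
  id: 1,
  author_id: 42,
  author_name: 'Administrator',
  created_at: '2022-08-09T15:11:31Z',
  details: { custom_message: 'Updated group settings' },
  entity_id: 6,
  entity_path: 'example-group',
  entity_type: 'Group',
  event_type: 'audit_operation',
  ip_address: '127.0.0.1',
  target_details: 'example-group',
  target_id: 6,
  target_type: 'Group'
}
```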
## Audit event streaming on Git operations
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/332747) in GitLab 14.9 [with a flag](../administration/feature_flags.md) named `audit_event_streaming_git_operations`. Disabled by default.


@ -1103,7 +1103,7 @@ curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>"\
Gets a list of all discussion items for a single commit.
```plaintext
GET /projects/:id/commits/:commit_id/discussions
GET /projects/:id/repository/commits/:commit_id/discussions
```
| Attribute | Type | Required | Description |
@ -1237,7 +1237,7 @@ Diff comments contain also position:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>"\
"https://gitlab.example.com/api/v4/projects/5/commits/11/discussions"
"https://gitlab.example.com/api/v4/projects/5/repository/commits/11/discussions"
```
### Get single commit discussion item
@ -1245,7 +1245,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>"\
Returns a single discussion item for a specific project commit
```plaintext
GET /projects/:id/commits/:commit_id/discussions/:discussion_id
GET /projects/:id/repository/commits/:commit_id/discussions/:discussion_id
```
Parameters:
@ -1258,7 +1258,7 @@ Parameters:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>"\
"https://gitlab.example.com/api/v4/projects/5/commits/11/discussions/<discussion_id>"
"https://gitlab.example.com/api/v4/projects/5/repository/commits/11/discussions/<discussion_id>"
```
### Create new commit thread
@ -1267,7 +1267,7 @@ Creates a new thread to a single project commit. This is similar to creating
a note but other comments (replies) can be added to it later.
```plaintext
POST /projects/:id/commits/:commit_id/discussions
POST /projects/:id/repository/commits/:commit_id/discussions
```
Parameters:
@ -1294,7 +1294,7 @@ Parameters:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>"\
"https://gitlab.example.com/api/v4/projects/5/commits/11/discussions?body=comment"
"https://gitlab.example.com/api/v4/projects/5/repository/commits/11/discussions?body=comment"
```
The rules for creating the API request are the same as when
@ -1306,7 +1306,7 @@ with the exception of `base_sha`, `start_sha`, and `head_sha` attributes.
Adds a new note to the thread.
```plaintext
POST /projects/:id/commits/:commit_id/discussions/:discussion_id/notes
POST /projects/:id/repository/commits/:commit_id/discussions/:discussion_id/notes
```
Parameters:
@ -1322,7 +1322,7 @@ Parameters:
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>"\
"https://gitlab.example.com/api/v4/projects/5/commits/11/discussions/<discussion_id>/notes?body=comment
"https://gitlab.example.com/api/v4/projects/5/repository/commits/11/discussions/<discussion_id>/notes?body=comment
```
### Modify an existing commit thread note
@ -1330,7 +1330,7 @@ curl --request POST --header "PRIVATE-TOKEN: <your_access_token>"\
Modify or resolve an existing thread note of a commit.
```plaintext
PUT /projects/:id/commits/:commit_id/discussions/:discussion_id/notes/:note_id
PUT /projects/:id/repository/commits/:commit_id/discussions/:discussion_id/notes/:note_id
```
Parameters:
@ -1345,14 +1345,14 @@ Parameters:
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>"\
"https://gitlab.example.com/api/v4/projects/5/commits/11/discussions/<discussion_id>/notes/1108?body=comment"
"https://gitlab.example.com/api/v4/projects/5/repository/commits/11/discussions/<discussion_id>/notes/1108?body=comment"
```
Resolving a note:
```shell
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>"\
"https://gitlab.example.com/api/v4/projects/5/commits/11/discussions/<discussion_id>/notes/1108?resolved=true"
"https://gitlab.example.com/api/v4/projects/5/repository/commits/11/discussions/<discussion_id>/notes/1108?resolved=true"
```
### Delete a commit thread note
@ -1360,7 +1360,7 @@ curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>"\
Deletes an existing thread note of a commit.
```plaintext
DELETE /projects/:id/commits/:commit_id/discussions/:discussion_id/notes/:note_id
DELETE /projects/:id/repository/commits/:commit_id/discussions/:discussion_id/notes/:note_id
```
Parameters:
@ -1374,5 +1374,5 @@ Parameters:
```shell
curl --request DELETE --header "PRIVATE-TOKEN: <your_access_token>"\
"https://gitlab.example.com/api/v4/projects/5/commits/11/discussions/636"
"https://gitlab.example.com/api/v4/projects/5/repository/commits/11/discussions/636"
```


@ -3311,7 +3311,7 @@ Input type: `IterationCadenceCreateInput`
| <a id="mutationiterationcadencecreategrouppath"></a>`groupPath` | [`ID!`](#id) | Group where the iteration cadence is created. |
| <a id="mutationiterationcadencecreateiterationsinadvance"></a>`iterationsInAdvance` | [`Int`](#int) | Upcoming iterations to be created when iteration cadence is set to automatic. |
| <a id="mutationiterationcadencecreaterollover"></a>`rollOver` | [`Boolean`](#boolean) | Whether the iteration cadence should roll over issues to the next iteration or not. |
| <a id="mutationiterationcadencecreatestartdate"></a>`startDate` | [`Time`](#time) | Timestamp of the iteration cadence start date. |
| <a id="mutationiterationcadencecreatestartdate"></a>`startDate` | [`Time`](#time) | Timestamp of the automation start date. |
| <a id="mutationiterationcadencecreatetitle"></a>`title` | [`String`](#string) | Title of the iteration cadence. |
#### Fields
@ -3357,7 +3357,7 @@ Input type: `IterationCadenceUpdateInput`
| <a id="mutationiterationcadenceupdateid"></a>`id` | [`IterationsCadenceID!`](#iterationscadenceid) | Global ID of the iteration cadence. |
| <a id="mutationiterationcadenceupdateiterationsinadvance"></a>`iterationsInAdvance` | [`Int`](#int) | Upcoming iterations to be created when iteration cadence is set to automatic. |
| <a id="mutationiterationcadenceupdaterollover"></a>`rollOver` | [`Boolean`](#boolean) | Whether the iteration cadence should roll over issues to the next iteration or not. |
| <a id="mutationiterationcadenceupdatestartdate"></a>`startDate` | [`Time`](#time) | Timestamp of the iteration cadence start date. |
| <a id="mutationiterationcadenceupdatestartdate"></a>`startDate` | [`Time`](#time) | Timestamp of the automation start date. |
| <a id="mutationiterationcadenceupdatetitle"></a>`title` | [`String`](#string) | Title of the iteration cadence. |
#### Fields
@ -13342,7 +13342,7 @@ Represents an iteration cadence.
| <a id="iterationcadenceid"></a>`id` | [`IterationsCadenceID!`](#iterationscadenceid) | Global ID of the iteration cadence. |
| <a id="iterationcadenceiterationsinadvance"></a>`iterationsInAdvance` | [`Int`](#int) | Upcoming iterations to be created when iteration cadence is set to automatic. |
| <a id="iterationcadencerollover"></a>`rollOver` | [`Boolean!`](#boolean) | Whether the iteration cadence should roll over issues to the next iteration or not. |
| <a id="iterationcadencestartdate"></a>`startDate` | [`Time`](#time) | Timestamp of the iteration cadence start date. |
| <a id="iterationcadencestartdate"></a>`startDate` | [`Time`](#time) | Timestamp of the automation start date. |
| <a id="iterationcadencetitle"></a>`title` | [`String!`](#string) | Title of the iteration cadence. |
### `JiraImport`


@ -359,3 +359,19 @@ with a new push.
Threads are now resolved if a push makes a diff section outdated.
Threads on lines that don't change and top-level resolvable threads are not resolved.
## Display paginated merge request discussions
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/340172) in GitLab 15.1 [with a flag](../../administration/feature_flags.md) named `paginated_mr_discussions`. Disabled by default.
> - [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/364497) in GitLab 15.2.
> - [Enabled on self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/364497) in GitLab 15.3.
FLAG:
On self-managed GitLab, by default this feature is available. To hide the feature
per project or for your entire instance, ask an administrator to
[disable the feature flag](../../administration/feature_flags.md) named `paginated_mr_discussions`.
On GitLab.com, this feature is available.
A merge request can have many discussions. Loading them all in a single request
can be slow. To improve the performance of loading discussions, they are split into multiple
pages, loading sequentially.
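As a hedged sketch, the administrator step mentioned above might look like this in a GitLab Rails console (the project path is a placeholder):
```ruby
# GitLab Rails console sketch; 'group/project' is a placeholder path.
Feature.disable(:paginated_mr_discussions)                  # entire instance
Feature.disable(:paginated_mr_discussions,
                Project.find_by_full_path('group/project')) # a single project
Feature.enable(:paginated_mr_discussions)                   # re-enable later
```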


@ -187,6 +187,7 @@ To group issues by label:
> - [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/5077) in GitLab 14.1 [with a flag](../../../administration/feature_flags.md), named `iteration_cadences`. Disabled by default.
> - [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/354977) in GitLab 15.0: All scheduled iterations must start on the same day of the week as the cadence start day. Start date of cadence cannot be edited after the first iteration starts.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/354878) in GitLab 15.0.
> - [Changed](https://gitlab.com/gitlab-org/gitlab/-/issues/367493) in GitLab 15.3: A new automation start date can be selected for a cadence. Upcoming iterations are scheduled to start on the same day of the week as the changed start date.
Iteration cadences automate iteration scheduling. You can use them to
automate creating iterations every 1, 2, 3, or 4 weeks. You can also
@ -207,7 +208,7 @@ To create an iteration cadence:
1. Select **New iteration cadence**.
1. Complete the fields.
- Enter the title and description of the iteration cadence.
- Enter the first iteration start date of the iteration cadence. Iterations will be scheduled to
- Select the automation start date of the iteration cadence. Iterations will be scheduled to
begin on the same day of the week as the start date.
- From the **Duration** dropdown list, select how many weeks each iteration should last.
- From the **Upcoming iterations** dropdown list, select how many upcoming iterations should be
@ -227,10 +228,9 @@ To edit an iteration cadence:
1. On the left sidebar, select **Issues > Iterations**.
1. Select **Edit iteration cadence**.
When you edit the **Duration**, **Upcoming iterations**, or **First iteration start date** fields,
only upcoming iterations are affected.
You can edit the first iteration start date of a cadence if the cadence has not started yet.
When you edit the **Automation start date** field,
you must set a new start date that doesn't overlap with the existing
current or past iterations.
Editing **Upcoming iterations** is a non-destructive action.
If ten upcoming iterations already exist, changing the number under **Upcoming iterations** to `2`
@ -273,25 +273,12 @@ To upgrade the iteration cadence to use the automation features:
1. On the top bar, select **Menu > Groups** and find your group.
1. On the left sidebar, select **Issues > Iterations**.
1. Select the three-dot menu (**{ellipsis_v}**) > **Edit cadence** for the cadence you want to upgrade.
1. Complete the required fields **Duration** and **Upcoming iterations**.
1. Complete the required fields **Duration**, **Upcoming iterations**, and **Automation start date**.
For **Automation start date**, you can select any date that doesn't overlap with the existing open iterations.
If you have upcoming iterations, the automatic scheduling adjusts them appropriately to fit
your chosen duration.
1. Select **Save changes**.
#### Start dates of converted cadences
The first iteration start date of your converted cadence is set to the start date of its
**first** existing iteration.
If you attempt to set a new start date, the conversion fails with an error message.
If your manual cadence is empty, converting it to use automatic scheduling is effectively
the same as creating a new automated cadence.
GitLab will start scheduling new iterations on the same day of the week as the start date,
starting from the nearest such day from the current date.
During the conversion process GitLab does not delete or modify existing **ongoing** or
**closed** iterations. If you have iterations with start dates in the future,
they are updated to fit your cadence settings.
#### Converted cadences example
For example, suppose it's Friday, April 15, and you have three iterations in a manual cadence:
@ -300,20 +287,21 @@ For example, suppose it's Friday, April 15, and you have three iterations in a m
- Tuesday, April 12 - Friday, April 15 (ongoing)
- Tuesday, May 3 - Friday, May 6 (upcoming)
On Friday, April 15, you convert the manual cadence
to automate scheduling iterations every week, up to two upcoming iterations.
The earliest possible **Automation start date** you can choose
is Saturday, April 16 in this scenario, because April 15 overlaps with
the ongoing iteration.
The first iteration is closed, and the second iteration is ongoing,
so they aren't deleted or modified in the conversion process.
To observe the weekly duration, the third iteration is updated so that it:
- Starts on Monday, April 18 - which is the nearest date that is Monday.
- Ends on Sunday, April 24.
Finally, to always have two upcoming iterations, an additional iteration is scheduled:
If you select Monday, April 18 as the automation start date to
automate scheduling iterations every week up to two upcoming iterations,
after the conversion you have the following iterations:
- Monday, April 4 - Friday, April 8 (closed)
- Tuesday, April 12 - Friday, April 15 (ongoing)
- Monday, April 18 - Sunday, April 24 (upcoming)
- Monday, April 25 - Sunday, May 1 (upcoming)
Your existing upcoming iteration "Tuesday, May 3 - Friday, May 6"
is changed to "Monday, April 18 - Sunday, April 24".
An additional upcoming iteration "Monday, April 25 - Sunday, May 1" is scheduled
to satisfy the requirement that there are at least two upcoming iterations scheduled.

lib/gitlab/audit/auditor.rb (new file, 175 lines)

@ -0,0 +1,175 @@
# frozen_string_literal: true
module Gitlab
module Audit
class Auditor
attr_reader :scope, :name
# Record audit events
#
# @param [Hash] context
# @option context [String] :name the operation name to be audited, used for error tracking
# @option context [User] :author the user who authors the change
# @option context [User, Project, Group] :scope the scope which audit event belongs to
# @option context [Object] :target the target object being audited
# @option context [String] :message the message describing the action
# @option context [Hash] :additional_details the additional details we want to merge into audit event details.
# @option context [Time] :created_at the time that the event occurred (defaults to the current time)
#
# @example Using block (useful when events are emitted deep in the call stack)
# i.e. multiple audit events
#
# audit_context = {
# name: 'merge_approval_rule_updated',
# author: current_user,
# scope: project_alpha,
# target: merge_approval_rule,
# message: 'a user has attempted to update an approval rule'
# }
#
# # in the initiating service
# Gitlab::Audit::Auditor.audit(audit_context) do
# service.execute
# end
#
# # in the model
# Auditable.push_audit_event('an approver has been added')
# Auditable.push_audit_event('an approval group has been removed')
#
# @example Using standard method call
# i.e. single audit event
#
# merge_approval_rule.save
# Gitlab::Audit::Auditor.audit(audit_context)
#
# @return result of block execution
def self.audit(context, &block)
auditor = new(context)
return unless auditor.audit_enabled?
if block
auditor.multiple_audit(&block)
else
auditor.single_audit
end
end
def initialize(context = {})
@context = context
@name = @context.fetch(:name, 'audit_operation')
@stream_only = @context.fetch(:stream_only, false)
@author = @context.fetch(:author)
@scope = @context.fetch(:scope)
@target = @context.fetch(:target)
@created_at = @context.fetch(:created_at, DateTime.current)
@message = @context.fetch(:message, '')
@additional_details = @context.fetch(:additional_details, {})
@ip_address = @context[:ip_address]
@target_details = @context[:target_details]
@authentication_event = @context.fetch(:authentication_event, false)
@authentication_provider = @context[:authentication_provider]
end
def single_audit
events = [build_event(@message)]
record(events)
end
def multiple_audit
# For now we don't have any need to implement multiple audit event functionality in CE
# Defined in EE
end
def record(events)
log_events(events) unless @stream_only
send_to_stream(events)
end
def log_events(events)
log_authentication_event
log_to_database(events)
log_to_file(events)
end
def audit_enabled?
authentication_event?
end
def authentication_event?
@authentication_event
end
def log_authentication_event
return unless Gitlab::Database.read_write? && authentication_event?
event = AuthenticationEvent.new(authentication_event_payload)
event.save!
rescue ActiveRecord::RecordInvalid => error
::Gitlab::ErrorTracking.track_exception(error, audit_operation: @name)
end
def authentication_event_payload
{
# @author can be a User or various Gitlab::Audit authors.
# Only capture real users for successful authentication events.
user: author_if_user,
user_name: @author.name,
ip_address: @ip_address,
result: AuthenticationEvent.results[:success],
provider: @authentication_provider
}
end
def author_if_user
@author if @author.is_a?(User)
end
def send_to_stream(events)
# Defined in EE
end
def build_event(message)
AuditEvents::BuildService.new(
author: @author,
scope: @scope,
target: @target,
created_at: @created_at,
message: message,
additional_details: @additional_details,
ip_address: @ip_address,
target_details: @target_details
).execute
end
def log_to_database(events)
AuditEvent.bulk_insert!(events)
rescue ActiveRecord::RecordInvalid => error
::Gitlab::ErrorTracking.track_exception(error, audit_operation: @name)
end
def log_to_file(events)
file_logger = ::Gitlab::AuditJsonLogger.build
events.each { |event| file_logger.info(log_payload(event)) }
end
private
def log_payload(event)
payload = event.as_json
details = formatted_details(event.details)
payload["details"] = details
payload.merge!(details).as_json
end
def formatted_details(details)
details.merge(details.slice(:from, :to).transform_values(&:to_s))
end
end
end
end
Gitlab::Audit::Auditor.prepend_mod_with("Gitlab::Audit::Auditor")
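Based only on the context keys the initializer above reads, a hedged sketch of recording an authentication event (the operation name, provider label, and `user` are illustrative):
```ruby
# Illustrative only; in CE only authentication events are persisted (see #audit_enabled?).
audit_context = {
  name: 'authenticated_with_password',   # assumed operation name
  author: user,                          # placeholder User
  scope: user,
  target: user,
  message: 'Signed in with standard authentication',
  authentication_event: true,
  authentication_provider: 'standard',   # assumed provider label
  ip_address: '127.0.0.1'
}

::Gitlab::Audit::Auditor.audit(audit_context)
```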


@ -0,0 +1,19 @@
# frozen_string_literal: true
module Gitlab
module Audit
class NullTarget
def id
nil
end
def type
nil
end
def details
nil
end
end
end
end


@ -0,0 +1,21 @@
# frozen_string_literal: true
module Gitlab
module Audit
class Target
delegate :id, to: :@object
def initialize(object)
@object = object
end
def type
@object.class.name
end
def details
@object.try(:name) || @object.try(:audit_details) || 'unknown'
end
end
end
end
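A small sketch of the wrapper's fallback behaviour, using a hypothetical audited model instance:
```ruby
# Illustrative only; `project` stands in for any audited model instance.
target = Gitlab::Audit::Target.new(project)

target.id      # delegated to project.id
target.type    # => "Project" (the wrapped object's class name)
target.details # project.name, else project.audit_details, else 'unknown'
```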


@ -231,6 +231,12 @@ module Gitlab
"BatchedMigration[id: #{id}]"
end
def progress
return unless total_tuple_count.to_i > 0
100 * migrated_tuple_count / total_tuple_count
end
private
def validate_batched_jobs_status
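For the `progress` method added above, a quick worked example with hypothetical tuple counts:
```ruby
# Hypothetical counts; integer arithmetic as in the method above.
migrated_tuple_count = 250
total_tuple_count    = 1_000

100 * migrated_tuple_count / total_tuple_count # => 25 (percent complete)
# When total_tuple_count is 0 or nil, the method returns nil instead of dividing by zero.
```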


@ -42,6 +42,7 @@ module Gitlab
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectCreatedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectPathChangedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectArchivedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectTransferedEvent
store.subscribe ::MergeRequests::CreateApprovalEventWorker, to: ::MergeRequests::ApprovedEvent
end


@ -19991,6 +19991,9 @@ msgstr[1] ""
msgid "Importing..."
msgstr ""
msgid "Import|The repository could not be imported."
msgstr ""
msgid "Import|There is not a valid Git repository at this URL. If your HTTP repository is not publicly accessible, verify your credentials."
msgstr ""
@ -22029,15 +22032,33 @@ msgstr ""
msgid "Iterations"
msgstr ""
msgid "IterationsCadence|Manual iteration cadences are deprecated. Only automatic iteration cadences are allowed."
msgstr ""
msgid "IterationsCadence|The automation start date must come after the active iteration %{iteration_dates}."
msgstr ""
msgid "IterationsCadence|The automation start date must come after the past iteration %{iteration_dates}."
msgstr ""
msgid "IterationsCadence|The automation start date would retroactively create a past iteration. %{start_date} is the earliest possible start date."
msgstr ""
msgid "Iterations|Add iteration"
msgstr ""
msgid "Iterations|All"
msgstr ""
msgid "Iterations|All scheduled iterations will remain scheduled even if you use a smaller number."
msgstr ""
msgid "Iterations|Automatic scheduling"
msgstr ""
msgid "Iterations|Automation start date"
msgstr ""
msgid "Iterations|Cadence configuration is invalid."
msgstr ""
@ -22098,9 +22119,6 @@ msgstr ""
msgid "Iterations|Error loading iteration cadences."
msgstr ""
msgid "Iterations|First iteration start date"
msgstr ""
msgid "Iterations|Iteration cadences"
msgstr ""
@ -22164,15 +22182,15 @@ msgstr ""
msgid "Iterations|Start date"
msgstr ""
msgid "Iterations|The date of the first iteration to schedule. This date determines the day of the week when each iteration starts."
msgstr ""
msgid "Iterations|The duration of each iteration (in weeks)."
msgstr ""
msgid "Iterations|The iteration has been deleted."
msgstr ""
msgid "Iterations|The start date of the first iteration determines when your cadence begins."
msgstr ""
msgid "Iterations|This cadence can be converted to use automated scheduling"
msgstr ""
@ -44704,9 +44722,6 @@ msgstr ""
msgid "You cannot approve your own deployment."
msgstr ""
msgid "You cannot change the start date after the cadence has started. Please create a new cadence."
msgstr ""
msgid "You cannot combine replace_ids with add_ids or remove_ids"
msgstr ""


@ -55,3 +55,5 @@ module QA
end
end
end
QA::Runtime::User.extend_mod_with('Runtime::User', namespace: QA)


@ -0,0 +1,63 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Groups::AcceptingProjectTransfersFinder do
let_it_be(:user) { create(:user) }
let_it_be(:group_where_direct_owner) { create(:group) }
let_it_be(:subgroup_of_group_where_direct_owner) { create(:group, parent: group_where_direct_owner) }
let_it_be(:group_where_direct_maintainer) { create(:group) }
let_it_be(:group_where_direct_maintainer_but_cant_create_projects) do
create(:group, project_creation_level: Gitlab::Access::NO_ONE_PROJECT_ACCESS)
end
let_it_be(:group_where_direct_developer) { create(:group) }
let_it_be(:shared_with_group_where_direct_owner_as_owner) { create(:group) }
let_it_be(:shared_with_group_where_direct_owner_as_guest) { create(:group) }
let_it_be(:shared_with_group_where_direct_owner_as_maintainer) { create(:group) }
let_it_be(:shared_with_group_where_direct_developer_as_owner) { create(:group) }
let_it_be(:subgroup_of_shared_with_group_where_direct_owner_as_maintainer) do
create(:group, parent: shared_with_group_where_direct_owner_as_maintainer)
end
before do
group_where_direct_owner.add_owner(user)
group_where_direct_maintainer.add_maintainer(user)
group_where_direct_developer.add_developer(user)
create(:group_group_link, :owner,
shared_with_group: group_where_direct_owner,
shared_group: shared_with_group_where_direct_owner_as_owner
)
create(:group_group_link, :guest,
shared_with_group: group_where_direct_owner,
shared_group: shared_with_group_where_direct_owner_as_guest
)
create(:group_group_link, :maintainer,
shared_with_group: group_where_direct_owner,
shared_group: shared_with_group_where_direct_owner_as_maintainer
)
create(:group_group_link, :owner,
shared_with_group: group_where_direct_developer,
shared_group: shared_with_group_where_direct_developer_as_owner
)
end
describe '#execute' do
subject(:result) { described_class.new(user).execute }
it 'only returns groups where the user has access to transfer projects to' do
expect(result).to match_array([
group_where_direct_owner,
subgroup_of_group_where_direct_owner,
group_where_direct_maintainer,
shared_with_group_where_direct_owner_as_owner,
shared_with_group_where_direct_owner_as_maintainer,
subgroup_of_shared_with_group_where_direct_owner_as_maintainer
])
end
end
end
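
Read together with the GroupGroupLink and Group scopes introduced later in this commit, these examples suggest the finder composes roughly the following pieces. This is a sketch under that assumption; the helper names flagged in the comments are guesses, not the upstream implementation.

  # Hypothetical composition of the new scopes; not the actual finder.
  def execute
    # Groups where the user holds OWNER or MAINTAINER directly
    # ('owned_or_maintainers_groups' is an assumed helper on User).
    direct = @user.owned_or_maintainers_groups

    # Groups reachable through group shares that grant OWNER or MAINTAINER,
    # via the GroupGroupLink scopes exercised further down in this commit.
    shared_ids = GroupGroupLink
      .with_owner_or_maintainer_access
      .where(shared_with_group: direct)
      .pluck(:shared_group_id)

    # Include subgroups of both sets, then keep only groups that allow project creation
    # ('self_and_descendants' as a relation scope is assumed here).
    Group.id_in(direct.ids + shared_ids)
      .self_and_descendants
      .project_creation_allowed
  end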

View File

@ -76,7 +76,7 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
const promise = testAction(() => {}, null, {}, assertion.mutations, assertion.actions);
originalExpect(promise instanceof Promise).toBeTruthy();
originalExpect(promise instanceof Promise).toBe(true);
return promise;
});

View File

@ -5,7 +5,7 @@ describe('Mesh object', () => {
it('defaults to non-wireframe material', () => {
const object = new MeshObject(new BoxGeometry(10, 10, 10));
expect(object.material.wireframe).toBeFalsy();
expect(object.material.wireframe).toBe(false);
});
it('changes to wireframe material', () => {
@ -13,7 +13,7 @@ describe('Mesh object', () => {
object.changeMaterial('wireframe');
expect(object.material.wireframe).toBeTruthy();
expect(object.material.wireframe).toBe(true);
});
it('scales object down', () => {

View File

@ -369,7 +369,7 @@ describe('Design management index page', () => {
findDropzone().vm.$emit('change', [{ name: 'test' }]);
expect(mutate).toHaveBeenCalledWith(mutationVariables);
expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
expect(wrapper.vm.isSaving).toBeTruthy();
expect(wrapper.vm.isSaving).toBe(true);
expect(dropzoneClasses()).toContain('design-list-item');
expect(dropzoneClasses()).toContain('design-list-item-new');
});
@ -399,7 +399,7 @@ describe('Design management index page', () => {
await nextTick();
expect(wrapper.vm.filesToBeSaved).toEqual([]);
expect(wrapper.vm.isSaving).toBeFalsy();
expect(wrapper.vm.isSaving).toBe(false);
expect(wrapper.vm.isLatestVersion).toBe(true);
});
@ -412,7 +412,7 @@ describe('Design management index page', () => {
wrapper.vm.onUploadDesignError();
await nextTick();
expect(wrapper.vm.filesToBeSaved).toEqual([]);
expect(wrapper.vm.isSaving).toBeFalsy();
expect(wrapper.vm.isSaving).toBe(false);
expect(findDesignUpdateAlert().exists()).toBe(true);
expect(findDesignUpdateAlert().text()).toBe(UPLOAD_DESIGN_ERROR);
});

View File

@ -14,45 +14,45 @@ describe('diffs helper', () => {
describe('hasInlineLines', () => {
it('is false when the file does not exist', () => {
expect(diffsHelper.hasInlineLines()).toBeFalsy();
expect(diffsHelper.hasInlineLines()).toBe(false);
});
it('is false when the file does not have the highlighted_diff_lines property', () => {
const missingInline = getDiffFile({ highlighted_diff_lines: undefined });
expect(diffsHelper.hasInlineLines(missingInline)).toBeFalsy();
expect(diffsHelper.hasInlineLines(missingInline)).toBe(false);
});
it('is false when the file has zero highlighted_diff_lines', () => {
const emptyInline = getDiffFile({ highlighted_diff_lines: [] });
expect(diffsHelper.hasInlineLines(emptyInline)).toBeFalsy();
expect(diffsHelper.hasInlineLines(emptyInline)).toBe(false);
});
it('is true when the file has at least 1 highlighted_diff_lines', () => {
expect(diffsHelper.hasInlineLines(getDiffFile())).toBeTruthy();
expect(diffsHelper.hasInlineLines(getDiffFile())).toBe(true);
});
});
describe('hasParallelLines', () => {
it('is false when the file does not exist', () => {
expect(diffsHelper.hasParallelLines()).toBeFalsy();
expect(diffsHelper.hasParallelLines()).toBe(false);
});
it('is false when the file does not have the parallel_diff_lines property', () => {
const missingInline = getDiffFile({ parallel_diff_lines: undefined });
expect(diffsHelper.hasParallelLines(missingInline)).toBeFalsy();
expect(diffsHelper.hasParallelLines(missingInline)).toBe(false);
});
it('is false when the file has zero parallel_diff_lines', () => {
const emptyInline = getDiffFile({ parallel_diff_lines: [] });
expect(diffsHelper.hasParallelLines(emptyInline)).toBeFalsy();
expect(diffsHelper.hasParallelLines(emptyInline)).toBe(false);
});
it('is true when the file has at least 1 parallel_diff_lines', () => {
expect(diffsHelper.hasParallelLines(getDiffFile())).toBeTruthy();
expect(diffsHelper.hasParallelLines(getDiffFile())).toBe(true);
});
});
@ -61,16 +61,16 @@ describe('diffs helper', () => {
const noParallelLines = getDiffFile({ parallel_diff_lines: undefined });
const emptyParallelLines = getDiffFile({ parallel_diff_lines: [] });
expect(diffsHelper.isSingleViewStyle(noParallelLines)).toBeTruthy();
expect(diffsHelper.isSingleViewStyle(emptyParallelLines)).toBeTruthy();
expect(diffsHelper.isSingleViewStyle(noParallelLines)).toBe(true);
expect(diffsHelper.isSingleViewStyle(emptyParallelLines)).toBe(true);
});
it('is true when the file has at least 1 parallel line but no inline lines for any reason', () => {
const noInlineLines = getDiffFile({ highlighted_diff_lines: undefined });
const emptyInlineLines = getDiffFile({ highlighted_diff_lines: [] });
expect(diffsHelper.isSingleViewStyle(noInlineLines)).toBeTruthy();
expect(diffsHelper.isSingleViewStyle(emptyInlineLines)).toBeTruthy();
expect(diffsHelper.isSingleViewStyle(noInlineLines)).toBe(true);
expect(diffsHelper.isSingleViewStyle(emptyInlineLines)).toBe(true);
});
it('is true when the file does not have any inline lines or parallel lines for any reason', () => {
@ -83,13 +83,13 @@ describe('diffs helper', () => {
parallel_diff_lines: [],
});
expect(diffsHelper.isSingleViewStyle(noLines)).toBeTruthy();
expect(diffsHelper.isSingleViewStyle(emptyLines)).toBeTruthy();
expect(diffsHelper.isSingleViewStyle()).toBeTruthy();
expect(diffsHelper.isSingleViewStyle(noLines)).toBe(true);
expect(diffsHelper.isSingleViewStyle(emptyLines)).toBe(true);
expect(diffsHelper.isSingleViewStyle()).toBe(true);
});
it('is false when the file has both inline and parallel lines', () => {
expect(diffsHelper.isSingleViewStyle(getDiffFile())).toBeFalsy();
expect(diffsHelper.isSingleViewStyle(getDiffFile())).toBe(false);
});
});

View File

@ -18,8 +18,8 @@ describe('Multi-file editor library diff calculator', () => {
({ originalContent, newContent, lineNumber }) => {
const diff = computeDiff(originalContent, newContent)[0];
expect(diff.added).toBeTruthy();
expect(diff.modified).toBeTruthy();
expect(diff.added).toBe(true);
expect(diff.modified).toBe(true);
expect(diff.removed).toBeUndefined();
expect(diff.lineNumber).toBe(lineNumber);
},
@ -36,7 +36,7 @@ describe('Multi-file editor library diff calculator', () => {
({ originalContent, newContent, lineNumber }) => {
const diff = computeDiff(originalContent, newContent)[0];
expect(diff.added).toBeTruthy();
expect(diff.added).toBe(true);
expect(diff.modified).toBeUndefined();
expect(diff.removed).toBeUndefined();
expect(diff.lineNumber).toBe(lineNumber);
@ -56,7 +56,7 @@ describe('Multi-file editor library diff calculator', () => {
expect(diff.added).toBeUndefined();
expect(diff.modified).toBe(modified);
expect(diff.removed).toBeTruthy();
expect(diff.removed).toBe(true);
expect(diff.lineNumber).toBe(lineNumber);
},
);

View File

@ -38,7 +38,6 @@ describe('AddNamespaceButton', () => {
it('button is bound to the modal', () => {
const { value } = getBinding(findButton().element, 'gl-modal');
expect(value).toBeTruthy();
expect(value).toBe(ADD_NAMESPACE_MODAL_ID);
});
});

View File

@ -628,7 +628,7 @@ describe('common_utils', () => {
it('returns an empty object if `conversionFunction` parameter is not a function', () => {
const result = commonUtils.convertObjectProps(null, mockObjects.convertObjectProps.obj);
expect(isEmptyObject(result)).toBeTruthy();
expect(isEmptyObject(result)).toBe(true);
});
});
@ -645,9 +645,9 @@ describe('common_utils', () => {
: commonUtils[functionName];
it('returns an empty object if `obj` parameter is null, undefined or an empty object', () => {
expect(isEmptyObject(testFunction(null))).toBeTruthy();
expect(isEmptyObject(testFunction())).toBeTruthy();
expect(isEmptyObject(testFunction({}))).toBeTruthy();
expect(isEmptyObject(testFunction(null))).toBe(true);
expect(isEmptyObject(testFunction())).toBe(true);
expect(isEmptyObject(testFunction({}))).toBe(true);
});
it('converts object properties', () => {

View File

@ -387,6 +387,22 @@ describe('init markdown', () => {
expect(textArea.value).toEqual(text);
});
it.each`
keyEvent
${new KeyboardEvent('keydown', { key: ']', metaKey: false })}
${new KeyboardEvent('keydown', { key: ']', metaKey: true, shiftKey: true })}
${new KeyboardEvent('keydown', { key: ']', metaKey: true, altKey: true })}
${new KeyboardEvent('keydown', { key: ']', metaKey: true, ctrlKey: true })}
`('does not indent if meta is not set', ({ keyEvent }) => {
const text = '012\n456\n89';
textArea.value = text;
textArea.setSelectionRange(0, 0);
textArea.dispatchEvent(keyEvent);
expect(textArea.value).toEqual(text);
});
});
describe('with selection', () => {

View File

@ -23,17 +23,17 @@ describe('AccountAndLimits', () => {
describe('Changing of userInternalRegex when userDefaultExternal', () => {
it('is unchecked', () => {
expect($userDefaultExternal.prop('checked')).toBeFalsy();
expect($userDefaultExternal.prop('checked')).toBe(false);
expect($userInternalRegex.placeholder).toEqual(PLACEHOLDER_USER_EXTERNAL_DEFAULT_FALSE);
expect($userInternalRegex.readOnly).toBeTruthy();
expect($userInternalRegex.readOnly).toBe(true);
});
it('is checked', () => {
if (!$userDefaultExternal.prop('checked')) $userDefaultExternal.click();
expect($userDefaultExternal.prop('checked')).toBeTruthy();
expect($userDefaultExternal.prop('checked')).toBe(true);
expect($userInternalRegex.placeholder).toEqual(PLACEHOLDER_USER_EXTERNAL_DEFAULT_TRUE);
expect($userInternalRegex.readOnly).toBeFalsy();
expect($userInternalRegex.readOnly).toBe(false);
});
});
});

View File

@ -51,7 +51,7 @@ describe('Changed file icon', () => {
showTooltip: false,
});
expect(findTooltipText()).toBeFalsy();
expect(findTooltipText()).toBeUndefined();
});
describe.each`
@ -87,7 +87,7 @@ describe('Changed file icon', () => {
});
it('does not have tooltip text', () => {
expect(findTooltipText()).toBeFalsy();
expect(findTooltipText()).toBeUndefined();
});
});

View File

@ -0,0 +1,258 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Audit::Auditor do
let(:name) { 'audit_operation' }
let(:author) { create(:user) }
let(:group) { create(:group) }
let(:provider) { 'standard' }
let(:context) do
{ name: name,
author: author,
scope: group,
target: group,
authentication_event: true,
authentication_provider: provider,
message: "Signed in using standard authentication" }
end
let(:logger) { instance_spy(Gitlab::AuditJsonLogger) }
subject(:auditor) { described_class }
describe '.audit' do
context 'when authentication event' do
let(:audit!) { auditor.audit(context) }
it 'creates an authentication event' do
expect(AuthenticationEvent).to receive(:new).with(
{
user: author,
user_name: author.name,
ip_address: author.current_sign_in_ip,
result: AuthenticationEvent.results[:success],
provider: provider
}
).and_call_original
audit!
end
it 'logs audit events to database', :aggregate_failures do
freeze_time do
audit!
audit_event = AuditEvent.last
expect(audit_event.author_id).to eq(author.id)
expect(audit_event.entity_id).to eq(group.id)
expect(audit_event.entity_type).to eq(group.class.name)
expect(audit_event.created_at).to eq(Time.zone.now)
expect(audit_event.details[:target_id]).to eq(group.id)
expect(audit_event.details[:target_type]).to eq(group.class.name)
end
end
it 'logs audit events to file' do
expect(::Gitlab::AuditJsonLogger).to receive(:build).and_return(logger)
audit!
expect(logger).to have_received(:info).with(
hash_including(
'author_id' => author.id,
'author_name' => author.name,
'entity_id' => group.id,
'entity_type' => group.class.name,
'details' => kind_of(Hash)
)
)
end
context 'when overriding the create datetime' do
let(:context) do
{ name: name,
author: author,
scope: group,
target: group,
created_at: 3.weeks.ago,
authentication_event: true,
authentication_provider: provider,
message: "Signed in using standard authentication" }
end
it 'logs audit events to database', :aggregate_failures do
freeze_time do
audit!
audit_event = AuditEvent.last
expect(audit_event.author_id).to eq(author.id)
expect(audit_event.entity_id).to eq(group.id)
expect(audit_event.entity_type).to eq(group.class.name)
expect(audit_event.created_at).to eq(3.weeks.ago)
expect(audit_event.details[:target_id]).to eq(group.id)
expect(audit_event.details[:target_type]).to eq(group.class.name)
end
end
it 'logs audit events to file' do
freeze_time do
expect(::Gitlab::AuditJsonLogger).to receive(:build).and_return(logger)
audit!
expect(logger).to have_received(:info).with(
hash_including(
'author_id' => author.id,
'author_name' => author.name,
'entity_id' => group.id,
'entity_type' => group.class.name,
'details' => kind_of(Hash),
'created_at' => 3.weeks.ago.iso8601(3)
)
)
end
end
end
context 'when overriding the additional_details' do
additional_details = { action: :custom, from: false, to: true }
let(:context) do
{ name: name,
author: author,
scope: group,
target: group,
created_at: Time.zone.now,
additional_details: additional_details,
authentication_event: true,
authentication_provider: provider,
message: "Signed in using standard authentication" }
end
it 'logs audit events to database' do
freeze_time do
audit!
expect(AuditEvent.last.details).to include(additional_details)
end
end
it 'logs audit events to file' do
freeze_time do
expect(::Gitlab::AuditJsonLogger).to receive(:build).and_return(logger)
audit!
expect(logger).to have_received(:info).with(
hash_including(
'details' => hash_including('action' => 'custom', 'from' => 'false', 'to' => 'true'),
'action' => 'custom',
'from' => 'false',
'to' => 'true'
)
)
end
end
end
context 'when overriding the target_details' do
target_details = "this is my target details"
let(:context) do
{
name: name,
author: author,
scope: group,
target: group,
created_at: Time.zone.now,
target_details: target_details,
authentication_event: true,
authentication_provider: provider,
message: "Signed in using standard authentication"
}
end
it 'logs audit events to database' do
freeze_time do
audit!
audit_event = AuditEvent.last
expect(audit_event.details).to include({ target_details: target_details })
expect(audit_event.target_details).to eq(target_details)
end
end
it 'logs audit events to file' do
freeze_time do
expect(::Gitlab::AuditJsonLogger).to receive(:build).and_return(logger)
audit!
expect(logger).to have_received(:info).with(
hash_including(
'details' => hash_including('target_details' => target_details),
'target_details' => target_details
)
)
end
end
end
end
context 'when authentication event is false' do
let(:context) do
{ name: name, author: author, scope: group,
target: group, authentication_event: false, message: "sample message" }
end
it 'does not create an authentication event' do
expect { auditor.audit(context) }.not_to change(AuthenticationEvent, :count)
end
end
context 'when authentication event is invalid' do
let(:audit!) { auditor.audit(context) }
before do
allow(AuthenticationEvent).to receive(:new).and_raise(ActiveRecord::RecordInvalid)
allow(Gitlab::ErrorTracking).to receive(:track_exception)
end
it 'tracks error' do
audit!
expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(
kind_of(ActiveRecord::RecordInvalid),
{ audit_operation: name }
)
end
it 'does not throw exception' do
expect { auditor.audit(context) }.not_to raise_exception
end
end
context 'when audit events are invalid' do
let(:audit!) { auditor.audit(context) }
before do
allow(AuditEvent).to receive(:bulk_insert!).and_raise(ActiveRecord::RecordInvalid)
allow(Gitlab::ErrorTracking).to receive(:track_exception)
end
it 'tracks error' do
audit!
expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(
kind_of(ActiveRecord::RecordInvalid),
{ audit_operation: name }
)
end
it 'does not throw exception' do
expect { auditor.audit(context) }.not_to raise_exception
end
end
end
end
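
Taken from the contexts above, the call shape the auditor accepts looks like the following. The values are illustrative placeholders mirroring the spec, not a prescribed usage.

  # Illustrative call only; current_user and group stand in for real records.
  ::Gitlab::Audit::Auditor.audit(
    name: 'audit_operation',
    author: current_user,
    scope: group,
    target: group,
    message: 'Signed in using standard authentication',
    additional_details: { action: :custom, from: false, to: true },
    authentication_event: true,
    authentication_provider: 'standard'
  )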

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Audit::NullTarget do
subject { described_class.new }
describe '#id' do
it 'returns nil' do
expect(subject.id).to eq(nil)
end
end
describe '#type' do
it 'returns nil' do
expect(subject.type).to eq(nil)
end
end
describe '#details' do
it 'returns nil' do
expect(subject.details).to eq(nil)
end
end
end

View File

@ -0,0 +1,47 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Audit::Target do
let(:object) { double('object') } # rubocop:disable RSpec/VerifiedDoubles
subject { described_class.new(object) }
describe '#id' do
it 'returns object id' do
allow(object).to receive(:id).and_return(object_id)
expect(subject.id).to eq(object_id)
end
end
describe '#type' do
it 'returns object class name' do
allow(object).to receive_message_chain(:class, :name).and_return('User')
expect(subject.type).to eq('User')
end
end
describe '#details' do
using RSpec::Parameterized::TableSyntax
where(:name, :audit_details, :details) do
'jackie' | 'wanderer' | 'jackie'
'jackie' | nil | 'jackie'
nil | 'wanderer' | 'wanderer'
nil | nil | 'unknown'
end
before do
allow(object).to receive(:name).and_return(name) if name
allow(object).to receive(:audit_details).and_return(audit_details) if audit_details
end
with_them do
it 'returns details' do
expect(subject.details).to eq(details)
end
end
end
end
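
The table above fully pins down the fallback order for #details, so the class under test presumably reads something like this sketch, an inference from the examples rather than the upstream source:

  # Inferred from the spec above; the real class may differ in detail.
  module Gitlab
    module Audit
      class Target
        def initialize(object)
          @object = object
        end

        def id
          @object.id
        end

        def type
          @object.class.name
        end

        def details
          # Fallback order exercised by the table: name, then audit_details, then 'unknown'.
          @object.try(:name) || @object.try(:audit_details) || 'unknown'
        end
      end
    end
  end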

View File

@ -617,6 +617,49 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
describe '#progress' do
subject { migration.progress }
context 'when the migration is finished' do
let(:migration) do
create(:batched_background_migration, :finished, total_tuple_count: 1).tap do |record|
create(:batched_background_migration_job, :succeeded, batched_migration: record, batch_size: 1)
end
end
it 'returns 100' do
expect(subject).to be 100
end
end
context 'when the migration does not have jobs' do
let(:migration) { create(:batched_background_migration, :active) }
it 'returns zero' do
expect(subject).to be 0
end
end
context 'when the `total_tuple_count` is zero' do
let(:migration) { create(:batched_background_migration, :active, total_tuple_count: 0) }
let!(:batched_job) { create(:batched_background_migration_job, :succeeded, batched_migration: migration) }
it 'returns nil' do
expect(subject).to be nil
end
end
context 'when migration has completed jobs' do
let(:migration) { create(:batched_background_migration, :active, total_tuple_count: 100) }
let!(:batched_job) { create(:batched_background_migration_job, :succeeded, batched_migration: migration, batch_size: 8) }
it 'calculates the progress' do
expect(subject).to be 8
end
end
end
describe '.for_configuration' do
let!(:attributes) do
{

View File

@ -30,6 +30,54 @@ RSpec.describe GroupGroupLink do
end
end
describe '.with_owner_or_maintainer_access' do
let_it_be(:group_group_link_maintainer) { create :group_group_link, :maintainer }
let_it_be(:group_group_link_owner) { create :group_group_link, :owner }
let_it_be(:group_group_link_reporter) { create :group_group_link, :reporter }
let_it_be(:group_group_link_guest) { create :group_group_link, :guest }
it 'returns all records which have OWNER or MAINTAINER access' do
expect(described_class.with_owner_or_maintainer_access).to match_array([
group_group_link_maintainer,
group_group_link_owner
])
end
end
context 'access via group shares' do
let_it_be(:shared_with_group_1) { create(:group) }
let_it_be(:shared_with_group_2) { create(:group) }
let_it_be(:shared_with_group_3) { create(:group) }
let_it_be(:shared_group_1) { create(:group) }
let_it_be(:shared_group_2) { create(:group) }
let_it_be(:shared_group_3) { create(:group) }
let_it_be(:shared_group_1_subgroup) { create(:group, parent: shared_group_1) }
before do
create :group_group_link, shared_with_group: shared_with_group_1, shared_group: shared_group_1
create :group_group_link, shared_with_group: shared_with_group_2, shared_group: shared_group_2
create :group_group_link, shared_with_group: shared_with_group_3, shared_group: shared_group_3
end
describe '.groups_accessible_via' do
it 'returns other groups that you can get access to, via the group shares of the specified groups' do
group_ids = [shared_with_group_1.id, shared_with_group_2.id]
expected_result = Group.id_in([shared_group_1.id, shared_group_1_subgroup.id, shared_group_2.id])
expect(described_class.groups_accessible_via(group_ids)).to match_array(expected_result)
end
end
describe '.groups_having_access_to' do
it 'returns all other groups that are having access to these specified groups, via group share' do
group_ids = [shared_group_1.id, shared_group_2.id]
expected_result = Group.id_in([shared_with_group_1.id, shared_with_group_2.id])
expect(described_class.groups_having_access_to(group_ids)).to match_array(expected_result)
end
end
end
describe '.distinct_on_shared_with_group_id_with_group_access' do
let_it_be(:sub_shared_group) { create(:group, parent: shared_group) }
let_it_be(:other_group) { create(:group) }

View File

@ -806,6 +806,20 @@ RSpec.describe Group do
end
end
describe '.project_creation_allowed' do
let_it_be(:group_1) { create(:group, project_creation_level: Gitlab::Access::NO_ONE_PROJECT_ACCESS) }
let_it_be(:group_2) { create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
let_it_be(:group_3) { create(:group, project_creation_level: Gitlab::Access::MAINTAINER_PROJECT_ACCESS) }
let_it_be(:group_4) { create(:group, project_creation_level: nil) }
it 'only includes groups where project creation is allowed' do
result = described_class.project_creation_allowed
expect(result).to include(group_2, group_3, group_4)
expect(result).not_to include(group_1)
end
end
describe 'by_ids_or_paths' do
let(:group_path) { 'group_path' }
let!(:group) { create(:group, path: group_path) }

View File

@ -0,0 +1,154 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe AuditEvents::BuildService do
let(:author) { build_stubbed(:author, current_sign_in_ip: '127.0.0.1') }
let(:deploy_token) { build_stubbed(:deploy_token, user: author) }
let(:scope) { build_stubbed(:group) }
let(:target) { build_stubbed(:project) }
let(:ip_address) { '192.168.8.8' }
let(:message) { 'Added an interesting field from project Gotham' }
let(:additional_details) { { action: :custom } }
subject(:service) do
described_class.new(
author: author,
scope: scope,
target: target,
message: message,
additional_details: additional_details,
ip_address: ip_address
)
end
describe '#execute', :request_store do
subject(:event) { service.execute }
before do
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(ip_address)
end
it 'sets correct attributes', :aggregate_failures do
freeze_time do
expect(event).to have_attributes(
author_id: author.id,
author_name: author.name,
entity_id: scope.id,
entity_type: scope.class.name)
expect(event.details).to eq(
author_name: author.name,
author_class: author.class.name,
target_id: target.id,
target_type: target.class.name,
target_details: target.name,
custom_message: message,
action: :custom)
expect(event.ip_address).to be_nil
expect(event.created_at).to eq(DateTime.current)
end
end
context 'when IP address is not provided' do
let(:ip_address) { nil }
it 'uses author current_sign_in_ip' do
expect(event.ip_address).to be_nil
end
end
context 'when overriding target details' do
subject(:service) do
described_class.new(
author: author,
scope: scope,
target: target,
message: message,
target_details: "This is my target details"
)
end
it 'uses correct target details' do
expect(event.target_details).to eq("This is my target details")
end
end
context 'when deploy token is passed as author' do
let(:service) do
described_class.new(
author: deploy_token,
scope: scope,
target: target,
message: message
)
end
it 'expect author to be user' do
expect(event.author_id).to eq(-2)
expect(event.author_name).to eq(deploy_token.name)
end
end
context 'when deploy key is passed as author' do
let(:deploy_key) { build_stubbed(:deploy_key, user: author) }
let(:service) do
described_class.new(
author: deploy_key,
scope: scope,
target: target,
message: message
)
end
it 'expect author to be deploy key' do
expect(event.author_id).to eq(-3)
expect(event.author_name).to eq(deploy_key.name)
end
end
context 'when author is passed as UnauthenticatedAuthor' do
let(:service) do
described_class.new(
author: ::Gitlab::Audit::UnauthenticatedAuthor.new,
scope: scope,
target: target,
message: message
)
end
it 'sets author as unauthenticated user' do
expect(event.author).to be_an_instance_of(::Gitlab::Audit::UnauthenticatedAuthor)
expect(event.author_name).to eq('An unauthenticated user')
end
end
context 'when attributes are missing' do
context 'when author is missing' do
let(:author) { nil }
it { expect { service }.to raise_error(described_class::MissingAttributeError) }
end
context 'when scope is missing' do
let(:scope) { nil }
it { expect { service }.to raise_error(described_class::MissingAttributeError) }
end
context 'when target is missing' do
let(:target) { nil }
it { expect { service }.to raise_error(described_class::MissingAttributeError) }
end
context 'when message is missing' do
let(:message) { nil }
it { expect { service }.to raise_error(described_class::MissingAttributeError) }
end
end
end
end

View File

@ -13,8 +13,8 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
it_behaves_like 'subscribes to event'
it 'clears the cache with Gitlab::Pages::CacheControl' do
caches.each do |cache_type, cache_id|
expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache_type, id: cache_id) do |cache_control|
caches.each do |cache|
expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache[:type], id: cache[:id]) do |cache_control|
expect(cache_control).to receive(:clear_cache)
end
end
@ -26,27 +26,42 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
it_behaves_like 'clears caches with',
event_class: Pages::PageDeployedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: { namespace: 3, project: 1 }
caches: [
{ type: :namespace, id: 3 },
{ type: :project, id: 1 }
]
it_behaves_like 'clears caches with',
event_class: Pages::PageDeletedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: { namespace: 3, project: 1 }
caches: [
{ type: :namespace, id: 3 },
{ type: :project, id: 1 }
]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectDeletedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: { namespace: 3, project: 1 }
caches: [
{ type: :namespace, id: 3 },
{ type: :project, id: 1 }
]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectCreatedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: { namespace: 3, project: 1 }
caches: [
{ type: :namespace, id: 3 },
{ type: :project, id: 1 }
]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectArchivedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: { namespace: 3, project: 1 }
caches: [
{ type: :namespace, id: 3 },
{ type: :project, id: 1 }
]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectPathChangedEvent,
@ -57,5 +72,40 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
old_path: 'old_path',
new_path: 'new_path'
},
caches: { namespace: 3, project: 1 }
caches: [
{ type: :namespace, id: 3 },
{ type: :project, id: 1 }
]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectTransferedEvent,
event_data: {
project_id: 1,
old_namespace_id: 2,
old_root_namespace_id: 3,
new_namespace_id: 4,
new_root_namespace_id: 5
},
caches: [
{ type: :project, id: 1 },
{ type: :namespace, id: 3 },
{ type: :namespace, id: 5 }
]
context 'when namespace based cache keys are duplicated' do
# de-dups namespace cache keys
it_behaves_like 'clears caches with',
event_class: Projects::ProjectTransferedEvent,
event_data: {
project_id: 1,
old_namespace_id: 2,
old_root_namespace_id: 5,
new_namespace_id: 4,
new_root_namespace_id: 5
},
caches: [
{ type: :project, id: 1 },
{ type: :namespace, id: 5 }
]
end
end
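
Between the ProjectTransferedEvent examples and the de-duplication context, the cache-clearing step these specs pin down plausibly looks like the sketch below. The event-to-cache mapping and the helper name are assumptions, not the upstream worker body.

  # Sketch only; the uniq call provides the de-duplication the last context expects.
  def handle_event(event)
    cache_keys_for(event).uniq.each do |cache|
      ::Gitlab::Pages::CacheControl.new(type: cache[:type], id: cache[:id]).clear_cache
    end
  end

  def cache_keys_for(event)
    if event.is_a?(::Projects::ProjectTransferedEvent)
      [
        { type: :project,   id: event.data[:project_id] },
        { type: :namespace, id: event.data[:old_root_namespace_id] },
        { type: :namespace, id: event.data[:new_root_namespace_id] }
      ]
    else
      [
        { type: :namespace, id: event.data[:root_namespace_id] },
        { type: :project,   id: event.data[:project_id] }
      ]
    end
  end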