Add latest changes from gitlab-org/gitlab@master
parent 36b6291329
commit 7aa165d479
@@ -362,22 +362,6 @@ Layout/ArgumentAlignment:
- 'app/graphql/types/notes/update_diff_image_position_input_type.rb'
- 'app/graphql/types/packages/cleanup/policy_type.rb'
- 'app/graphql/types/packages/file_metadata_type.rb'
- 'app/graphql/types/packages/package_base_type.rb'
- 'app/graphql/types/packages/package_details_type.rb'
- 'app/graphql/types/packages/package_file_type.rb'
- 'app/graphql/types/packages/package_type.rb'
- 'app/graphql/types/permission_types/ci/pipeline_schedules.rb'
- 'app/graphql/types/permission_types/issue.rb'
- 'app/graphql/types/permission_types/merge_request.rb'
- 'app/graphql/types/permission_types/project.rb'
- 'app/graphql/types/project_invitation_type.rb'
- 'app/graphql/types/project_member_type.rb'
- 'app/graphql/types/project_statistics_type.rb'
- 'app/graphql/types/projects/branch_rule_type.rb'
- 'app/graphql/types/projects/fork_details_type.rb'
- 'app/graphql/types/projects/repository_language_type.rb'
- 'app/graphql/types/projects/service_type.rb'
- 'app/graphql/types/projects/services/jira_project_type.rb'
- 'app/graphql/types/projects/services/jira_service_type.rb'
- 'app/graphql/types/projects/topic_type.rb'
- 'app/graphql/types/prometheus_alert_type.rb'
@@ -540,22 +524,6 @@ Layout/ArgumentAlignment:
- 'ee/app/graphql/mutations/incident_management/issuable_resource_link/base.rb'
- 'ee/app/graphql/mutations/incident_management/issuable_resource_link/create.rb'
- 'ee/app/graphql/mutations/incident_management/issuable_resource_link/destroy.rb'
- 'ee/app/graphql/mutations/incident_management/oncall_rotation/base.rb'
- 'ee/app/graphql/mutations/incident_management/oncall_rotation/create.rb'
- 'ee/app/graphql/mutations/incident_management/oncall_rotation/destroy.rb'
- 'ee/app/graphql/mutations/incident_management/oncall_rotation/update.rb'
- 'ee/app/graphql/mutations/incident_management/oncall_schedule/create.rb'
- 'ee/app/graphql/mutations/incident_management/oncall_schedule/destroy.rb'
- 'ee/app/graphql/mutations/incident_management/oncall_schedule/oncall_schedule_base.rb'
- 'ee/app/graphql/mutations/incident_management/oncall_schedule/update.rb'
- 'ee/app/graphql/mutations/instance_security_dashboard/add_project.rb'
- 'ee/app/graphql/mutations/instance_security_dashboard/remove_project.rb'
- 'ee/app/graphql/mutations/issues/common_ee_mutation_arguments.rb'
- 'ee/app/graphql/mutations/issues/promote_to_epic.rb'
- 'ee/app/graphql/mutations/issues/set_epic.rb'
- 'ee/app/graphql/mutations/issues/set_escalation_policy.rb'
- 'ee/app/graphql/mutations/issues/set_iteration.rb'
- 'ee/app/graphql/mutations/issues/set_weight.rb'
- 'ee/app/graphql/mutations/iterations/cadences/destroy.rb'
- 'ee/app/graphql/mutations/iterations/create.rb'
- 'ee/app/graphql/mutations/iterations/update.rb'
@@ -15,7 +15,7 @@ module Types
field :id, ::Types::GlobalIDType[::Packages::Package], null: false, description: 'ID of the package.'
field :_links, Types::Packages::PackageLinksType, null: false, method: :itself,
description: 'Map of links to perform actions on the package.'
field :can_destroy, GraphQL::Types::Boolean,
null: false,
deprecated: {
@@ -11,7 +11,7 @@ module Types
authorize :read_package
field :versions, ::Types::Packages::PackageBaseType.connection_type, null: true,
description: 'Other versions of the package.'
field :package_files, Types::Packages::PackageFileType.connection_type, null: true, method: :installable_package_files, description: 'Package files.'
@@ -11,7 +11,7 @@ module Types
field :download_path, GraphQL::Types::String, null: false, description: 'Download path of the package file.'
field :file_md5, GraphQL::Types::String, null: true, description: 'Md5 of the package file.'
field :file_metadata, Types::Packages::FileMetadataType, null: true,
description: 'File metadata.'
field :file_name, GraphQL::Types::String, null: false, description: 'Name of the package file.'
field :file_sha1, GraphQL::Types::String, null: true, description: 'Sha1 of the package file.'
field :file_sha256, GraphQL::Types::String, null: true, description: 'Sha256 of the package file.'
@@ -9,11 +9,11 @@ module Types
authorize :read_package
field :pipelines,
resolver: Resolvers::PackagePipelinesResolver,
connection_extension: Gitlab::Graphql::Extensions::ExternallyPaginatedArrayExtension,
description: <<-DESC
Pipelines that built the package. Max page size #{Resolvers::PackagePipelinesResolver::MAX_PAGE_SIZE}.
DESC
end
end
end
@@ -7,15 +7,15 @@ module Types
graphql_name 'PipelineSchedulePermissions'
abilities :update_pipeline_schedule,
:admin_pipeline_schedule
ability_field :play_pipeline_schedule, calls_gitaly: true
ability_field :take_ownership_pipeline_schedule,
deprecated: {
reason: 'Use admin_pipeline_schedule permission to determine if the user can take ownership ' \
'of a pipeline schedule',
milestone: '15.9'
}
end
end
end
@@ -7,8 +7,8 @@ module Types
description 'Check permissions for the current user on a issue'
abilities :read_issue, :admin_issue, :update_issue, :reopen_issue,
:read_design, :create_design, :destroy_design,
:create_note, :update_design, :admin_issue_relation
end
end
end
@@ -14,7 +14,7 @@ module Types
revert_on_current_merge_request].freeze
abilities :read_merge_request, :admin_merge_request,
:update_merge_request, :create_note
PERMISSION_FIELDS.each do |field_name|
permission_field field_name, method: :"can_#{field_name}?", calls_gitaly: true
@@ -6,19 +6,19 @@ module Types
graphql_name 'ProjectPermissions'
abilities :change_namespace, :change_visibility_level, :rename_project,
:remove_project, :archive_project, :remove_fork_project,
:remove_pages, :read_project, :create_merge_request_in,
:read_wiki, :read_project_member, :create_issue, :upload_file,
:read_cycle_analytics, :download_code, :download_wiki_code,
:fork_project, :read_commit_status,
:request_access, :create_pipeline, :create_pipeline_schedule,
:create_merge_request_from, :create_wiki, :push_code,
:create_deployment, :push_to_delete_protected_branch,
:admin_wiki, :admin_project, :update_pages,
:admin_remote_mirror, :create_label, :update_wiki, :destroy_wiki,
:create_pages, :destroy_pages, :read_pages_content, :admin_operations,
:read_merge_request, :read_design, :create_design, :destroy_design,
:read_environment, :view_edit_page
permission_field :create_snippet
@@ -12,7 +12,7 @@ module Types
authorize :admin_project
field :project, Types::ProjectType, null: true,
description: 'Project ID for the project of the invitation.'
def project
Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, object.source_id).find
@@ -12,7 +12,7 @@ module Types
authorize :read_project
field :project, Types::ProjectType, null: true,
description: 'Project that User is a member of.'
def project
Gitlab::Graphql::Loaders::BatchModelLoader.new(Project, object.source_id).find
@@ -7,32 +7,32 @@ module Types
authorize :read_statistics
field :commit_count, GraphQL::Types::Float, null: false,
description: 'Commit count of the project.'
field :build_artifacts_size, GraphQL::Types::Float, null: false,
description: 'Build artifacts size of the project in bytes.'
field :container_registry_size,
GraphQL::Types::Float,
null: true,
description: 'Container Registry size of the project in bytes.'
field :lfs_objects_size,
GraphQL::Types::Float,
null: false,
description: 'Large File Storage (LFS) object size of the project in bytes.'
field :packages_size, GraphQL::Types::Float, null: false,
description: 'Packages size of the project in bytes.'
field :pipeline_artifacts_size, GraphQL::Types::Float, null: true,
description: 'CI Pipeline artifacts size in bytes.'
field :repository_size, GraphQL::Types::Float, null: false,
description: 'Repository size of the project in bytes.'
field :snippets_size, GraphQL::Types::Float, null: true,
description: 'Snippets size of the project in bytes.'
field :storage_size, GraphQL::Types::Float, null: false,
description: 'Storage size of the project in bytes.'
field :uploads_size, GraphQL::Types::Float, null: true,
description: 'Uploads size of the project in bytes.'
field :wiki_size, GraphQL::Types::Float, null: true,
description: 'Wiki size of the project in bytes.'
end
end
@@ -10,46 +10,46 @@ module Types
alias_method :branch_rule, :object
field :id, ::Types::GlobalIDType[::Projects::BranchRule],
description: 'ID of the branch rule.'
field :name,
type: GraphQL::Types::String,
null: false,
description: 'Name of the branch rule target. Includes wildcards.'
field :is_default,
type: GraphQL::Types::Boolean,
null: false,
method: :default_branch?,
calls_gitaly: true,
description: "Check if this branch rule protects the project's default branch."
field :is_protected,
type: GraphQL::Types::Boolean,
null: false,
method: :protected?,
description: "Check if this branch rule protects access for the branch."
field :matching_branches_count,
type: GraphQL::Types::Int,
null: false,
calls_gitaly: true,
description: 'Number of existing branches that match this branch rule.'
field :branch_protection,
type: Types::BranchRules::BranchProtectionType,
null: true,
description: 'Branch protections configured for this branch rule.'
field :created_at,
Types::TimeType,
null: true,
description: 'Timestamp of when the branch rule was created.'
field :updated_at,
Types::TimeType,
null: true,
description: 'Timestamp of when the branch rule was last updated.'
end
end
end
@@ -8,26 +8,26 @@ module Types
description 'Details of the fork project compared to its upstream project.'
field :ahead, GraphQL::Types::Int,
null: true,
calls_gitaly: true,
method: :ahead,
description: 'Number of commits ahead of upstream.'
field :behind, GraphQL::Types::Int,
null: true,
calls_gitaly: true,
method: :behind,
description: 'Number of commits behind upstream.'
field :is_syncing, GraphQL::Types::Boolean,
null: true,
method: :syncing?,
description: 'Indicates if there is a synchronization in progress.'
field :has_conflicts, GraphQL::Types::Boolean,
null: true,
method: :has_conflicts?,
description: 'Indicates if the fork conflicts with its upstream project.'
def ahead
counts[:ahead]
@@ -7,13 +7,13 @@ module Types
graphql_name 'RepositoryLanguage'
field :name, GraphQL::Types::String, null: false,
description: 'Name of the repository language.'
field :share, GraphQL::Types::Float, null: true,
description: "Percentage of the repository's languages."
field :color, Types::ColorType, null: true,
description: 'Color to visualize the repository language.'
end
# rubocop: enable Graphql/AuthorizeTypes
end
@@ -10,11 +10,11 @@ module Types
# TODO: Add all the fields that we want to expose for the project services integrations
# https://gitlab.com/gitlab-org/gitlab/-/issues/213088
field :type, GraphQL::Types::String, null: true,
description: 'Class name of the service.'
field :service_type, ::Types::Projects::ServiceTypeEnum, null: true,
description: 'Type of the service.'
field :active, GraphQL::Types::Boolean, null: true,
description: 'Indicates if the service is active.'
def type
enum = ::Types::Projects::ServiceTypeEnum.coerce_result(service_type, context)
@@ -9,12 +9,12 @@ module Types
graphql_name 'JiraProject'
field :key, GraphQL::Types::String, null: false,
description: 'Key of the Jira project.'
field :name, GraphQL::Types::String, null: true,
description: 'Name of the Jira project.'
field :project_id, GraphQL::Types::Int, null: false,
description: 'ID of the Jira project.',
method: :id
end
# rubocop:enable Graphql/AuthorizeTypes
end
@@ -20,14 +20,12 @@ module Achievements
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'priority',
order_expression: ::Achievements::UserAchievement.arel_table[:priority].asc,
- nullable: :nulls_last,
- distinct: false
+ nullable: :nulls_last
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'id',
order_expression: ::Achievements::UserAchievement.arel_table[:id].asc,
- nullable: :not_nullable,
- distinct: true
+ nullable: :not_nullable
)
])
reorder(keyset_order)
@@ -19,14 +19,11 @@ module Ci
self.allow_legacy_sti_class = true
belongs_to :project
belongs_to :trigger_request
has_one :downstream_pipeline, through: :sourced_pipeline, source: :pipeline
validates :ref, presence: true
delegate :trigger_short_token, to: :trigger_request, allow_nil: true
# rubocop:disable Cop/ActiveRecordSerialize
serialize :options
serialize :yaml_variables, ::Gitlab::Serializer::Ci::Variables
@@ -19,7 +19,6 @@ module Ci
belongs_to :project, inverse_of: :builds
belongs_to :runner
belongs_to :trigger_request
belongs_to :erased_by, class_name: 'User'
belongs_to :pipeline,
->(build) { in_partition(build) },

@@ -115,7 +114,6 @@ module Ci
delegate :apple_app_store_integration, to: :project
delegate :google_play_integration, to: :project
delegate :diffblue_cover_integration, to: :project
delegate :trigger_short_token, to: :trigger_request, allow_nil: true
delegate :ensure_persistent_ref, to: :pipeline
delegate :enable_debug_trace!, to: :metadata
@@ -14,8 +14,11 @@ module Ci
has_one :resource, class_name: 'Ci::Resource', foreign_key: 'build_id', inverse_of: :processable
has_one :sourced_pipeline, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_job_id, inverse_of: :source_job
belongs_to :trigger_request
belongs_to :resource_group, class_name: 'Ci::ResourceGroup', inverse_of: :processables
delegate :trigger_short_token, to: :trigger_request, allow_nil: true
accepts_nested_attributes_for :needs
scope :preload_needs, -> { preload(:needs) }
@@ -21,8 +21,8 @@ module Analytics
scope :end_event_is_not_happened_yet, -> { where(end_event_timestamp: nil) }
scope :for_consistency_check_worker, -> (direction) do
keyset_order(
- :end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction), distinct: false, nullable: direction == :asc ? :nulls_last : :nulls_first },
- issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction), nullable: :not_nullable, distinct: true }
+ :end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction), nullable: direction == :asc ? :nulls_last : :nulls_first },
+ issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction), nullable: :not_nullable }
)
end
scope :order_by_end_event, -> (direction) do

@@ -30,9 +30,9 @@ module Analytics
# start_event_timestamp must be included in the ORDER BY clause for the duration
# calculation to work: SELECT end_event_timestamp - start_event_timestamp
keyset_order(
- :end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction), distinct: false, nullable: direction == :asc ? :nulls_last : :nulls_first },
- issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction), distinct: true },
- :start_event_timestamp => { order_expression: arel_order(arel_table[:start_event_timestamp], direction), distinct: false }
+ :end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction), nullable: direction == :asc ? :nulls_last : :nulls_first },
+ issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction) },
+ :start_event_timestamp => { order_expression: arel_order(arel_table[:start_event_timestamp], direction) }
)
end
scope :order_by_end_event_with_db_duration, -> (direction) do

@@ -40,12 +40,12 @@ module Analytics
# start_event_timestamp must be included in the ORDER BY clause for the duration
# calculation to work: SELECT end_event_timestamp - start_event_timestamp
keyset_order(
- :end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction), distinct: false, nullable: direction == :asc ? :nulls_last : :nulls_first },
- issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction), distinct: true },
- :start_event_timestamp => { order_expression: arel_order(arel_table[:start_event_timestamp], direction), distinct: false },
+ :end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction), nullable: direction == :asc ? :nulls_last : :nulls_first },
+ issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction) },
+ :start_event_timestamp => { order_expression: arel_order(arel_table[:start_event_timestamp], direction) },
:duration_in_milliseconds => {
order_expression: arel_order(arel_table[:duration_in_milliseconds], direction),
- distinct: false, nullable: direction == :asc ? :nulls_last : :nulls_first, sql_type: 'bigint'
+ nullable: direction == :asc ? :nulls_last : :nulls_first, sql_type: 'bigint'
}
)
end

@@ -56,11 +56,11 @@ module Analytics
keyset_order(
:duration_in_milliseconds => {
order_expression: arel_order(arel_table[:duration_in_milliseconds], direction),
- distinct: false, nullable: direction == :asc ? :nulls_last : :nulls_first, sql_type: 'bigint'
+ nullable: direction == :asc ? :nulls_last : :nulls_first, sql_type: 'bigint'
},
- issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction), distinct: true },
- :end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction), distinct: true },
- :start_event_timestamp => { order_expression: arel_order(arel_table[:start_event_timestamp], direction), distinct: true }
+ issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction) },
+ :end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction) },
+ :start_event_timestamp => { order_expression: arel_order(arel_table[:start_event_timestamp], direction) }
)
end
end
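As the comments in these hunks note, `start_event_timestamp` must stay in the ORDER BY so the duration can be selected alongside the keyset columns. A rough usage sketch, assuming one of the stage event models that includes this concern (the model name and limit are illustrative only):

```ruby
# Assumed model; any stage event model using these scopes behaves the same.
events = Analytics::CycleAnalytics::MergeRequestStageEvent.order_by_end_event(:asc)

# Approximate ordering produced by the keyset_order call above:
#   ORDER BY end_event_timestamp ASC NULLS LAST,
#            merge_request_id ASC,
#            start_event_timestamp ASC
# which keeps SELECT end_event_timestamp - start_event_timestamp valid for the page.
events.limit(10)
```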
@@ -55,7 +55,6 @@ module Sortable
order_expression: column.send(direction).send(nullable),
reversed_order_expression: column.send(reversed_direction).send(nullable),
order_direction: direction,
- distinct: false,
add_to_projections: true,
nullable: nullable
),
@@ -30,8 +30,7 @@ class CustomEmoji < ApplicationRecord
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'name',
order_expression: CustomEmoji.arel_table[:name].asc,
- nullable: :not_nullable,
- distinct: true
+ nullable: :not_nullable
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'current_namespace',
@@ -99,13 +99,11 @@ class CustomerRelations::Contact < ApplicationRecord
[
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'last_name',
- order_expression: arel_table[:last_name].asc,
- distinct: false
+ order_expression: arel_table[:last_name].asc
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'first_name',
- order_expression: arel_table[:first_name].asc,
- distinct: false
+ order_expression: arel_table[:first_name].asc
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'id',
@@ -174,17 +174,6 @@ class Deployment < ApplicationRecord
find(ids)
end
- # This method returns the deployment records of the last deployment pipeline, that successfully executed for
- # the given environment.
- # e.g.
- # A pipeline contains
- # - deploy job A => production environment
- # - deploy job B => production environment
- # In this case, `last_deployment_group_for_environment` returns both deployments.
- def self.last_deployment_group_for_environment(env)
-   batch_load_deployments_for_environment(env, associated_jobs: :latest_successful_jobs)
- end
# This method returns the *finished deployments* of the *last finished pipeline* for a given environment
# e.g., a finished pipeline contains
# - deploy job A (environment: production, status: success)

@@ -192,7 +181,28 @@ class Deployment < ApplicationRecord
# - deploy job C (environment: production, status: canceled)
# In the above case, `last_finished_deployment_group_for_environment` returns all deployments
def self.last_finished_deployment_group_for_environment(env)
- batch_load_deployments_for_environment(env, associated_jobs: :latest_finished_jobs)
+ return none unless env.latest_finished_jobs.present?
+
+ # this batch loads a collection of deployments associated to `latest_finished_jobs` per `environment`
+ BatchLoader.for(env).batch(key: :latest_finished_jobs, default_value: none) do |environments, loader|
+   job_ids = []
+   environments_hash = {}
+
+   # Preloading the environment's `latest_finished_jobs` avoids N+1 queries.
+   environments.each do |environment|
+     environments_hash[environment.id] = environment
+     job_ids << environment.latest_finished_jobs.map(&:id)
+   end
+
+   Deployment
+     .where(deployable_type: 'CommitStatus', deployable_id: job_ids.flatten)
+     .preload(last_deployment_group_associations)
+     .group_by(&:environment_id)
+     .each do |env_id, deployment_group|
+       loader.call(environments_hash[env_id], deployment_group)
+     end
+ end
end

def self.find_successful_deployment!(iid)

@@ -469,48 +479,6 @@ class Deployment < ApplicationRecord
perform_params.stringify_keys!
end
- # this method batch loads a collection of deployments given:
- # - an `environment`
- # - the environment's `associated_jobs`: jobs within a specific scope, e.g.: `latest_successful_jobs`
- # the `associated_jobs` must be a defined method in the environment model
- #
- # this is used by `last_deployment_group_for_environment` and `last_finished_deployment_group_for_environment`,
- # both methods implement similar batch loading logic, extracted into the below method
- # preloading the environment's `associated_jobs` avoids N+1 queries.
- # the `associated_jobs` is a symbol which corresponds to a collection of jobs associated to an Environment record,
- # i.e.: `Environment#latest_successful_jobs` and `Environment#latest_finished_jobs`
- # the method will be called on an environment object using `public_send`
- #
- # TODO: inline this method to the last_finished_deployment_group_for_environment
- # when environment_stop_actions_include_all_finished_deployments FF is removed
- # - https://gitlab.com/gitlab-org/gitlab/-/issues/435132
- def self.batch_load_deployments_for_environment(env, associated_jobs:)
-   # TODO: replace public_send when inlining method https://gitlab.com/gitlab-org/gitlab/-/issues/435132
-   return none unless env.public_send(associated_jobs).present? # rubocop: disable GitlabSecurity/PublicSend -- see comment above for justification
-
-   BatchLoader.for(env).batch(key: associated_jobs, default_value: none) do |environments, loader|
-     job_ids = []
-     environments_hash = {}
-
-     environments.each do |environment|
-       environments_hash[environment.id] = environment
-
-       # refer comment note above, if not preloaded this can lead to N+1.
-       # TODO: replace public_send when inlining method https://gitlab.com/gitlab-org/gitlab/-/issues/435132
-       job_ids << environment.public_send(associated_jobs).map(&:id) # rubocop: disable GitlabSecurity/PublicSend -- see comment above for justification
-     end
-
-     Deployment
-       .where(deployable_type: 'CommitStatus', deployable_id: job_ids.flatten)
-       .preload(last_deployment_group_associations)
-       .group_by(&:environment_id)
-       .each do |env_id, deployment_group|
-         loader.call(environments_hash[env_id], deployment_group)
-       end
-   end
- end
- private_class_method :batch_load_deployments_for_environment
def self.last_deployment_group_associations
{
deployable: {
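For orientation, a minimal usage sketch of the batch loading above, matching how `Environment#last_finished_deployment_group` and `#stop_actions` consume it elsewhere in this commit; the environment variables are hypothetical, and the point is only that the per-environment groups resolve in a single batched query when first used:

```ruby
# Hypothetical environments; each call returns a lazy BatchLoader proxy, not an array yet.
groups = [staging_env, production_env].map do |env|
  Deployment.last_finished_deployment_group_for_environment(env)
end

# Nothing has been queried so far. The first call that needs real data triggers one
# Deployment query for all collected environments, grouped by environment_id.
groups.flat_map { |group| group.map(&:stop_action) }.compact
```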
@@ -267,18 +267,12 @@ class Environment < ApplicationRecord
last_deployment&.deployable
end
- # TODO: remove this method when environment_stop_actions_include_all_finished_deployments FF is removed
- # - https://gitlab.com/gitlab-org/gitlab/-/issues/435132
+ # TODO: remove this method when legacy_last_deployment_group is removed
+ # in https://gitlab.com/gitlab-org/gitlab/-/issues/439141
def last_deployment_pipeline
last_deployable&.pipeline
end
- # TODO: remove this method when environment_stop_actions_include_all_finished_deployments FF is removed
- # - https://gitlab.com/gitlab-org/gitlab/-/issues/435132
- def latest_successful_jobs
-   last_deployment_pipeline&.latest_successful_jobs
- end
def last_finished_deployable
last_finished_deployment&.deployable
end

@@ -296,7 +290,9 @@ class Environment < ApplicationRecord
# A pipeline contains
# - deploy job A => production environment
# - deploy job B => production environment
- # In this case, `last_deployment_group` returns both deployments, whereas `last_deployable` returns only B.
+ # In this case, `legacy_last_deployment_group` returns both deployments, whereas `last_deployable` returns only B.
#
# TODO: to be removed in https://gitlab.com/gitlab-org/gitlab/-/issues/439141
def legacy_last_deployment_group
return Deployment.none unless last_deployment_pipeline

@@ -402,11 +398,7 @@ class Environment < ApplicationRecord
end

def stop_actions
- if Feature.enabled?(:environment_stop_actions_include_all_finished_deployments, project, type: :gitlab_com_derisk)
-   return last_finished_deployment_group.map(&:stop_action).compact
- end
-
- last_deployment_group.map(&:stop_action).compact
+ last_finished_deployment_group.map(&:stop_action).compact
end
strong_memoize_attr :stop_actions

@@ -414,10 +406,6 @@ class Environment < ApplicationRecord
Deployment.last_finished_deployment_group_for_environment(self)
end
- def last_deployment_group
-   Deployment.last_deployment_group_for_environment(self)
- end
def reset_auto_stop
update_column(:auto_stop_at, nil)
end
@@ -421,8 +421,7 @@ class Issue < ApplicationRecord
attribute_name: 'relative_position',
column_expression: arel_table[:relative_position],
order_expression: Issue.arel_table[:relative_position].asc.nulls_last,
- nullable: :nulls_last,
- distinct: false
+ nullable: :nulls_last
)
end
@@ -90,7 +90,6 @@ class Label < ApplicationRecord
column_expression: order_expression,
order_expression: order_expression.desc,
order_direction: :desc,
- distinct: false,
add_to_projections: true
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
@@ -407,7 +407,6 @@ class MergeRequest < ApplicationRecord
reversed_order_expression: column_expression_with_direction.reverse.nulls_first,
order_direction: direction,
nullable: :nulls_last,
- distinct: false,
add_to_projections: true
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
@@ -46,8 +46,7 @@ module Ml
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'metric_value',
order_expression: metric_order_expression,
- nullable: :nulls_last,
- distinct: false
+ nullable: :nulls_last
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'id',
@@ -681,7 +681,6 @@ class Project < ApplicationRecord
column_expression: order_expression,
order_expression: order_expression.desc,
order_direction: :desc,
- distinct: false,
add_to_projections: true
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
@@ -186,20 +186,17 @@ class Todo < ApplicationRecord
order_expression: asc ? highest_priority_arel.asc.nulls_last : highest_priority_arel.desc.nulls_first,
reversed_order_expression: asc ? highest_priority_arel.desc.nulls_first : highest_priority_arel.asc.nulls_last,
nullable: asc ? :nulls_last : :nulls_first,
- order_direction: asc ? :asc : :desc,
- distinct: false
+ order_direction: asc ? :asc : :desc
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'created_at',
order_expression: asc ? Todo.arel_table[:created_at].asc : Todo.arel_table[:created_at].desc,
- nullable: :not_nullable,
- distinct: false
+ nullable: :not_nullable
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'id',
order_expression: asc ? Todo.arel_table[:id].asc : Todo.arel_table[:id].desc,
- nullable: :not_nullable,
- distinct: true
+ nullable: :not_nullable
)
])
@@ -875,22 +875,19 @@ class User < MainClusterwide::ApplicationRecord
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'users_match_priority',
order_expression: sanitized_order_sql.asc,
- add_to_projections: true,
- distinct: false
+ add_to_projections: true
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'users_name',
order_expression: arel_table[:name].asc,
add_to_projections: true,
- nullable: :not_nullable,
- distinct: false
+ nullable: :not_nullable
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'users_id',
order_expression: arel_table[:id].asc,
add_to_projections: true,
- nullable: :not_nullable,
- distinct: true
+ nullable: :not_nullable
)
])
scope.reorder(order)
@@ -55,14 +55,12 @@ class WorkItem < Issue
attribute_name: :relative_position,
column_expression: WorkItems::ParentLink.arel_table[:relative_position],
order_expression: WorkItems::ParentLink.arel_table[:relative_position].asc.nulls_last,
- nullable: :nulls_last,
- distinct: false
+ nullable: :nulls_last
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: :created_at,
order_expression: WorkItem.arel_table[:created_at].asc,
- nullable: :not_nullable,
- distinct: false
+ nullable: :not_nullable
)
])

@@ -76,8 +74,7 @@ class WorkItem < Issue
attribute_name: 'issue_link_id',
column_expression: IssueLink.arel_table[:id],
order_expression: IssueLink.arel_table[:id].asc,
- nullable: :not_nullable,
- distinct: true
+ nullable: :not_nullable
)
])
end
@@ -71,17 +71,9 @@ class EnvironmentSerializer < BaseSerializer
environment.last_deployment&.commit&.try(:lazy_author)
environment.upcoming_deployment&.commit&.try(:lazy_author)
- # Batch loading last_deployment_group or last_finished_deployment_group,
+ # Batch loading last_finished_deployment_group,
# which is called later by environment.stop_actions
- if Feature.enabled?(
-   :environment_stop_actions_include_all_finished_deployments,
-   environment.project,
-   type: :gitlab_com_derisk
- )
-   environment.last_finished_deployment_group
- else
-   environment.last_deployment_group
- end
+ environment.last_finished_deployment_group
end
end
end
@@ -16,13 +16,8 @@ module WebHooks
def execute(web_hook)
return error(DENIED, 401) unless authorized?(web_hook)
- hook_id = web_hook.id
-
if web_hook.destroy
- WebHooks::LogDestroyWorker.perform_async({ 'hook_id' => hook_id })
- Gitlab::AppLogger.info(log_message(web_hook))
-
- success({ async: false })
+ after_destroy(web_hook)
else
error("Unable to destroy #{web_hook.model_name.human}", 500)
end

@@ -30,6 +25,13 @@ module WebHooks
private

+ def after_destroy(web_hook)
+   WebHooks::LogDestroyWorker.perform_async({ 'hook_id' => web_hook.id })
+   Gitlab::AppLogger.info(log_message(web_hook))
+
+   success({ async: false })
+ end
+
def log_message(hook)
"User #{current_user&.id} scheduled a deletion of logs for hook ID #{hook.id}"
end

@@ -39,3 +41,5 @@ module WebHooks
end
end
end
+
+ WebHooks::DestroyService.prepend_mod_with('WebHooks::DestroyService')
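A rough usage sketch of the service after this refactor. The constructor argument and the shape of the result hash are assumptions (the hunk only shows `execute(web_hook)`, `current_user` inside the service, and the `success`/`error` helpers); the hook variable is hypothetical:

```ruby
# Assumed constructor; the diff only guarantees execute(web_hook) and current_user usage.
result = WebHooks::DestroyService.new(current_user).execute(project_hook)

if result[:status] == :success
  # after_destroy has already enqueued LogDestroyWorker and written the app log entry.
else
  # The error branch carries the message and HTTP status returned by the service.
end
```

With `prepend_mod_with`, an EE module of the same name can now wrap `execute` or `after_destroy` to add EE-only behaviour without changing this CE class.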
@@ -16,7 +16,17 @@ module Deployments
def perform(deployment_id)
if (deploy = Deployment.find_by_id(deployment_id))
LinkMergeRequestsService.new(deploy).execute
after_perform(deploy)
end
end

private

def after_perform(_deployment)
# Overridden in EE
end
end
end

Deployments::LinkMergeRequestWorker.prepend_mod
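A sketch of how an EE override would typically hook into the `after_perform` extension point added above; the module body and file path are hypothetical, only the `prepend_mod` wiring comes from the hunk:

```ruby
# Hypothetical EE counterpart, e.g. ee/app/workers/ee/deployments/link_merge_request_worker.rb;
# prepend_mod picks it up automatically when the EE module is defined.
module EE
  module Deployments
    module LinkMergeRequestWorker
      extend ::Gitlab::Utils::Override

      override :after_perform
      def after_perform(deployment)
        super
        # EE-only follow-up work for the linked deployment would go here.
      end
    end
  end
end
```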
@@ -21,7 +21,6 @@ module SshKeys
attribute_name: 'expires_at_utc',
order_expression: Arel.sql("date(expires_at AT TIME ZONE 'UTC')").asc,
nullable: :not_nullable,
- distinct: false,
add_to_projections: true
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
@@ -0,0 +1,10 @@
---
name: webhook_destroyed
description: Event triggered when a webhook is destroyed.
introduced_by_issue: https://gitlab.com/gitlab-org/gitlab/-/issues/458817
introduced_by_mr: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102342
feature_category: webhooks
milestone: '17.0'
saved_to_database: true
streamed: true
scope: [Project, Group, Instance]
@@ -1,9 +0,0 @@
---
name: environment_stop_actions_include_all_finished_deployments
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/435128
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141216
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/435132
milestone: '16.9'
type: gitlab_com_derisk
group: group::environments
default_enabled: false
@@ -1,6 +1,6 @@
---
key_path: redis_hll_counters.ci_templates.p_ci_templates_security_coverage_fuzzing_latest_weekly
- description: Weekly counts for Coverage Fuzzing latest CI template
+ description: Weekly counts for Coverage Fuzzing latest CI template. Originally monthly count, fixed in 17.0.
product_section: sec
product_stage: secure
product_group: dynamic_analysis

@@ -8,7 +8,7 @@ value_type: number
status: active
milestone: "15.5"
introduced_by_url: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/97886'
- time_frame: 28d
+ time_frame: 7d
data_source: redis_hll
data_category: optional
instrumentation_class: RedisHLLMetric
@@ -1,6 +1,6 @@
---
key_path: redis_hll_counters.runner.g_runner_fleet_read_jobs_statistics_weekly
- description: Count of unique users (weekly) who read runner job statistics
+ description: Count of unique users (weekly) who read runner job statistics. Originally wrong time_frame (28d). Fixed in 17.0
product_section: ops
product_stage: verify
product_group: runner

@@ -8,7 +8,7 @@ value_type: number
status: active
milestone: "15.9"
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/109113
- time_frame: 28d
+ time_frame: 7d
data_source: redis_hll
data_category: optional
instrumentation_class: RedisHLLMetric
@@ -0,0 +1,22 @@
# frozen_string_literal: true

class EnsureBackfillVulnerabilityOccurrencePipelinesIdToBigintIsCompleted < Gitlab::Database::Migration[2.2]
  disable_ddl_transaction!
  restrict_gitlab_migration gitlab_schema: :gitlab_main
  milestone '17.0'

  TABLE_NAME = :vulnerability_occurrence_pipelines

  def up
    ensure_batched_background_migration_is_finished(
      job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
      table_name: TABLE_NAME,
      column_name: 'id',
      job_arguments: [['pipeline_id'], ['pipeline_id_convert_to_bigint']]
    )
  end

  def down
    # no-op
  end
end
@@ -0,0 +1,21 @@
# frozen_string_literal: true

class PrepareAsyncIndexesForVulnerabilityOccurrencePipelinesPipelineToBigint < Gitlab::Database::Migration[2.2]
  milestone '17.0'

  TABLE_NAME = :vulnerability_occurrence_pipelines
  INDEX_NAME = :index_vulnerability_occurrence_pipelines_on_pipeline_id_bigint
  COLUMN_NAME = :pipeline_id_convert_to_bigint
  COMPOSITE_INDEX_NAME = :vulnerability_occurrence_pipelines_on_unique_keys_bigint
  COMPOSITE_INDEX_COLUMNS = [:occurrence_id, :pipeline_id_convert_to_bigint]

  def up
    prepare_async_index TABLE_NAME, COLUMN_NAME, name: INDEX_NAME
    prepare_async_index TABLE_NAME, COMPOSITE_INDEX_COLUMNS, unique: true, name: COMPOSITE_INDEX_NAME
  end

  def down
    unprepare_async_index TABLE_NAME, COLUMN_NAME, name: INDEX_NAME
    unprepare_async_index TABLE_NAME, COMPOSITE_INDEX_COLUMNS, unique: true, name: COMPOSITE_INDEX_NAME
  end
end
@@ -0,0 +1 @@
5919ea8a31c182682cc04fe21d266107b2f7e1cb8b6e8eba18d1be41c442591b

@@ -0,0 +1 @@
f72509525fea4271e7e1a62cc99b8e19ec7866abd05629f3fdcd6e10e61028ba
@@ -514,3 +514,9 @@ Audit event types belong to the following product categories.

| [`secure_ci_job_token_inbound_enabled`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/115350) | Event triggered when CI_JOB_TOKEN permissions enabled for inbound| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [16.0](https://gitlab.com/gitlab-org/gitlab/-/issues/338255) | Project |
| [`secure_ci_job_token_project_added`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/115350) | Event triggered when project added to inbound CI_JOB_TOKEN scope| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [16.0](https://gitlab.com/gitlab-org/gitlab/-/issues/338255) | Project |
| [`secure_ci_job_token_project_removed`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/115350) | Event triggered when project removed from inbound CI_JOB_TOKEN scope| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [16.0](https://gitlab.com/gitlab-org/gitlab/-/issues/338255) | Project |

### Webhooks

| Name | Description | Saved to database | Streamed | Introduced in | Scope |
|:------------|:------------|:------------------|:---------|:--------------|:--------------|
| [`webhook_destroyed`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/102342) | Event triggered when a webhook is destroyed.| **{check-circle}** Yes | **{check-circle}** Yes | GitLab [17.0](https://gitlab.com/gitlab-org/gitlab/-/issues/458817) | Project, Group, Instance |
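This commit only defines the `webhook_destroyed` event type (YAML definition plus the documentation row above); the recording call is not part of this diff. A hedged sketch of how such an event is usually emitted, with the author, scope, and target variables as placeholders:

```ruby
# Illustrative only: where and how the event is actually recorded (likely in EE code
# wrapping WebHooks::DestroyService) is an assumption, not shown in this commit.
Gitlab::Audit::Auditor.audit(
  name: 'webhook_destroyed',
  author: current_user,
  scope: project,          # Project, Group, or Instance, matching the documented scopes
  target: web_hook,
  message: 'Deleted webhook'
)
```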
@@ -2534,3 +2534,43 @@ Example response:

  "token_expires_at": null
}
```

## Upload a current user avatar

> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/148130) in GitLab 17.0.

Upload an avatar to current user.

```plaintext
PUT /user/avatar
```

| Attribute | Type | Required | Description |
|-----------|-------------------|----------|-------------------------------------------------------------------------------------------------------------|
| `avatar` | string | Yes | The file to be uploaded. The ideal image size is 192 x 192 pixels. The maximum file size allowed is 200 KiB. |

To upload an avatar from your file system, use the `--form` argument. This causes
cURL to post data using the header `Content-Type: multipart/form-data`. The
`file=` parameter must point to an image file on your file system and be
preceded by `@`. For example:

Example request:

```shell
curl --request PUT --header "Bearer: <your_access_token>" \
  --form "avatar=@avatar.png" \
  --url "https://gitlab.example.com/api/v4/user/avatar"
```

Returned object:

Returns `400 Bad Request` for file sizes greater than 200 KiB.

If successful, returns [`200`](rest/index.md#status-codes) and the following
response attributes:

```json
{
  "avatar_url": "http://gdk.test:3000/uploads/-/system/user/avatar/76/avatar.png",
}
```
@@ -480,22 +480,11 @@ stop_review:

#### Run a pipeline job when environment is stopped

- > - Feature flag `environment_stop_actions_include_all_finished_deployments` [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/435128) in GitLab 16.9. Disabled by default.
-
- FLAG:
- On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../../administration/feature_flags.md) named `environment_stop_actions_include_all_finished_deployments`.
- On GitLab.com and GitLab Dedicated, this feature is not available.
+ > - Feature flag `environment_stop_actions_include_all_finished_deployments` [removed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150932) in GitLab 17.0.

You can define a stop job for the environment with an [`on_stop` action](../yaml/index.md#environmenton_stop) in the environment's deploy job.

- If `environment_stop_actions_include_all_finished_deployments` is disabled:
-
- - The stop jobs of successful deployments in the latest successful pipeline are run when an environment is stopped.
-
- If `environment_stop_actions_include_all_finished_deployments` is enabled:
-
- - The stop jobs of finished deployments in the latest finished pipeline are run when an environment is stopped.
-
- A deployment or pipeline is _finished_ if it has the successful, canceled, or failed status.
+ The stop jobs of finished deployments in the latest finished pipeline are run when an environment is stopped. A deployment or pipeline is _finished_ if it has the successful, canceled, or failed status.

Prerequisites:
@@ -622,7 +622,6 @@ order = Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'duration_in_seconds',
order_expression: Arel.sql('EXTRACT(EPOCH FROM epics.closed_at - epics.created_at)').desc,
- distinct: false,
sql_type: 'double precision' # important for calculated SQL expressions
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(

@@ -697,8 +696,7 @@ order = Gitlab::Pagination::Keyset::Order.build([
attribute_name: 'projects_name',
order_expression: Issue.arel_table[:projects_name].asc,
sql_type: 'character varying',
- nullable: :nulls_last,
- distinct: false
+ nullable: :nulls_last
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: :id,
@@ -630,14 +630,12 @@ order = Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'merge_request_diff_id',
order_expression: MergeRequestDiffCommit.arel_table[:merge_request_diff_id].asc,
- nullable: :not_nullable,
- distinct: false,
+ nullable: :not_nullable
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'relative_order',
order_expression: MergeRequestDiffCommit.arel_table[:relative_order].asc,
- nullable: :not_nullable,
- distinct: false,
+ nullable: :not_nullable
)
])
MergeRequestDiffCommit.include(FromUnion) # keyset pagination generates UNION queries
@@ -242,14 +242,12 @@ order = Gitlab::Pagination::Keyset::Order.build([
order_expression: Issue.arel_table[:relative_position].desc.nulls_last,
reversed_order_expression: Issue.arel_table[:relative_position].asc.nulls_first,
nullable: :nulls_last,
- order_direction: :desc,
- distinct: false
+ order_direction: :desc
),
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'id',
order_expression: Issue.arel_table[:id].asc,
- nullable: :not_nullable,
- distinct: true
+ nullable: :not_nullable
)
])

@@ -270,7 +268,6 @@ order = Gitlab::Pagination::Keyset::Order.build([
order_expression: Arel.sql('id * 10').asc,
nullable: :not_nullable,
order_direction: :asc,
- distinct: true,
add_to_projections: true
)
])

@@ -296,8 +293,7 @@ order = Gitlab::Pagination::Keyset::Order.build([
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'iid',
order_expression: Issue.arel_table[:iid].asc,
- nullable: :not_nullable,
- distinct: true
+ nullable: :not_nullable
)
])
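Pulling the documentation hunks above together, a minimal sketch of the updated pattern with the `distinct:` option no longer passed; the columns mirror the docs example, and the `.where(...)` scope is only illustrative:

```ruby
# Keyset order over two NOT NULL columns; uniqueness comes from the tie-breaker column,
# so no distinct: option is supplied any more.
order = Gitlab::Pagination::Keyset::Order.build([
  Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
    attribute_name: 'merge_request_diff_id',
    order_expression: MergeRequestDiffCommit.arel_table[:merge_request_diff_id].asc,
    nullable: :not_nullable
  ),
  Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
    attribute_name: 'relative_order',
    order_expression: MergeRequestDiffCommit.arel_table[:relative_order].asc,
    nullable: :not_nullable
  )
])

# Apply the keyset order to a scope, as the model examples in this commit do with reorder.
MergeRequestDiffCommit.where(merge_request_diff_id: diff.id).reorder(order)
```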
@ -58,28 +58,29 @@ To run a DAST authenticated scan:
|
|||
|
||||
### Available CI/CD variables
|
||||
|
||||
| CI/CD variable | Type | Description |
|
||||
|:-----------------------------------------------|:------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `DAST_AUTH_COOKIES` | string | Set to a comma-separated list of cookie names to specify which cookies are used for authentication. |
|
||||
| `DAST_AUTH_REPORT` | boolean | Set to `true` to generate a report detailing steps taken during the authentication process. You must also define `gl-dast-debug-auth-report.html` as a CI job artifact to be able to access the generated report. The report's content aids when debugging authentication failures. |
|
||||
| `DAST_AUTH_TYPE` | string | The authentication type to use. Example: `basic-digest`. |
|
||||
| `DAST_AUTH_URL` | URL | The URL of the page containing the login form on the target website. `DAST_USERNAME` and `DAST_PASSWORD` are submitted with the login form to create an authenticated scan. Example: `https://login.example.com`. |
|
||||
| `DAST_AUTH_VERIFICATION_LOGIN_FORM` | boolean | Verifies successful authentication by checking for the absence of a login form after the login form has been submitted. |
|
||||
| `DAST_AUTH_VERIFICATION_SELECTOR` | [selector](#finding-an-elements-selector) | A selector describing an element whose presence is used to determine if authentication has succeeded after the login form is submitted. Example: `css:.user-photo`. |
|
||||
| `DAST_AUTH_VERIFICATION_URL` | URL | A URL that is compared to the URL in the browser to determine if authentication has succeeded after the login form is submitted. Example: `"https://example.com/loggedin_page"`. |
|
||||
| `DAST_BROWSER_PATH_TO_LOGIN_FORM` | [selector](#finding-an-elements-selector) | A comma-separated list of selectors representing elements to click on prior to entering the `DAST_USERNAME` and `DAST_PASSWORD` into the login form. Example: `"css:.navigation-menu,css:.login-menu-item"`. |
|
||||
| `DAST_EXCLUDE_URLS` | URLs | The URLs to skip during the authenticated scan; comma-separated. Regular expression syntax can be used to match multiple URLs. For example, `.*` matches an arbitrary character sequence. |
|
||||
| `DAST_FIRST_SUBMIT_FIELD` | [selector](#finding-an-elements-selector) | A selector describing the element that is clicked on to submit the username form of a multi-page login process. For example, `css:button[type='user-submit']`. |
|
||||
| `DAST_PASSWORD` | string | The password to authenticate to in the website. Example: `P@55w0rd!` |
|
||||
| `DAST_PASSWORD_FIELD` | [selector](#finding-an-elements-selector) | A selector describing the element used to enter the password on the login form. Example: `id:password` |
|
||||
| `DAST_SUBMIT_FIELD` | [selector](#finding-an-elements-selector) | A selector describing the element clicked on to submit the login form for a single-page login form, or the password form for a multi-page login form. For example, `css:button[type='submit']`. |
|
||||
| `DAST_USERNAME` | string | The username to authenticate to in the website. Example: `admin` |
|
||||
| `DAST_USERNAME_FIELD` | [selector](#finding-an-elements-selector) | A selector describing the element used to enter the username on the login form. Example: `name:username` |
|
||||
| `DAST_AUTH_DISABLE_CLEAR_FIELDS` | boolean | Disables clearing of username and password fields before attempting manual login. Set to `false` by default. |
|
||||
| CI/CD variable | Type | Description |
|
||||
|:-------------------------------------|:--------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `DAST_AUTH_AFTER_LOGIN_ACTIONS` | string | A comma-separated list of actions to be run after login but before login verification. Currently supports "click" actions. Example: `click(on=id:change_to_bar_graph),click(on=css:input[name=username])`. |
|
||||
| `DAST_AUTH_BEFORE_LOGIN_ACTIONS` | [selector](#finding-an-elements-selector) | A comma-separated list of selectors representing elements to click on prior to entering the `DAST_AUTH_USERNAME` and `DAST_AUTH_PASSWORD` into the login form. Example: `"css:.navigation-menu,css:.login-menu-item"`. |
|
||||
| `DAST_AUTH_CLEAR_INPUT_FIELDS` | boolean | Disables clearing of username and password fields before attempting manual login. Set to `false` by default. |
|
||||
| `DAST_AUTH_COOKIE_NAMES` | string | Set to a comma-separated list of cookie names to specify which cookies are used for authentication. |
|
||||
| `DAST_AUTH_FIRST_SUBMIT_FIELD`       | [selector](#finding-an-elements-selector)    | A selector describing the element that is clicked on to submit the username form of a multi-page login process. Example: `css:button[type='user-submit']`.                                                                                                                             |
|
||||
| `DAST_AUTH_PASSWORD_FIELD` | [selector](#finding-an-elements-selector) | A selector describing the element used to enter the password on the login form. Example: `id:password` |
|
||||
| `DAST_AUTH_PASSWORD` | string | The password to authenticate to in the website. Example: `P@55w0rd!`. |
|
||||
| `DAST_AUTH_REPORT` | boolean | Set to `true` to generate a report detailing steps taken during the authentication process. You must also define `gl-dast-debug-auth-report.html` as a CI job artifact to be able to access the generated report. The report's content aids when debugging authentication failures. |
|
||||
| `DAST_AUTH_SUBMIT_FIELD` | [selector](#finding-an-elements-selector) | A selector describing the element clicked on to submit the login form for a single-page login form, or the password form for a multi-page login form. Example: `css:button[type='submit']`. |
|
||||
| `DAST_AUTH_SUCCESS_IF_AT_URL` | URL | A URL that is compared to the URL in the browser to determine if authentication has succeeded after the login form is submitted. Example: `"https://example.com/loggedin_page"`. |
|
||||
| `DAST_AUTH_SUCCESS_IF_ELEMENT_FOUND` | [selector](#finding-an-elements-selector) | A selector describing an element whose presence is used to determine if authentication has succeeded after the login form is submitted. Example: `css:.user-photo`. |
|
||||
| `DAST_AUTH_SUCCESS_IF_NO_LOGIN_FORM` | boolean | Verifies successful authentication by checking for the absence of a login form after the login form has been submitted. |
|
||||
| `DAST_AUTH_TYPE` | string | The authentication type to use. Example: `basic-digest`. |
|
||||
| `DAST_AUTH_URL` | URL | The URL of the page containing the login form on the target website. `DAST_AUTH_USERNAME` and `DAST_AUTH_PASSWORD` are submitted with the login form to create an authenticated scan. Example: `https://login.example.com`. |
|
||||
| `DAST_AUTH_USERNAME_FIELD` | [selector](#finding-an-elements-selector) | A selector describing the element used to enter the username on the login form. Example: `name:username`. |
|
||||
| `DAST_AUTH_USERNAME` | string | The username to authenticate to in the website. Example: `admin`. |
|
||||
| `DAST_SCOPE_EXCLUDE_URLS` | URLs | The URLs to skip during the authenticated scan; comma-separated. Regular expression syntax can be used to match multiple URLs. For example, `.*` matches an arbitrary character sequence. |
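
For example, a minimal authenticated-scan job that combines several of these variables could look like the following sketch. The URLs and selectors are placeholders for your own application, and the element used in `DAST_AUTH_AFTER_LOGIN_ACTIONS` is hypothetical; create `DAST_AUTH_USERNAME` and `DAST_AUTH_PASSWORD` as masked CI/CD variables in the GitLab UI instead of defining them here.

```yaml
dast:
  variables:
    DAST_TARGET_URL: "https://example.com"
    DAST_AUTH_URL: "https://example.com/login"
    DAST_AUTH_USERNAME_FIELD: "name:username"
    DAST_AUTH_PASSWORD_FIELD: "name:password"
    DAST_AUTH_SUBMIT_FIELD: "css:button[type=submit]"
    # Hypothetical example: dismiss a post-login dialog before verification runs
    DAST_AUTH_AFTER_LOGIN_ACTIONS: "click(on=id:accept-cookies)"
    DAST_AUTH_SUCCESS_IF_ELEMENT_FOUND: "css:.user-photo"
```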
|
||||
|
||||
### Update the target website
|
||||
|
||||
The target website, defined using the CI/CD variable `DAST_WEBSITE`, is the URL DAST uses to begin crawling your application.
|
||||
The target website, defined using the CI/CD variable `DAST_TARGET_URL`, is the URL DAST uses to begin crawling your application.
|
||||
|
||||
For best crawl results on an authenticated scan, the target website should be a URL accessible only after the user is authenticated.
|
||||
Often, this is the URL of the page the user lands on after they're logged in.
|
||||
|
|
@ -92,7 +93,7 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com/dashboard/welcome"
|
||||
DAST_TARGET_URL: "https://example.com/dashboard/welcome"
|
||||
DAST_AUTH_URL: "https://example.com/login"
|
||||
```
|
||||
|
||||
|
|
@ -101,7 +102,7 @@ dast:
|
|||
To use an [HTTP authentication scheme](https://www.chromium.org/developers/design-documents/http-authentication/) such as Basic Authentication you can set the `DAST_AUTH_TYPE` value to `basic-digest`.
|
||||
Other schemes such as Negotiate or NTLM may work but aren't officially supported due to current lack of automated test coverage.
|
||||
|
||||
Configuration requires the CI/CD variables `DAST_AUTH_TYPE`, `DAST_AUTH_URL`, `DAST_USERNAME`, `DAST_PASSWORD` to be defined for the DAST job. If you don't have a unique login URL, set `DAST_AUTH_URL` to the same URL as `DAST_WEBSITE`.
|
||||
Configuration requires the CI/CD variables `DAST_AUTH_TYPE`, `DAST_AUTH_URL`, `DAST_AUTH_USERNAME`, `DAST_AUTH_PASSWORD` to be defined for the DAST job. If you don't have a unique login URL, set `DAST_AUTH_URL` to the same URL as `DAST_TARGET_URL`.
|
||||
|
||||
```yaml
|
||||
include:
|
||||
|
|
@ -109,18 +110,18 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_TYPE: "basic-digest"
|
||||
DAST_AUTH_URL: "https://example.com"
|
||||
```
|
||||
|
||||
Do **not** define `DAST_USERNAME` and `DAST_PASSWORD` in the YAML job definition file as this could present a security risk. Instead, create them as masked CI/CD variables using the GitLab UI.
|
||||
Do **not** define `DAST_AUTH_USERNAME` and `DAST_AUTH_PASSWORD` in the YAML job definition file as this could present a security risk. Instead, create them as masked CI/CD variables using the GitLab UI.
|
||||
See [Custom CI/CD variables](../../../../../ci/variables/index.md#for-a-project) for more information.
|
||||
|
||||
### Configuration for a single-step login form
|
||||
|
||||
A single-step login form has all login form elements on a single page.
|
||||
Configuration requires the CI/CD variables `DAST_AUTH_URL`, `DAST_USERNAME`, `DAST_USERNAME_FIELD`, `DAST_PASSWORD`, `DAST_PASSWORD_FIELD`, and `DAST_SUBMIT_FIELD` to be defined for the DAST job.
|
||||
Configuration requires the CI/CD variables `DAST_AUTH_URL`, `DAST_AUTH_USERNAME`, `DAST_AUTH_USERNAME_FIELD`, `DAST_AUTH_PASSWORD`, `DAST_AUTH_PASSWORD_FIELD`, and `DAST_AUTH_SUBMIT_FIELD` to be defined for the DAST job.
|
||||
|
||||
You should set up the URL and selectors of fields in the job definition YAML, for example:
|
||||
|
||||
|
|
@ -130,14 +131,14 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_URL: "https://example.com/login"
|
||||
DAST_USERNAME_FIELD: "css:[name=username]"
|
||||
DAST_PASSWORD_FIELD: "css:[name=password]"
|
||||
DAST_SUBMIT_FIELD: "css:button[type=submit]"
|
||||
DAST_AUTH_USERNAME_FIELD: "css:[name=username]"
|
||||
DAST_AUTH_PASSWORD_FIELD: "css:[name=password]"
|
||||
DAST_AUTH_SUBMIT_FIELD: "css:button[type=submit]"
|
||||
```
|
||||
|
||||
Do **not** define `DAST_USERNAME` and `DAST_PASSWORD` in the YAML job definition file as this could present a security risk. Instead, create them as masked CI/CD variables using the GitLab UI.
|
||||
Do **not** define `DAST_AUTH_USERNAME` and `DAST_AUTH_PASSWORD` in the YAML job definition file as this could present a security risk. Instead, create them as masked CI/CD variables using the GitLab UI.
|
||||
See [Custom CI/CD variables](../../../../../ci/variables/index.md#for-a-project) for more information.
|
||||
|
||||
### Configuration for a multi-step login form
|
||||
|
|
@ -148,12 +149,12 @@ If the username is valid, a second form on the subsequent page has the password
|
|||
Configuration requires the CI/CD variables to be defined for the DAST job:
|
||||
|
||||
- `DAST_AUTH_URL`
|
||||
- `DAST_USERNAME`
|
||||
- `DAST_USERNAME_FIELD`
|
||||
- `DAST_FIRST_SUBMIT_FIELD`
|
||||
- `DAST_PASSWORD`
|
||||
- `DAST_PASSWORD_FIELD`
|
||||
- `DAST_SUBMIT_FIELD`.
|
||||
- `DAST_AUTH_USERNAME`
|
||||
- `DAST_AUTH_USERNAME_FIELD`
|
||||
- `DAST_AUTH_FIRST_SUBMIT_FIELD`
|
||||
- `DAST_AUTH_PASSWORD`
|
||||
- `DAST_AUTH_PASSWORD_FIELD`
|
||||
- `DAST_AUTH_SUBMIT_FIELD`.
|
||||
|
||||
You should set up the URL and selectors of fields in the job definition YAML, for example:
|
||||
|
||||
|
|
@ -163,15 +164,15 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_URL: "https://example.com/login"
|
||||
DAST_USERNAME_FIELD: "css:[name=username]"
|
||||
DAST_FIRST_SUBMIT_FIELD: "css:button[name=next]"
|
||||
DAST_PASSWORD_FIELD: "css:[name=password]"
|
||||
DAST_SUBMIT_FIELD: "css:button[type=submit]"
|
||||
DAST_AUTH_USERNAME_FIELD: "css:[name=username]"
|
||||
DAST_AUTH_FIRST_SUBMIT_FIELD: "css:button[name=next]"
|
||||
DAST_AUTH_PASSWORD_FIELD: "css:[name=password]"
|
||||
DAST_AUTH_SUBMIT_FIELD: "css:button[type=submit]"
|
||||
```
|
||||
|
||||
Do **not** define `DAST_USERNAME` and `DAST_PASSWORD` in the YAML job definition file as this could present a security risk. Instead, create them as masked CI/CD variables using the GitLab UI.
|
||||
Do **not** define `DAST_AUTH_USERNAME` and `DAST_AUTH_PASSWORD` in the YAML job definition file as this could present a security risk. Instead, create them as masked CI/CD variables using the GitLab UI.
|
||||
See [Custom CI/CD variables](../../../../../ci/variables/index.md#for-a-project) for more information.
|
||||
|
||||
### Configuration for Single Sign-On (SSO)
|
||||
|
|
@ -185,7 +186,7 @@ Check the [known limitations](#known-limitations) of DAST authentication to dete
|
|||
|
||||
### Clicking to go to the login form
|
||||
|
||||
Define `DAST_BROWSER_PATH_TO_LOGIN_FORM` to provide a path of elements to click on from the `DAST_AUTH_URL` so that DAST can access the
|
||||
Define `DAST_AUTH_BEFORE_LOGIN_ACTIONS` to provide a path of elements to click on from the `DAST_AUTH_URL` so that DAST can access the
|
||||
login form. This method is suitable for applications that show the login form in a pop-up (modal) window or when the login form does not
|
||||
have a unique URL.
|
||||
|
||||
|
|
@ -197,17 +198,17 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_URL: "https://example.com/login"
|
||||
DAST_BROWSER_PATH_TO_LOGIN_FORM: "css:.navigation-menu,css:.login-menu-item"
|
||||
DAST_AUTH_BEFORE_LOGIN_ACTIONS: "css:.navigation-menu,css:.login-menu-item"
|
||||
```
|
||||
|
||||
### Excluding logout URLs
|
||||
|
||||
If DAST crawls the logout URL while running an authenticated scan, the user is logged out, resulting in the remainder of the scan being unauthenticated.
|
||||
It is therefore recommended to exclude logout URLs using the CI/CD variable `DAST_EXCLUDE_URLS`. DAST isn't accessing any excluded URLs, ensuring the user remains logged in.
|
||||
It is therefore recommended to exclude logout URLs using the CI/CD variable `DAST_SCOPE_EXCLUDE_URLS`. DAST doesn't access excluded URLs, ensuring the user remains logged in.
|
||||
|
||||
Provided URLs can be either absolute URLs, or regular expressions of URL paths relative to the base path of the `DAST_WEBSITE`. For example:
|
||||
Provided URLs can be either absolute URLs, or regular expressions of URL paths relative to the base path of the `DAST_TARGET_URL`. For example:
|
||||
|
||||
```yaml
|
||||
include:
|
||||
|
|
@ -215,8 +216,8 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com/welcome/home"
|
||||
DAST_EXCLUDE_URLS: "https://example.com/logout,/user/.*/logout"
|
||||
DAST_TARGET_URL: "https://example.com/welcome/home"
|
||||
DAST_SCOPE_EXCLUDE_URLS: "https://example.com/logout,/user/.*/logout"
|
||||
```
|
||||
|
||||
### Finding an element's selector
|
||||
|
|
@ -246,7 +247,7 @@ Chrome DevTools element selector tool is an effective way to find a selector.
|
|||
1. Once highlighted, you can see the element's details, including attributes that would make a good candidate for a selector.
|
||||
|
||||
In this example, `id="user_login"` appears to be a good candidate. You can use this as the selector for the DAST username field by setting
|
||||
`DAST_USERNAME_FIELD: "id:user_login"`.
|
||||
`DAST_AUTH_USERNAME_FIELD: "id:user_login"`.
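
As a sketch, the discovered selector could then be used in the job definition as follows; the password and submit selectors below are placeholders for your own login form:

```yaml
dast:
  variables:
    DAST_AUTH_URL: "https://example.com/login"
    DAST_AUTH_USERNAME_FIELD: "id:user_login"          # selector found with the DevTools element picker
    DAST_AUTH_PASSWORD_FIELD: "id:user_password"       # placeholder selector
    DAST_AUTH_SUBMIT_FIELD: "css:button[type=submit]"  # placeholder selector
```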
|
||||
|
||||
#### Choose the right selector
|
||||
|
||||
|
|
@ -287,7 +288,7 @@ DAST tests for the absence of a login form if no verification checks are configu
|
|||
|
||||
#### Verify based on the URL
|
||||
|
||||
Define `DAST_AUTH_VERIFICATION_URL` as the URL displayed in the browser tab after the login form is successfully submitted.
|
||||
Define `DAST_AUTH_SUCCESS_IF_AT_URL` as the URL displayed in the browser tab after the login form is successfully submitted.
|
||||
|
||||
DAST compares the verification URL to the URL in the browser after authentication.
|
||||
If they are not the same, authentication is unsuccessful.
|
||||
|
|
@ -300,13 +301,13 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_AUTH_VERIFICATION_URL: "https://example.com/user/welcome"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_SUCCESS_IF_AT_URL: "https://example.com/user/welcome"
|
||||
```
|
||||
|
||||
#### Verify based on presence of an element
|
||||
|
||||
Define `DAST_AUTH_VERIFICATION_SELECTOR` as a [selector](#finding-an-elements-selector) that finds one or many elements on the page
|
||||
Define `DAST_AUTH_SUCCESS_IF_ELEMENT_FOUND` as a [selector](#finding-an-elements-selector) that finds one or many elements on the page
|
||||
displayed after the login form is successfully submitted. If no element is found, authentication is unsuccessful.
|
||||
Searching for the selector on the page displayed when login fails should return no elements.
|
||||
|
||||
|
|
@ -318,13 +319,13 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_AUTH_VERIFICATION_SELECTOR: "css:.welcome-user"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_SUCCESS_IF_ELEMENT_FOUND: "css:.welcome-user"
|
||||
```
|
||||
|
||||
#### Verify based on absence of a login form
|
||||
|
||||
Define `DAST_AUTH_VERIFICATION_LOGIN_FORM` as `"true"` to indicate that DAST should search for the login form on the
|
||||
Define `DAST_AUTH_SUCCESS_IF_NO_LOGIN_FORM` as `"true"` to indicate that DAST should search for the login form on the
|
||||
page displayed after the login form is successfully submitted. If a login form is still present after logging in, authentication is unsuccessful.
|
||||
|
||||
For example:
|
||||
|
|
@ -335,8 +336,8 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_AUTH_VERIFICATION_LOGIN_FORM: "true"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_SUCCESS_IF_NO_LOGIN_FORM: "true"
|
||||
```
|
||||
|
||||
### Authentication tokens
|
||||
|
|
@ -351,7 +352,7 @@ by the authentication process.
|
|||
DAST considers cookies, local storage and session storage values set with sufficiently "random" values to be authentication tokens.
|
||||
For example, `sessionID=HVxzpS8GzMlPAc2e39uyIVzwACIuGe0H` would be viewed as an authentication token, while `ab_testing_group=A1` would not.
|
||||
|
||||
The CI/CD variable `DAST_AUTH_COOKIES` can be used to specify the names of authentication cookies and bypass the randomness check used by DAST.
|
||||
The CI/CD variable `DAST_AUTH_COOKIE_NAMES` can be used to specify the names of authentication cookies and bypass the randomness check used by DAST.
|
||||
Not only can this make the authentication process more robust, but it can also increase vulnerability check accuracy for checks that
|
||||
inspect authentication tokens.
|
||||
|
||||
|
|
@ -363,8 +364,8 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_AUTH_COOKIES: "sessionID,refreshToken"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_COOKIE_NAMES: "sessionID,refreshToken"
|
||||
```
|
||||
|
||||
## Known limitations
|
||||
|
|
@ -417,11 +418,8 @@ An example configuration where the authentication debug report is exported may l
|
|||
```yaml
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_AUTH_REPORT: "true"
|
||||
artifacts:
|
||||
paths: [gl-dast-debug-auth-report.html]
|
||||
when: always
|
||||
```
|
||||
|
||||
### Known problems
|
||||
|
|
@ -442,7 +440,7 @@ Suggested actions:
|
|||
- Check the target application authentication is deployed and running.
|
||||
- Check the `DAST_AUTH_URL` is correct.
|
||||
- Check the GitLab Runner can access the `DAST_AUTH_URL`.
|
||||
- Check the `DAST_BROWSER_PATH_TO_LOGIN_FORM` is valid if used.
|
||||
- Check the `DAST_AUTH_BEFORE_LOGIN_ACTIONS` is valid if used.
|
||||
|
||||
#### Scan doesn't crawl authenticated pages
|
||||
|
||||
|
|
@ -458,7 +456,7 @@ Suggested actions:
|
|||
|
||||
- Generate the [authentication report](#configure-the-authentication-report) and look at the screenshot from the `Login submit` to verify that the login worked as expected.
|
||||
- Verify the logged authentication tokens are those used by your application.
|
||||
- If using cookies to store authentication tokens, set the names of the authentication token cookies using `DAST_AUTH_COOKIES`.
|
||||
- If using cookies to store authentication tokens, set the names of the authentication token cookies using `DAST_AUTH_COOKIE_NAMES`.
|
||||
|
||||
#### Unable to find elements with selector
|
||||
|
||||
|
|
@ -471,7 +469,7 @@ DAST failed to find the username, password, first submit button, or submit butto
|
|||
Suggested actions:
|
||||
|
||||
- Generate the [authentication report](#configure-the-authentication-report) to use the screenshot from the `Login page` to verify that the page loaded correctly.
|
||||
- Load the login page in a browser and verify the [selectors](#finding-an-elements-selector) configured in `DAST_USERNAME_FIELD`, `DAST_PASSWORD_FIELD`, `DAST_FIRST_SUBMIT_FIELD`, and `DAST_SUBMIT_FIELD` are correct.
|
||||
- Load the login page in a browser and verify the [selectors](#finding-an-elements-selector) configured in `DAST_AUTH_USERNAME_FIELD`, `DAST_AUTH_PASSWORD_FIELD`, `DAST_AUTH_FIRST_SUBMIT_FIELD`, and `DAST_AUTH_SUBMIT_FIELD` are correct.
|
||||
|
||||
#### Failed to authenticate user
|
||||
|
||||
|
|
@ -507,12 +505,12 @@ Suggested actions:
|
|||
- Generate the [authentication report](#configure-the-authentication-report) and verify the `Request` for the `Login submit` is correct.
|
||||
- It's possible that the authentication report `Login submit` request and response are empty. This occurs when there is no request that would result
|
||||
in a full page reload, such as a request made when submitting an HTML form. This can happen when websockets or AJAX are used to submit the login form.
|
||||
- If the page displayed following user authentication genuinely has elements matching the login form selectors, configure `DAST_AUTH_VERIFICATION_URL`
|
||||
or `DAST_AUTH_VERIFICATION_SELECTOR` to use an alternate method of verifying the login attempt.
|
||||
- If the page displayed following user authentication genuinely has elements matching the login form selectors, configure `DAST_AUTH_SUCCESS_IF_AT_URL`
|
||||
or `DAST_AUTH_SUCCESS_IF_ELEMENT_FOUND` to use an alternate method of verifying the login attempt.
|
||||
|
||||
#### Requirement unsatisfied, selector returned no results
|
||||
|
||||
DAST cannot find an element matching the selector provided in `DAST_AUTH_VERIFICATION_SELECTOR` on the page displayed following user login.
|
||||
DAST cannot find an element matching the selector provided in `DAST_AUTH_SUCCESS_IF_ELEMENT_FOUND` on the page displayed following user login.
|
||||
|
||||
```plaintext
|
||||
2022-12-07T06:39:33.239 INF AUTH requirement is unsatisfied, searching DOM using selector returned no results want="has element css:[name=welcome]"
|
||||
|
|
@ -521,11 +519,11 @@ DAST cannot find an element matching the selector provided in `DAST_AUTH_VERIFIC
|
|||
Suggested actions:
|
||||
|
||||
- Generate the [authentication report](#configure-the-authentication-report) and look at the screenshot from the `Login submit` to verify that the expected page is displayed.
|
||||
- Ensure the `DAST_AUTH_VERIFICATION_SELECTOR` [selector](#finding-an-elements-selector) is correct.
|
||||
- Ensure the `DAST_AUTH_SUCCESS_IF_ELEMENT_FOUND` [selector](#finding-an-elements-selector) is correct.
|
||||
|
||||
#### Requirement unsatisfied, browser not at URL
|
||||
|
||||
DAST detected that the page displayed following user login has a URL different to what was expected according to `DAST_AUTH_VERIFICATION_URL`.
|
||||
DAST detected that the page displayed following user login has a URL different to what was expected according to `DAST_AUTH_SUCCESS_IF_AT_URL`.
|
||||
|
||||
```plaintext
|
||||
2022-12-07T11:28:00.241 INF AUTH requirement is unsatisfied, browser is not at URL browser_url="https://example.com/home" want="is at url https://example.com/user/dashboard"
|
||||
|
|
@ -534,7 +532,7 @@ DAST detected that the page displayed following user login has a URL different t
|
|||
Suggested actions:
|
||||
|
||||
- Generate the [authentication report](#configure-the-authentication-report) and look at the screenshot from the `Login submit` to verify that the expected page is displayed.
|
||||
- Ensure the `DAST_AUTH_VERIFICATION_URL` is correct.
|
||||
- Ensure the `DAST_AUTH_SUCCESS_IF_AT_URL` is correct.
|
||||
|
||||
#### Requirement unsatisfied, HTTP login request status code
|
||||
|
||||
|
|
@ -563,4 +561,4 @@ Suggestion actions:
|
|||
|
||||
- Generate the [authentication report](#configure-the-authentication-report) and look at the screenshot from the `Login submit` to verify that the login worked as expected.
|
||||
- Using the browser's developer tools, investigate the cookies and local/session storage objects created while logging in. Ensure there is an authentication token created with a sufficiently random value.
|
||||
- If using cookies to store authentication tokens, set the names of the authentication token cookies using `DAST_AUTH_COOKIES`.
|
||||
- If using cookies to store authentication tokens, set the names of the authentication token cookies using `DAST_AUTH_COOKIE_NAMES`.
|
||||
|
|
|
|||
|
|
@ -50,10 +50,10 @@ By default, URLs matching the host of the target application are considered in-s
|
|||
|
||||
Scope is configured using the following variables:
|
||||
|
||||
- Use `DAST_BROWSER_ALLOWED_HOSTS` to add in-scope hosts.
|
||||
- Use `DAST_BROWSER_IGNORED_HOSTS` to add to out-of-scope hosts.
|
||||
- Use `DAST_BROWSER_EXCLUDED_HOSTS` to add to excluded-from-scope hosts.
|
||||
- Use `DAST_EXCLUDE_URLS` to set specific URLs to be excluded-from-scope.
|
||||
- Use `DAST_SCOPE_ALLOW_HOSTS` to add in-scope hosts.
|
||||
- Use `DAST_SCOPE_IGNORE_HOSTS` to add to out-of-scope hosts.
|
||||
- Use `DAST_SCOPE_EXCLUDE_HOSTS` to add to excluded-from-scope hosts.
|
||||
- Use `DAST_SCOPE_EXCLUDE_URLS` to set specific URLs to be excluded-from-scope.
|
||||
|
||||
Rules:
|
||||
|
||||
|
|
@ -69,11 +69,11 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://my.site.com" # my.site.com URLs are considered in-scope by default
|
||||
DAST_BROWSER_ALLOWED_HOSTS: "api.site.com:8443" # include the API as part of the scan
|
||||
DAST_BROWSER_IGNORED_HOSTS: "analytics.site.com" # explicitly disregard analytics from the scan
|
||||
DAST_BROWSER_EXCLUDED_HOSTS: "ads.site.com" # don't visit any URLs on the ads subdomain
|
||||
DAST_EXCLUDE_URLS: "https://my.site.com/user/logout" # don't visit this URL
|
||||
DAST_TARGET_URL: "https://my.site.com" # my.site.com URLs are considered in-scope by default
|
||||
DAST_SCOPE_ALLOW_HOSTS: "api.site.com:8443" # include the API as part of the scan
|
||||
DAST_SCOPE_IGNORE_HOSTS: "analytics.site.com" # explicitly disregard analytics from the scan
|
||||
DAST_SCOPE_EXCLUDE_HOSTS: "ads.site.com" # don't visit any URLs on the ads subdomain
|
||||
DAST_SCOPE_EXCLUDE_URLS: "https://my.site.com/user/logout" # don't visit this URL
|
||||
```
|
||||
|
||||
## Vulnerability detection
|
||||
|
|
@ -100,14 +100,14 @@ This can come at a cost of increased scan time.
|
|||
|
||||
You can manage the trade-off between coverage and scan time with the following measures; a combined example is shown after this list:
|
||||
|
||||
- Vertically scale the runner and use a higher number of browsers with the [variable](variables.md) `DAST_BROWSER_NUMBER_OF_BROWSERS`. The default is `3`.
|
||||
- Limit the number of actions executed by the browser with the [variable](variables.md) `DAST_BROWSER_MAX_ACTIONS`. The default is `10,000`.
|
||||
- Limit the page depth that the browser-based crawler checks coverage on with the [variable](variables.md) `DAST_BROWSER_MAX_DEPTH`. The crawler uses a breadth-first search strategy, so pages with smaller depth are crawled first. The default is `10`.
|
||||
- Limit the time taken to crawl the target application with the [variable](variables.md) `DAST_BROWSER_CRAWL_TIMEOUT`. The default is `24h`. Scans continue with passive and active checks when the crawler times out.
|
||||
- Build the crawl graph with the [variable](variables.md) `DAST_BROWSER_CRAWL_GRAPH` to see what pages are being crawled.
|
||||
- Prevent pages from being crawled using the [variable](variables.md) `DAST_EXCLUDE_URLS`.
|
||||
- Prevent elements being selected using the [variable](variables.md) `DAST_BROWSER_EXCLUDED_ELEMENTS`. Use with caution, as defining this variable causes an extra lookup for each page crawled.
|
||||
- If the target application has minimal or fast rendering, consider reducing the [variable](variables.md) `DAST_BROWSER_DOM_READY_AFTER_TIMEOUT` to a smaller value. The default is `500ms`.
|
||||
- Vertically scale the runner and use a higher number of browsers with the [variable](variables.md) `DAST_CRAWL_WORKER_COUNT`. The default is `3`.
|
||||
- Limit the number of actions executed by the browser with the [variable](variables.md) `DAST_CRAWL_MAX_ACTIONS`. The default is `10,000`.
|
||||
- Limit the page depth that the browser-based crawler checks coverage on with the [variable](variables.md) `DAST_CRAWL_MAX_DEPTH`. The crawler uses a breadth-first search strategy, so pages with smaller depth are crawled first. The default is `10`.
|
||||
- Limit the time taken to crawl the target application with the [variable](variables.md) `DAST_CRAWL_TIMEOUT`. The default is `24h`. Scans continue with passive and active checks when the crawler times out.
|
||||
- Build the crawl graph with the [variable](variables.md) `DAST_CRAWL_GRAPH` to see what pages are being crawled.
|
||||
- Prevent pages from being crawled using the [variable](variables.md) `DAST_SCOPE_EXCLUDE_URLS`.
|
||||
- Prevent elements being selected using the [variable](variables.md) `DAST_SCOPE_EXCLUDE_ELEMENTS`. Use with caution, as defining this variable causes an extra lookup for each page crawled.
|
||||
- If the target application has minimal or fast rendering, consider reducing the [variable](variables.md) `DAST_PAGE_DOM_STABLE_WAIT` to a smaller value. The default is `500ms`.
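
The following sketch combines a few of these variables in one job. The values are illustrative only and should be tuned to your runner capacity and application size.

```yaml
dast:
  variables:
    DAST_TARGET_URL: "https://my.site.com"
    DAST_BROWSER_SCAN: "true"
    DAST_CRAWL_WORKER_COUNT: "5"        # more concurrent browsers on a larger private runner
    DAST_CRAWL_MAX_DEPTH: "7"           # limit chained actions; shallower pages are crawled first
    DAST_CRAWL_TIMEOUT: "2h"            # cap the crawl phase; passive and active checks still run
    DAST_PAGE_DOM_STABLE_WAIT: "300ms"  # shorter DOM-stability wait for fast-rendering applications
```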
|
||||
|
||||
## Timeouts
|
||||
|
||||
|
|
@ -116,16 +116,16 @@ Due to poor network conditions or heavy application load, the default timeouts m
|
|||
Browser-based scans offer the ability to adjust various timeouts to ensure the scan continues smoothly as it transitions from one page to the next. These values are configured using a [Duration string](https://pkg.go.dev/time#ParseDuration), which allows you to configure durations with a unit suffix: `m` for minutes, `s` for seconds, and `ms` for milliseconds.
|
||||
|
||||
Navigations, or the act of loading a new page, usually require the most time because they are
|
||||
loading multiple new resources such as JavaScript or CSS files. Depending on the size of these resources, or the speed at which they are returned, the default `DAST_BROWSER_NAVIGATION_TIMEOUT` may not be sufficient.
|
||||
loading multiple new resources such as JavaScript or CSS files. Depending on the size of these resources, or the speed at which they are returned, the default `DAST_PAGE_READY_AFTER_NAVIGATION_TIMEOUT` may not be sufficient.
|
||||
|
||||
Stability timeouts, such as those configurable with `DAST_BROWSER_NAVIGATION_STABILITY_TIMEOUT`, `DAST_BROWSER_STABILITY_TIMEOUT`, and `DAST_BROWSER_ACTION_STABILITY_TIMEOUT` can also be configured. Stability timeouts determine when browser-based scans consider
|
||||
Stability timeouts, such as those configurable with `DAST_PAGE_DOM_READY_TIMEOUT` or `DAST_PAGE_READY_AFTER_ACTION_TIMEOUT`, can also be configured. Stability timeouts determine when browser-based scans consider
|
||||
a page fully loaded. Browser-based scans consider a page loaded when:
|
||||
|
||||
1. The [DOMContentLoaded](https://developer.mozilla.org/en-US/docs/Web/API/Document/DOMContentLoaded_event) event has fired.
|
||||
1. There are no open or outstanding requests that are deemed important, such as JavaScript and CSS. Media files are usually deemed unimportant.
|
||||
1. Depending on whether the browser executed a navigation, was forcibly transitioned, or performed an action:
|
||||
|
||||
- There are no new Document Object Model (DOM) modification events after the `DAST_BROWSER_NAVIGATION_STABILITY_TIMEOUT`, `DAST_BROWSER_STABILITY_TIMEOUT`, or `DAST_BROWSER_ACTION_STABILITY_TIMEOUT` durations.
|
||||
- There are no new Document Object Model (DOM) modification events after the `DAST_PAGE_DOM_READY_TIMEOUT` or `DAST_PAGE_READY_AFTER_ACTION_TIMEOUT` durations.
|
||||
|
||||
After these events have occurred, browser-based scans consider the page loaded and ready, and attempt the next action.
|
||||
|
||||
|
|
@ -137,12 +137,10 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://my.site.com"
|
||||
DAST_BROWSER_NAVIGATION_TIMEOUT: "25s"
|
||||
DAST_BROWSER_ACTION_TIMEOUT: "10s"
|
||||
DAST_BROWSER_STABILITY_TIMEOUT: "15s"
|
||||
DAST_BROWSER_NAVIGATION_STABILITY_TIMEOUT: "15s"
|
||||
DAST_BROWSER_ACTION_STABILITY_TIMEOUT: "3s"
|
||||
DAST_TARGET_URL: "https://my.site.com"
|
||||
DAST_PAGE_READY_AFTER_NAVIGATION_TIMEOUT: "45s"
|
||||
DAST_PAGE_READY_AFTER_ACTION_TIMEOUT: "15s"
|
||||
DAST_PAGE_DOM_READY_TIMEOUT: "15s"
|
||||
```
|
||||
|
||||
NOTE:
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ To create the CI/CD job:
|
|||
|
||||
1. Define the URL to be scanned by DAST by using one of these methods:
|
||||
|
||||
- Set the `DAST_WEBSITE` [CI/CD variable](../../../../../ci/yaml/index.md#variables).
|
||||
- Set the `DAST_TARGET_URL` [CI/CD variable](../../../../../ci/yaml/index.md#variables).
|
||||
If set, this value takes precedence.
|
||||
|
||||
- Adding the URL in an `environment_url.txt` file at your project's root is great for testing in
|
||||
|
|
@ -68,6 +68,6 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_WEBSITE: "https://example.com"
|
||||
DAST_TARGET_URL: "https://example.com"
|
||||
DAST_BROWSER_SCAN: "true"
|
||||
```
|
||||
|
|
|
|||
|
|
@ -17,5 +17,5 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_BROWSER_LOG: auth:debug
|
||||
DAST_LOG_CONFIG: auth:debug
|
||||
```
|
||||
|
|
|
|||
|
|
@ -88,8 +88,8 @@ dast:
|
|||
alias: yourapp
|
||||
|
||||
variables:
|
||||
DAST_WEBSITE: https://yourapp
|
||||
DAST_FULL_SCAN_ENABLED: "true" # do a full scan
|
||||
DAST_TARGET_URL: https://yourapp
|
||||
DAST_FULL_SCAN: "true" # do a full scan
|
||||
DAST_BROWSER_SCAN: "true" # use the browser-based GitLab DAST crawler
|
||||
```
|
||||
|
||||
|
|
|
|||
|
|
@ -11,64 +11,64 @@ These CI/CD variables are specific to the browser-based DAST analyzer. They can
|
|||
DAST to your requirements.
|
||||
For authentication CI/CD variables, see [Authentication](authentication.md).
|
||||
|
||||
| CI/CD variable | Type | Example | Description |
|
||||
|:--------------------------------------------|:---------------------------------------------------------|----------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `DAST_ADVERTISE_SCAN` | boolean | `true` | Set to `true` to add a `Via` header to every request sent, advertising that the request was sent as part of a GitLab DAST scan. |
|
||||
| `DAST_AUTH_COOKIES` | string | | Set to a comma-separated list of cookie names to specify which cookies are used for authentication. |
|
||||
| `DAST_AUTH_DISABLE_CLEAR_FIELDS` | boolean | | Disables clearing of username and password fields before attempting manual login. Set to `false` by default. |
|
||||
| `DAST_AUTH_REPORT` | boolean | | Set to `true` to generate a report detailing steps taken during the authentication process. You must also define `gl-dast-debug-auth-report.html` as a CI job artifact to be able to access the generated report. The report's content aids when debugging authentication failures. |
|
||||
| `DAST_AUTH_TYPE` | string | | The authentication type to use. Example: `basic-digest`. |
|
||||
| `DAST_AUTH_URL` | URL | | The URL of the page containing the login form on the target website. `DAST_USERNAME` and `DAST_PASSWORD` are submitted with the login form to create an authenticated scan. Example: `https://login.example.com`. |
|
||||
| `DAST_AUTH_VERIFICATION_LOGIN_FORM` | boolean | | Verifies successful authentication by checking for the absence of a login form after the login form has been submitted. |
|
||||
| `DAST_AUTH_VERIFICATION_SELECTOR` | [selector](authentication.md#finding-an-elements-selector) | | A selector describing an element whose presence is used to determine if authentication has succeeded after the login form is submitted. Example: `css:.user-photo`. |
|
||||
| `DAST_AUTH_VERIFICATION_URL` | URL | | A URL that is compared to the URL in the browser to determine if authentication has succeeded after the login form is submitted. Example: `"https://example.com/loggedin_page"`. |
|
||||
| `DAST_BROWSER_PATH_TO_LOGIN_FORM` | [selector](authentication.md#finding-an-elements-selector) | | A comma-separated list of selectors representing elements to click on prior to entering the `DAST_USERNAME` and `DAST_PASSWORD` into the login form. Example: `"css:.navigation-menu,css:.login-menu-item"`. |
|
||||
| `DAST_BROWSER_ACTION_STABILITY_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `800ms` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis after completing an action. |
|
||||
| `DAST_BROWSER_ACTION_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to complete an action. |
|
||||
| `DAST_BROWSER_ALLOWED_HOSTS` | List of strings | `site.com,another.com` | Hostnames included in this variable are considered in scope when crawled. By default the `DAST_WEBSITE` hostname is included in the allowed hosts list. Headers set using `DAST_REQUEST_HEADERS` are added to every request made to these hostnames. |
|
||||
| `DAST_BROWSER_COOKIES` | dictionary | `abtesting_group:3,region:locked` | A cookie name and value to be added to every request. |
|
||||
| `DAST_BROWSER_CRAWL_GRAPH` | boolean | `true` | Set to `true` to generate an SVG graph of navigation paths visited during crawl phase of the scan. You must also define `gl-dast-crawl-graph.svg` as a CI job artifact to be able to access the generated graph. |
|
||||
| `DAST_BROWSER_CRAWL_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `5m` | The maximum amount of time to wait for the crawl phase of the scan to complete. Defaults to `24h`. |
|
||||
| `DAST_BROWSER_DEVTOOLS_LOG` | string | `Default:messageAndBody,truncate:2000` | Set to log protocol messages between DAST and the Chromium browser. |
|
||||
| `DAST_BROWSER_DOM_READY_AFTER_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `200ms` | Define how long to wait for updates to the DOM before checking a page is stable. Defaults to `500ms`. |
|
||||
| `DAST_BROWSER_ELEMENT_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `600ms` | The maximum amount of time to wait for an element before determining it is ready for analysis. |
|
||||
| `DAST_BROWSER_EXCLUDED_ELEMENTS` | selector | `a[href='2.html'],css:.no-follow` | Comma-separated list of selectors that are ignored when scanning. |
|
||||
| `DAST_BROWSER_EXCLUDED_HOSTS` | List of strings | `site.com,another.com` | Hostnames included in this variable are considered excluded and connections are forcibly dropped. |
|
||||
| `DAST_BROWSER_EXTRACT_ELEMENT_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `5s` | The maximum amount of time to allow the browser to extract newly found elements or navigations. |
|
||||
| `DAST_BROWSER_FILE_LOG` | List of strings | `brows:debug,auth:debug` | A list of modules and their intended logging level for use in the file log. |
|
||||
| `DAST_BROWSER_FILE_LOG_PATH` | string | `/output/browserker.log` | Set to the path of the file log. |
|
||||
| `DAST_BROWSER_IGNORED_HOSTS` | List of strings | `site.com,another.com` | Hostnames included in this variable are accessed, not attacked, and not reported against. |
|
||||
| `DAST_BROWSER_INCLUDE_ONLY_RULES` | List of strings | `16.1,16.2,16.3` | Comma-separated list of check identifiers to use for the scan. |
|
||||
| `DAST_BROWSER_LOG` | List of strings | `brows:debug,auth:debug` | A list of modules and their intended logging level for use in the console log. |
|
||||
| `DAST_BROWSER_LOG_CHROMIUM_OUTPUT` | boolean | `true` | Set to `true` to log Chromium `STDOUT` and `STDERR`. |
|
||||
| `DAST_BROWSER_MAX_ACTIONS` | number | `10000` | The maximum number of actions that the crawler performs. For example, selecting a link, or filling a form. |
|
||||
| `DAST_BROWSER_MAX_DEPTH` | number | `10` | The maximum number of chained actions that the crawler takes. For example, `Click -> Form Fill -> Click` is a depth of three. |
|
||||
| `DAST_BROWSER_MAX_RESPONSE_SIZE_MB` | number | `15` | The maximum size of a HTTP response body. Responses with bodies larger than this are blocked by the browser. Defaults to 10 MB. |
|
||||
| `DAST_BROWSER_NAVIGATION_STABILITY_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis after a navigation completes. Defaults to `800ms`.|
|
||||
| `DAST_BROWSER_NAVIGATION_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `15s` | The maximum amount of time to wait for a browser to navigate from one page to another. |
|
||||
| `DAST_BROWSER_NUMBER_OF_BROWSERS` | number | `3` | The maximum number of concurrent browser instances to use. For instance runners on GitLab.com, we recommended a maximum of three. Private runners with more resources may benefit from a higher number, but are likely to produce little benefit after five to seven instances. |
|
||||
| `DAST_BROWSER_PAGE_LOADING_SELECTOR` | selector | `css:#page-is-loading` | Selector that when is no longer visible on the page, indicates to the analyzer that the page has finished loading and the scan can continue. Cannot be used with `DAST_BROWSER_PAGE_READY_SELECTOR`. |
|
||||
| `DAST_BROWSER_PAGE_READY_SELECTOR` | selector | `css:#page-is-ready` | Selector that when detected as visible on the page, indicates to the analyzer that the page has finished loading and the scan can continue. Cannot be used with `DAST_BROWSER_PAGE_LOADING_SELECTOR`. |
|
||||
| `DAST_BROWSER_PASSIVE_CHECK_WORKERS` | int | `5` | Number of workers that passive scan in parallel. Recommend setting to the number of available CPUs. |
|
||||
| `DAST_BROWSER_SCAN` | boolean | `true` | Required to be `true` to run a browser-based scan. |
|
||||
| `DAST_BROWSER_SEARCH_ELEMENT_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `3s` | The maximum amount of time to allow the browser to search for new elements or user actions. |
|
||||
| `DAST_BROWSER_STABILITY_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis. |
|
||||
| `DAST_EXCLUDE_RULES` | string | `10020,10026` | Set to a comma-separated list of ZAP Vulnerability Rule IDs to exclude them from running during the scan. Rule IDs are numbers and can be found from the DAST log or on the [ZAP project](https://www.zaproxy.org/docs/alerts/). |
|
||||
| `DAST_EXCLUDE_URLS` | URLs | `https://example.com/.*/sign-out` | The URLs to skip during the authenticated scan; comma-separated. Regular expression syntax can be used to match multiple URLs. For example, `.*` matches an arbitrary character sequence. |
|
||||
| `DAST_FF_ENABLE_BAS` | boolean | `true` | Set to `true` to [enable Breach and Attack Simulation](../../../breach_and_attack_simulation/index.md#extend-dynamic-application-security-testing-dast) during this DAST scan. |
|
||||
| `DAST_FIRST_SUBMIT_FIELD` | [selector](authentication.md#finding-an-elements-selector) | | A selector describing the element that is clicked on to submit the username form of a multi-page login process. For example, `css:button[type='user-submit']`. |
|
||||
| `DAST_FULL_SCAN_ENABLED` | boolean | `true` | Set to `true` to run both passive and active checks. Default: `false` |
|
||||
| `DAST_PASSWORD` | string | | The password to authenticate to in the website. Example: `P@55w0rd!` |
|
||||
| `DAST_PASSWORD_FIELD` | [selector](authentication.md#finding-an-elements-selector) | | A selector describing the element used to enter the password on the login form. Example: `id:password` |
|
||||
| `DAST_PATHS` | string | `/page1.html,/category1/page3.html` | Limit the paths scanned to a provided list. Set to a comma-separated list of URL paths relative to `DAST_WEBSITE`. |
|
||||
| `DAST_PATHS_FILE` | string | `/builds/project/urls.txt` | Limit the paths scanned to a provided list. Set to a file path containing a list of URL paths relative to `DAST_WEBSITE`. The file must be plain text with one path per line. |
|
||||
| `DAST_PKCS12_CERTIFICATE_BASE64` | string | `ZGZkZ2p5NGd...` | The PKCS12 certificate used for sites that require Mutual TLS. Must be encoded as base64 text. |
|
||||
| `DAST_PKCS12_PASSWORD` | string | `password` | The password of the certificate used in `DAST_PKCS12_CERTIFICATE_BASE64`. Create sensitive [custom CI/CI variables](../../../../../ci/variables/index.md#define-a-cicd-variable-in-the-ui) using the GitLab UI. |
|
||||
| `DAST_REQUEST_HEADERS` | string | `Cache-control:no-cache` | Set to a comma-separated list of request header names and values. |
|
||||
| `DAST_SKIP_TARGET_CHECK` | boolean | `true` | Set to `true` to prevent DAST from checking that the target is available before scanning. Default: `false`. |
|
||||
| `DAST_SUBMIT_FIELD` | [selector](authentication.md#finding-an-elements-selector) | | A selector describing the element clicked on to submit the login form for a single-page login form, or the password form for a multi-page login form. For example, `css:button[type='submit']`. |
|
||||
| `DAST_TARGET_AVAILABILITY_TIMEOUT` | number | `60` | Time limit in seconds to wait for target availability. |
|
||||
| `DAST_USERNAME` | string | | The username to authenticate to in the website. Example: `admin` |
|
||||
| `DAST_USERNAME_FIELD` | [selector](authentication.md#finding-an-elements-selector) | | A selector describing the element used to enter the username on the login form. Example: `name:username` |
|
||||
| `DAST_WEBSITE` | URL | `https://example.com` | The URL of the website to scan. |
|
||||
| `SECURE_ANALYZERS_PREFIX` | URL | `registry.organization.com` | Set the Docker registry base address from which to download the analyzer. |
|
||||
| CI/CD variable | Type | Example | Description |
|
||||
|:-------------------------------------------|:-----------------------------------------------------------|----------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `DAST_ACTIVE_SCAN_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `3h` | The maximum amount of time to wait for the active scan phase of the scan to complete. Defaults to `3h`. |
|
||||
| `DAST_ACTIVE_SCAN_WORKER_COUNT` | number | `3` | The number of active checks to run in parallel. Defaults to `3`. |
|
||||
| `DAST_AUTH_AFTER_LOGIN_ACTIONS` | string | `click(on=id:remember-me),click(on=css:.continue)` | A comma-separated list of actions to be run after login but before login verification. Currently supports "click" actions. |
|
||||
| `DAST_AUTH_BEFORE_LOGIN_ACTIONS` | [selector](authentication.md#finding-an-elements-selector) | `css:.user,id:show-login-form` | A comma-separated list of selectors representing elements to click on prior to entering the `DAST_AUTH_USERNAME` and `DAST_AUTH_PASSWORD` into the login form. |
|
||||
| `DAST_AUTH_CLEAR_INPUT_FIELDS` | boolean | `true` | Disables clearing of username and password fields before attempting manual login. Set to `false` by default. |
|
||||
| `DAST_AUTH_COOKIE_NAMES` | string | `sessionID,groupName` | Set to a comma-separated list of cookie names to specify which cookies are used for authentication. |
|
||||
| `DAST_AUTH_FIRST_SUBMIT_FIELD` | [selector](authentication.md#finding-an-elements-selector) | `css:input[type=submit]` | A selector describing the element that is clicked on to submit the username form of a multi-page login process. |
|
||||
| `DAST_AUTH_PASSWORD_FIELD` | [selector](authentication.md#finding-an-elements-selector) | `name:password` | A selector describing the element used to enter the password on the login form. |
|
||||
| `DAST_AUTH_PASSWORD` | string | `P@55w0rd!` | The password to authenticate to in the website. |
|
||||
| `DAST_AUTH_REPORT` | boolean | `true` | Set to `true` to generate a report detailing steps taken during the authentication process. You must also define `gl-dast-debug-auth-report.html` as a CI job artifact to be able to access the generated report. The report's content aids when debugging authentication failures. |
|
||||
| `DAST_AUTH_SUBMIT_FIELD` | [selector](authentication.md#finding-an-elements-selector) | `css:input[type=submit]` | A selector describing the element clicked on to submit the login form for a single-page login form, or the password form for a multi-page login form. |
|
||||
| `DAST_AUTH_SUCCESS_IF_AT_URL` | URL | `https://www.site.com/welcome` | A URL that is compared to the URL in the browser to determine if authentication has succeeded after the login form is submitted. |
|
||||
| `DAST_AUTH_SUCCESS_IF_ELEMENT_FOUND` | [selector](authentication.md#finding-an-elements-selector) | `css:.user-avatar` | A selector describing an element whose presence is used to determine if authentication has succeeded after the login form is submitted. |
|
||||
| `DAST_AUTH_SUCCESS_IF_NO_LOGIN_FORM` | boolean | `true` | Verifies successful authentication by checking for the absence of a login form after the login form has been submitted. |
|
||||
| `DAST_AUTH_TYPE` | string | `basic-digest` | The authentication type to use. |
|
||||
| `DAST_AUTH_URL` | URL | `https://site.com/login` | The URL of the page containing the login form on the target website. `DAST_AUTH_USERNAME` and `DAST_AUTH_PASSWORD` are submitted with the login form to create an authenticated scan. |
|
||||
| `DAST_AUTH_USERNAME_FIELD` | [selector](authentication.md#finding-an-elements-selector) | `name:username` | A selector describing the element used to enter the username on the login form. |
|
||||
| `DAST_AUTH_USERNAME` | string | `user@email.com` | The username to authenticate to in the website. |
|
||||
| `DAST_BROWSER_SCAN` | boolean | `true` | Required to be `true` to run a browser-based scan. |
|
||||
| `DAST_CHECKS_TO_EXCLUDE` | string | `552.2,78.1` | Comma-separated list of check identifiers to exclude from the scan. For identifiers, see [vulnerability checks](../checks/index.md). |
|
||||
| `DAST_CHECKS_TO_RUN` | List of strings | `16.1,16.2,16.3` | Comma-separated list of check identifiers to use for the scan. For identifiers, see [vulnerability checks](../checks/index.md). |
|
||||
| `DAST_CRAWL_EXTRACT_ELEMENT_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `5s` | The maximum amount of time to allow the browser to extract newly found elements or navigations. |
|
||||
| `DAST_CRAWL_GRAPH` | boolean | `true` | Set to `true` to generate an SVG graph of navigation paths visited during crawl phase of the scan. You must also define `gl-dast-crawl-graph.svg` as a CI job artifact to be able to access the generated graph. |
|
||||
| `DAST_CRAWL_MAX_ACTIONS` | number | `10000` | The maximum number of actions that the crawler performs. Example actions include selecting a link, or filling a form. |
|
||||
| `DAST_CRAWL_MAX_DEPTH` | number | `10` | The maximum number of chained actions that the crawler takes. For example, `Click -> Form Fill -> Click` is a depth of three. |
|
||||
| `DAST_CRAWL_SEARCH_ELEMENT_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `3s` | The maximum amount of time to allow the browser to search for new elements or user actions. |
|
||||
| `DAST_CRAWL_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `5m` | The maximum amount of time to wait for the crawl phase of the scan to complete. Defaults to `24h`. |
|
||||
| `DAST_CRAWL_WORKER_COUNT`                   | number                                                      | `3`                                                | The maximum number of concurrent browser instances to use. For instance runners on GitLab.com, we recommend a maximum of three. Private runners with more resources may benefit from a higher number, but are likely to produce little benefit after five to seven instances.           |
|
||||
| `DAST_FULL_SCAN` | boolean | `true` | Set to `true` to run both passive and active checks. Default: `false` |
|
||||
| `DAST_LOG_BROWSER_OUTPUT` | boolean | `true` | Set to `true` to log Chromium `STDOUT` and `STDERR`. |
|
||||
| `DAST_LOG_CONFIG` | List of strings | `brows:debug,auth:debug` | A list of modules and their intended logging level for use in the console log. |
|
||||
| `DAST_LOG_DEVTOOLS_CONFIG` | string | `Default:messageAndBody,truncate:2000` | Set to log protocol messages between DAST and the Chromium browser. |
|
||||
| `DAST_LOG_FILE_CONFIG` | List of strings | `brows:debug,auth:debug` | A list of modules and their intended logging level for use in the file log. |
|
||||
| `DAST_LOG_FILE_PATH` | string | `/output/browserker.log` | Set to the path of the file log. |
|
||||
| `DAST_PAGE_DOM_READY_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis after a navigation completes. Defaults to `800ms`. |
|
||||
| `DAST_PAGE_DOM_STABLE_WAIT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `200ms` | Define how long to wait for updates to the DOM before checking a page is stable. Defaults to `500ms`. |
|
||||
| `DAST_PAGE_ELEMENT_READY_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `600ms` | The maximum amount of time to wait for an element before determining it is ready for analysis. |
|
||||
| `DAST_PAGE_IS_LOADING_ELEMENT`              | selector                                                    | `css:#page-is-loading`                             | Selector that, when no longer visible on the page, indicates to the analyzer that the page has finished loading and the scan can continue. Cannot be used with `DAST_PAGE_IS_READY_ELEMENT`.                                                                                            |
|
||||
| `DAST_PAGE_IS_READY_ELEMENT`                | selector                                                    | `css:#page-is-ready`                               | Selector that, when detected as visible on the page, indicates to the analyzer that the page has finished loading and the scan can continue. Cannot be used with `DAST_PAGE_IS_LOADING_ELEMENT`.                                                                                        |
|
||||
| `DAST_PAGE_MAX_RESPONSE_SIZE_MB`            | number                                                      | `15`                                               | The maximum size of an HTTP response body. Responses with bodies larger than this are blocked by the browser. Defaults to 10 MB.                                                                                                                                                        |
|
||||
| `DAST_PAGE_READY_AFTER_ACTION_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `7s` | The maximum amount of time to wait for a browser to consider a page loaded and ready for analysis. |
|
||||
| `DAST_PAGE_READY_AFTER_NAVIGATION_TIMEOUT` | [Duration string](https://pkg.go.dev/time#ParseDuration) | `15s` | The maximum amount of time to wait for a browser to navigate from one page to another. |
|
||||
| `DAST_PASSIVE_SCAN_WORKER_COUNT` | int | `5` | Number of workers that passive scan in parallel. Defaults to the number of available CPUs. |
|
||||
| `DAST_PKCS12_CERTIFICATE_BASE64` | string | `ZGZkZ2p5NGd...` | The PKCS12 certificate used for sites that require Mutual TLS. Must be encoded as base64 text. |
|
||||
| `DAST_PKCS12_PASSWORD`                      | string                                                      | `password`                                         | The password of the certificate used in `DAST_PKCS12_CERTIFICATE_BASE64`. Create sensitive [custom CI/CD variables](../../../../../ci/variables/index.md#define-a-cicd-variable-in-the-ui) using the GitLab UI.                                                                         |
|
||||
| `DAST_REQUEST_ADVERTISE_SCAN` | boolean | `true` | Set to `true` to add a `Via` header to every request sent, advertising that the request was sent as part of a GitLab DAST scan. |
|
||||
| `DAST_REQUEST_COOKIES` | dictionary | `abtesting_group:3,region:locked` | A cookie name and value to be added to every request. |
|
||||
| `DAST_REQUEST_HEADERS` | string | `Cache-control:no-cache` | Set to a comma-separated list of request header names and values. |
|
||||
| `DAST_SCOPE_ALLOW_HOSTS` | List of strings | `site.com,another.com` | Hostnames included in this variable are considered in scope when crawled. By default the `DAST_TARGET_URL` hostname is included in the allowed hosts list. Headers set using `DAST_REQUEST_HEADERS` are added to every request made to these hostnames. |
|
||||
| `DAST_SCOPE_EXCLUDE_ELEMENTS` | selector | `a[href='2.html'],css:.no-follow` | Comma-separated list of selectors that are ignored when scanning. |
|
||||
| `DAST_SCOPE_EXCLUDE_HOSTS` | List of strings | `site.com,another.com` | Hostnames included in this variable are considered excluded and connections are forcibly dropped. |
|
||||
| `DAST_SCOPE_EXCLUDE_URLS` | URLs | `https://site.com/.*/sign-out` | The URLs to skip during the authenticated scan; comma-separated. Regular expression syntax can be used to match multiple URLs. For example, `.*` matches an arbitrary character sequence. |
|
||||
| `DAST_SCOPE_IGNORE_HOSTS` | List of strings | `site.com,another.com` | Hostnames included in this variable are accessed, not attacked, and not reported against. |
|
||||
| `DAST_TARGET_CHECK_SKIP` | boolean | `true` | Set to `true` to prevent DAST from checking that the target is available before scanning. Default: `false`. |
|
||||
| `DAST_TARGET_CHECK_TIMEOUT` | number | `60` | Time limit in seconds to wait for target availability. |
|
||||
| `DAST_TARGET_PATHS_FILE` | string | `/builds/project/urls.txt` | Limit the paths scanned to a provided list. Set to a file path containing a list of URL paths relative to `DAST_TARGET_URL`. The file must be plain text with one path per line. |
|
||||
| `DAST_TARGET_PATHS` | string | `/page1.html,/category1/page3.html` | Limit the paths scanned to a provided list. Set to a comma-separated list of URL paths relative to `DAST_TARGET_URL`. |
|
||||
| `DAST_TARGET_URL` | URL | `https://site.com` | The URL of the website to scan. |
|
||||
| `SECURE_ANALYZERS_PREFIX` | URL | `registry.organization.com` | Set the Docker registry base address from which to download the analyzer. |
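The variables above can be combined in a single DAST job. The following is a minimal sketch; the template include path and all values are illustrative placeholders taken from the example column of the table, not recommendations:

```yaml
# Sketch only: values are placeholders from the table above, and the include
# assumes the standard DAST CI/CD template is available on the instance.
include:
  - template: DAST.gitlab-ci.yml

dast:
  variables:
    DAST_TARGET_URL: "https://site.com"                      # website to scan
    DAST_TARGET_PATHS: "/page1.html,/category1/page3.html"   # limit scanning to these paths
    DAST_REQUEST_HEADERS: "Cache-control:no-cache"           # header added to every request
    DAST_SCOPE_EXCLUDE_URLS: "https://site.com/.*/sign-out"  # never crawl sign-out URLs
    DAST_PAGE_MAX_RESPONSE_SIZE_MB: "15"                     # allow larger response bodies
```

Sensitive values such as `DAST_PKCS12_PASSWORD` should be created as masked custom CI/CD variables in the UI rather than written into the YAML.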
|
||||
|
|
|
|||
|
|
@ -13,6 +13,10 @@ DETAILS:
|
|||
|
||||
> - [Generally available](https://gitlab.com/groups/gitlab-org/-/epics/9023) in GitLab 15.7 (GitLab DAST v3.0.50).
|
||||
|
||||
WARNING:
|
||||
The DAST version 4 browser-based analyzer is replaced by DAST version 5 in GitLab 17.0.
|
||||
For instructions on how to migrate to DAST version 5, see the [migration guide](../browser_based_4_to_5_migration_guide.md).
|
||||
|
||||
Browser-based DAST helps you identify security weaknesses (CWEs) in your web applications. After you
|
||||
deploy your web application, it becomes exposed to new types of attacks, many of which cannot be
|
||||
detected prior to deployment. For example, misconfigurations of your application server or incorrect
|
||||
|
|
|
|||
|
|
@ -43,10 +43,10 @@ Knowing the outcome you expect, try to replicate it manually using a browser on
|
|||
- In Firefox: `Tools -> Browser Tools -> Web Developer Tools`.
|
||||
- If authenticating (see the configuration sketch after this list):
|
||||
- Go to the `DAST_AUTH_URL`.
|
||||
- Type in the `DAST_USERNAME` in the `DAST_USERNAME_FIELD`.
|
||||
- Type in the `DAST_PASSWORD` in the `DAST_PASSWORD_FIELD`.
|
||||
- Select the `DAST_SUBMIT_FIELD`.
|
||||
- Select links and fill in forms. Go to the pages that aren't scanning correctly.
|
||||
- Type in the `DAST_AUTH_USERNAME` in the `DAST_AUTH_USERNAME_FIELD`.
|
||||
- Type in the `DAST_AUTH_PASSWORD` in the `DAST_AUTH_PASSWORD_FIELD`.
|
||||
- Select the `DAST_AUTH_SUBMIT_FIELD`.
|
||||
- Select links and fill in forms. Navigate to the pages that aren't scanning correctly.
|
||||
- Observe how your application behaves. Notice if there is anything that might cause problems for an automated scanner.
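The authentication steps above map directly onto the `DAST_AUTH_*` variables. A minimal sketch of the corresponding job configuration follows; the URL and the `css:` selectors are placeholders for your application, and the password should come from a masked CI/CD variable rather than the YAML file:

```yaml
# Sketch only: the URL and selectors below are placeholders.
dast:
  variables:
    DAST_AUTH_URL: "https://site.com/sign-in"
    DAST_AUTH_USERNAME: "user@site.com"
    DAST_AUTH_USERNAME_FIELD: "css:#username"
    DAST_AUTH_PASSWORD_FIELD: "css:#password"
    DAST_AUTH_SUBMIT_FIELD: "css:button[type='submit']"
    # DAST_AUTH_PASSWORD is defined as a masked CI/CD variable in the UI.
```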
|
||||
|
||||
### Any reason why DAST would not work?
|
||||
|
|
@ -98,7 +98,7 @@ For example, the following log entry has level `INFO`, is part of the `CRAWL` lo
|
|||
### Log destination
|
||||
|
||||
Logs are sent either to file or to console (the CI/CD job log). You can configure each destination to accept different logs using
|
||||
the environment variables `DAST_BROWSER_LOG` for console logs and `DAST_BROWSER_FILE_LOG` for file logs.
|
||||
the environment variables `DAST_LOG_CONFIG` for console logs and `DAST_LOG_FILE_CONFIG` for file logs.
|
||||
|
||||
For example:
|
||||
|
||||
|
|
@ -109,13 +109,8 @@ include:
|
|||
dast:
|
||||
variables:
|
||||
DAST_BROWSER_SCAN: "true"
|
||||
DAST_BROWSER_LOG: "auth:debug" # console log defaults to INFO level, logs AUTH module at DEBUG
|
||||
DAST_BROWSER_FILE_LOG: "loglevel:debug,cache:warn" # file log defaults to DEBUG level, logs CACHE module at WARN
|
||||
DAST_BROWSER_FILE_LOG_PATH: "$CI_PROJECT_DIR/dast-scan.log" # Save the file log in the project directory so it can be recognized as an artifact
|
||||
artifacts:
|
||||
paths:
|
||||
- dast-scan.log
|
||||
when: always
|
||||
DAST_LOG_CONFIG: "auth:debug" # console log defaults to INFO level, logs AUTH module at DEBUG
|
||||
DAST_LOG_FILE_CONFIG: "loglevel:debug,cache:warn" # file log defaults to DEBUG level, logs CACHE module at WARN
|
||||
```
|
||||
|
||||
### Log levels
|
||||
|
|
@ -170,7 +165,7 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_BROWSER_LOG: "crawl:debug"
|
||||
DAST_LOG_CONFIG: "crawl:debug"
|
||||
```
|
||||
|
||||
For example, the following output shows that four anchor links were discovered during the crawl of the page at `https://example.com`.
|
||||
|
|
@ -206,7 +201,7 @@ To log all DevTools messages, turn the `CHROM` log module to `trace` and configu
|
|||
### Customizing DevTools log levels
|
||||
|
||||
Chrome DevTools requests, responses, and events are namespaced by domain. DAST allows a different logging configuration for each domain, and for each individual message within a domain.
|
||||
The environment variable `DAST_BROWSER_DEVTOOLS_LOG` accepts a semi-colon separated list of logging configurations.
|
||||
The environment variable `DAST_LOG_DEVTOOLS_CONFIG` accepts a semi-colon separated list of logging configurations.
|
||||
Logging configurations are declared using the structure `[domain/message]:[what-to-log][,truncate:[max-message-size]]`.
|
||||
|
||||
- `domain/message` references what is being logged.
|
||||
|
|
@ -230,13 +225,8 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_BROWSER_FILE_LOG: "chrom:trace"
|
||||
DAST_BROWSER_FILE_LOG_PATH: "/zap/wrk/dast-scan.log"
|
||||
DAST_BROWSER_DEVTOOLS_LOG: "Default:messageAndBody,truncate:2000"
|
||||
artifacts:
|
||||
paths:
|
||||
- dast-scan.log
|
||||
when: always
|
||||
DAST_LOG_FILE_CONFIG: "chrom:trace"
|
||||
DAST_LOG_DEVTOOLS_CONFIG: "Default:messageAndBody,truncate:2000"
|
||||
```
|
||||
|
||||
### Example - log HTTP messages
|
||||
|
|
@ -250,19 +240,14 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_BROWSER_FILE_LOG: "chrom:trace"
|
||||
DAST_BROWSER_FILE_LOG_PATH: "/zap/wrk/dast-scan.log"
|
||||
DAST_BROWSER_DEVTOOLS_LOG: "Default:suppress;Fetch:messageAndBody,truncate:2000;Network:messageAndBody,truncate:2000;Log:messageAndBody,truncate:2000;Console:messageAndBody,truncate:2000"
|
||||
artifacts:
|
||||
paths:
|
||||
- dast-scan.log
|
||||
when: always
|
||||
DAST_LOG_FILE_CONFIG: "chrom:trace"
|
||||
DAST_LOG_DEVTOOLS_CONFIG: "Default:suppress;Fetch:messageAndBody,truncate:2000;Network:messageAndBody,truncate:2000;Log:messageAndBody,truncate:2000;Console:messageAndBody,truncate:2000"
|
||||
```
|
||||
|
||||
## Chromium logs
|
||||
|
||||
In the rare event that Chromium crashes, it can be helpful to write the Chromium process `STDOUT` and `STDERR` to log.
|
||||
Setting the environment variable `DAST_BROWSER_LOG_CHROMIUM_OUTPUT` to `true` achieves this purpose.
|
||||
Setting the environment variable `DAST_LOG_BROWSER_OUTPUT` to `true` achieves this purpose.
|
||||
|
||||
DAST starts and stops many Chromium processes. DAST sends each process's output to all log destinations with the log module `LEASE` and log level `INFO`.
|
||||
|
||||
|
|
@ -274,7 +259,7 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_BROWSER_LOG_CHROMIUM_OUTPUT: "true"
|
||||
DAST_LOG_BROWSER_OUTPUT: "true"
|
||||
```
|
||||
|
||||
## Known problems
|
||||
|
|
@ -291,7 +276,7 @@ An example log is as follows, where DAST blocked the JavaScript file found at `h
|
|||
2022-12-05T06:28:58.104 WRN CONTA request failed, attempting to continue scan error=net::ERR_BLOCKED_BY_RESPONSE index=0 requestID=38.2 url=https://example.com/large.js
|
||||
```
|
||||
|
||||
This can be changed using the configuration `DAST_MAX_RESPONSE_SIZE_MB`. For example,
|
||||
This can be changed using the configuration `DAST_PAGE_MAX_RESPONSE_SIZE_MB`. For example,
|
||||
|
||||
```yaml
|
||||
include:
|
||||
|
|
@ -299,5 +284,5 @@ include:
|
|||
|
||||
dast:
|
||||
variables:
|
||||
DAST_MAX_RESPONSE_SIZE_MB: "25"
|
||||
DAST_PAGE_MAX_RESPONSE_SIZE_MB: "25"
|
||||
```
|
||||
|
|
|
|||
|
|
@ -11,9 +11,11 @@ DETAILS:
|
|||
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
|
||||
|
||||
WARNING:
|
||||
Proxy-based DAST was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/430966) in GitLab
|
||||
16.9 and is planned for removal in 17.0. Use [browser-based DAST](browser_based.md) instead. This
|
||||
change is a breaking change.
|
||||
The DAST proxy-based analyzer was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/430966) in GitLab 16.9 and
|
||||
is replaced by DAST version 5 in GitLab 17.0. This change is a breaking change. For instructions on how to migrate from
|
||||
the DAST proxy-based analyzer to DAST version 5, see the [proxy-based migration guide](proxy_based_to_browser_based_migration_guide.md).
|
||||
For instructions on how to migrate from the DAST version 4 browser-based analyzer to DAST version 5,
|
||||
see the [browser-based migration guide](browser_based_4_to_5_migration_guide.md).
|
||||
|
||||
Dynamic Application Security Testing (DAST) runs automated penetration tests to find vulnerabilities
|
||||
in your web applications and APIs as they are running. DAST automates a hacker’s approach and
|
||||
|
|
|
|||
|
|
@ -12,7 +12,9 @@ DETAILS:
|
|||
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
|
||||
|
||||
WARNING:
|
||||
This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/430966) in GitLab 16.9 and will be removed in 17.0. Use [browser-based DAST](browser_based.md) instead. This change is a breaking change.
|
||||
The DAST proxy-based analyzer was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/430966) in GitLab 16.9 and
|
||||
is replaced by DAST version 5 in GitLab 17.0. This change is a breaking change. For instructions on how to migrate
|
||||
to DAST version 5, see the [migration guide](proxy_based_to_browser_based_migration_guide.md).
|
||||
|
||||
The DAST proxy-based analyzer can be added to your [GitLab CI/CD](../../../ci/index.md) pipeline.
|
||||
This helps you discover vulnerabilities in web applications that do not use JavaScript heavily. For applications that do,
|
||||
|
|
|
|||
|
|
@ -1399,6 +1399,27 @@ module API
|
|||
present current_user.status || {}, with: Entities::UserStatus
|
||||
end
|
||||
|
||||
desc 'Set the avatar of the current user' do
|
||||
success Entities::Avatar
|
||||
detail 'This feature was introduced in GitLab 17.0.'
|
||||
end
|
||||
params do
|
||||
requires :avatar, type: ::API::Validations::Types::WorkhorseFile, desc: 'The avatar file (generated by Multipart middleware)', documentation: { type: 'file' }
|
||||
end
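# The avatar arrives as a Workhorse multipart upload and is attached through
# Users::UpdateService below; validation failures (for example, an oversized
# avatar) are surfaced to the client through render_api_error!.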
|
||||
put "avatar", feature_category: :user_profile do
|
||||
update_params = {
|
||||
avatar: declared_params[:avatar],
|
||||
user: current_user
|
||||
}
|
||||
result = ::Users::UpdateService.new(current_user, update_params).execute
|
||||
|
||||
if result[:status] == :success
|
||||
present current_user, with: Entities::Avatar
|
||||
else
|
||||
render_api_error!(result[:message], result[:reason] || :bad_request)
|
||||
end
|
||||
end
|
||||
|
||||
resource :personal_access_tokens do
|
||||
desc 'Create a personal access token with limited scopes for the currently authenticated user' do
|
||||
detail 'This feature was introduced in GitLab 16.5'
|
||||
|
|
|
|||
|
|
@ -136,8 +136,7 @@ class EventFilter
|
|||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: order_hint_column,
|
||||
order_expression: Event.arel_table[order_hint_column].desc,
|
||||
nullable: :nulls_last,
|
||||
distinct: false
|
||||
nullable: :nulls_last
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: :id,
|
||||
|
|
|
|||
|
|
@ -227,8 +227,7 @@ module Gitlab
|
|||
order_expression: order_expression,
|
||||
reversed_order_expression: reverse_order_expression,
|
||||
order_direction: direction,
|
||||
nullable: nulls_position,
|
||||
distinct: false
|
||||
nullable: nulls_position
|
||||
),
|
||||
::Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: klass.primary_key,
|
||||
|
|
|
|||
|
|
@ -60,13 +60,6 @@ module Gitlab
|
|||
# my_record = Issue.select(:id, 'LOWER(title) as lowercase_title').last
|
||||
# value = my_record[:lowercase_title]
|
||||
#
|
||||
# **distinct**
|
||||
#
|
||||
# Boolean value.
|
||||
#
|
||||
# Tells us whether the database column contains only distinct values. If the column is covered by
|
||||
# a unique index then set to true.
|
||||
#
|
||||
# **nullable** (:not_nullable | :nulls_last | :nulls_first)
|
||||
#
|
||||
# Tells us whether the database column is nullable or not. This information can be
|
||||
|
|
@ -137,13 +130,12 @@ module Gitlab
|
|||
attr_reader :attribute_name, :column_expression, :order_expression, :add_to_projections, :order_direction
|
||||
|
||||
# rubocop: disable Metrics/ParameterLists
|
||||
def initialize(attribute_name:, order_expression:, column_expression: nil, reversed_order_expression: nil, nullable: :not_nullable, distinct: true, order_direction: nil, sql_type: nil, add_to_projections: false)
|
||||
def initialize(attribute_name:, order_expression:, column_expression: nil, reversed_order_expression: nil, nullable: :not_nullable, order_direction: nil, sql_type: nil, add_to_projections: false)
|
||||
@attribute_name = attribute_name
|
||||
@order_expression = order_expression
|
||||
@column_expression = column_expression || calculate_column_expression(order_expression)
|
||||
@distinct = distinct
|
||||
@reversed_order_expression = reversed_order_expression || calculate_reversed_order(order_expression)
|
||||
@nullable = parse_nullable(nullable, distinct)
|
||||
@nullable = parse_nullable(nullable)
|
||||
@order_direction = parse_order_direction(order_expression, order_direction)
|
||||
@sql_type = sql_type
|
||||
@add_to_projections = add_to_projections
|
||||
|
|
@ -157,7 +149,6 @@ module Gitlab
|
|||
order_expression: reversed_order_expression,
|
||||
reversed_order_expression: order_expression,
|
||||
nullable: not_nullable? ? :not_nullable : REVERSED_NULL_POSITIONS[nullable],
|
||||
distinct: distinct,
|
||||
order_direction: REVERSED_ORDER_DIRECTIONS[order_direction]
|
||||
)
|
||||
end
|
||||
|
|
@ -186,10 +177,6 @@ module Gitlab
|
|||
!not_nullable?
|
||||
end
|
||||
|
||||
def distinct?
|
||||
distinct
|
||||
end
|
||||
|
||||
def order_direction_as_sql_string
|
||||
sql_string = ascending_order? ? +'ASC' : +'DESC'
|
||||
|
||||
|
|
@ -210,7 +197,7 @@ module Gitlab
|
|||
|
||||
private
|
||||
|
||||
attr_reader :reversed_order_expression, :nullable, :distinct
|
||||
attr_reader :reversed_order_expression, :nullable
|
||||
|
||||
def calculate_reversed_order(order_expression)
|
||||
unless order_expression.is_a?(Arel::Nodes::Ordering)
|
||||
|
|
@ -242,15 +229,11 @@ module Gitlab
|
|||
transformed_order_direction
|
||||
end
|
||||
|
||||
def parse_nullable(nullable, distinct)
|
||||
def parse_nullable(nullable)
|
||||
if ALLOWED_NULLABLE_VALUES.exclude?(nullable)
|
||||
raise "Invalid `nullable` is given (value: #{nullable}), the allowed values are: #{ALLOWED_NULLABLE_VALUES.join(', ')}"
|
||||
end
|
||||
|
||||
if nullable != :not_nullable && distinct
|
||||
raise 'Invalid column definition, `distinct` and `nullable` columns are not allowed at the same time'
|
||||
end
|
||||
|
||||
nullable
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -58,7 +58,6 @@ module Gitlab
|
|||
# attribute_name: :created_at,
|
||||
# column_expression: Project.arel_table[:created_at],
|
||||
# order_expression: Project.arel_table[:created_at].asc,
|
||||
# distinct: false, # values in the column are not unique
|
||||
# nullable: :nulls_last # we might see NULL values (bottom)
|
||||
# ),
|
||||
# Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
|
|
|
|||
|
|
@ -139,8 +139,7 @@ module Gitlab
|
|||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: attribute_name,
|
||||
order_expression: order_value,
|
||||
nullable: nullability(order_value, attribute_name),
|
||||
distinct: false
|
||||
nullable: nullability(order_value, attribute_name)
|
||||
)
|
||||
end
|
||||
|
||||
|
|
@ -153,8 +152,7 @@ module Gitlab
|
|||
order_expression: order_value,
|
||||
reversed_order_expression: order_value.reverse,
|
||||
order_direction: order_value.expr.direction,
|
||||
nullable: order_value.is_a?(Arel::Nodes::NullsLast) ? :nulls_last : :nulls_first,
|
||||
distinct: false
|
||||
nullable: order_value.is_a?(Arel::Nodes::NullsLast) ? :nulls_last : :nulls_first
|
||||
)
|
||||
end
|
||||
|
||||
|
|
@ -165,8 +163,7 @@ module Gitlab
|
|||
attribute_name: attribute_name,
|
||||
column_expression: Arel::Nodes::NamedFunction.new("LOWER", [model_class.arel_table[attribute_name]]),
|
||||
order_expression: order_value,
|
||||
nullable: nullability(order_value, attribute_name),
|
||||
distinct: false
|
||||
nullable: nullability(order_value, attribute_name)
|
||||
)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -396,34 +396,15 @@ RSpec.describe 'Environments page', :js, feature_category: :continuous_delivery
|
|||
build = create(:ci_build, status, pipeline: pipeline)
|
||||
create(:ci_build, :manual, project: project, pipeline: pipeline, name: 'stop_action')
|
||||
create(:deployment, status, environment: environment, deployable: build, sha: project.commit.id, on_stop: 'stop_action')
|
||||
|
||||
visit_environments(project)
|
||||
end
|
||||
|
||||
context 'when :environment_stop_actions_include_all_finished_deployments FF is enabled' do
|
||||
before do
|
||||
visit_environments(project)
|
||||
end
|
||||
it 'shows a stop action dialog without a warning message' do
|
||||
click_button(_('Stop'))
|
||||
|
||||
it 'shows a stop action dialog without a warning message' do
|
||||
click_button(_('Stop'))
|
||||
|
||||
within('.modal-body') do
|
||||
expect(page).not_to have_content(warning_message)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when :environment_stop_actions_include_all_finished_deployments FF is disabled' do
|
||||
before do
|
||||
stub_feature_flags(environment_stop_actions_include_all_finished_deployments: false)
|
||||
visit_environments(project)
|
||||
end
|
||||
|
||||
it 'shows a stop action dialog with the correct message' do
|
||||
click_button(_('Stop'))
|
||||
|
||||
within('.modal-body') do
|
||||
expect(page.has_content?(warning_message)).to eq warning_present_if_ff_disabled
|
||||
end
|
||||
within('.modal-body') do
|
||||
expect(page).not_to have_content(warning_message)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -74,6 +74,8 @@ describe('PipelineFailedJobsWidget component', () => {
|
|||
});
|
||||
});
|
||||
|
||||
const CSS_BORDER_CLASSES = 'gl-border-white gl-hover-border-gray-100';
|
||||
|
||||
describe('when the job button is clicked', () => {
|
||||
beforeEach(async () => {
|
||||
createComponent();
|
||||
|
|
@ -85,9 +87,7 @@ describe('PipelineFailedJobsWidget component', () => {
|
|||
});
|
||||
|
||||
it('removes the CSS border classes', () => {
|
||||
expect(findFailedJobsCard().attributes('class')).not.toContain(
|
||||
'gl-border-white gl-hover-border-gray-100',
|
||||
);
|
||||
expect(findFailedJobsCard().attributes('class')).not.toContain(CSS_BORDER_CLASSES);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@ -97,9 +97,7 @@ describe('PipelineFailedJobsWidget component', () => {
|
|||
});
|
||||
|
||||
it('has the CSS border classes', () => {
|
||||
expect(findFailedJobsCard().attributes('class')).toContain(
|
||||
'gl-border-white gl-hover-border-gray-100',
|
||||
);
|
||||
expect(findFailedJobsCard().attributes('class')).toContain(CSS_BORDER_CLASSES);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -22,8 +22,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
|
|||
order_expression: Project.arel_table[:last_repository_check_at].asc.nulls_last,
|
||||
reversed_order_expression: Project.arel_table[:last_repository_check_at].desc.nulls_last,
|
||||
order_direction: :asc,
|
||||
nullable: :nulls_last,
|
||||
distinct: false)
|
||||
nullable: :nulls_last)
|
||||
end
|
||||
|
||||
let_it_be(:column_order_last_repo_desc) do
|
||||
|
|
@ -33,8 +32,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
|
|||
order_expression: Project.arel_table[:last_repository_check_at].desc.nulls_last,
|
||||
reversed_order_expression: Project.arel_table[:last_repository_check_at].asc.nulls_last,
|
||||
order_direction: :desc,
|
||||
nullable: :nulls_last,
|
||||
distinct: false)
|
||||
nullable: :nulls_last)
|
||||
end
|
||||
|
||||
subject(:connection) do
|
||||
|
|
|
|||
|
|
@ -7,8 +7,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
described_class.new(
|
||||
attribute_name: :name,
|
||||
order_expression: Project.arel_table[:name].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
end
|
||||
|
||||
|
|
@ -16,8 +15,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
described_class.new(
|
||||
attribute_name: :name,
|
||||
order_expression: Project.arel_table[:name].lower.desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
end
|
||||
|
||||
|
|
@ -35,8 +33,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
attribute_name: :name,
|
||||
column_expression: project_calculated_column_expression,
|
||||
order_expression: project_calculated_column_expression.asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
end
|
||||
|
||||
|
|
@ -57,8 +54,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
attribute_name: :name,
|
||||
column_expression: Project.arel_table[:name],
|
||||
order_expression: Project.arel_table[:name].asc.nulls_last,
|
||||
nullable: :nulls_last,
|
||||
distinct: false
|
||||
nullable: :nulls_last
|
||||
)
|
||||
|
||||
expect(column_order_definition).to be_ascending_order
|
||||
|
|
@ -72,8 +68,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
column_expression: Project.arel_table[:name],
|
||||
order_expression: 'name asc',
|
||||
reversed_order_expression: 'name desc',
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
end.to raise_error(RuntimeError, /Invalid or missing `order_direction`/)
|
||||
end
|
||||
|
|
@ -85,8 +80,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
order_expression: 'name asc',
|
||||
reversed_order_expression: 'name desc',
|
||||
order_direction: :asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
|
||||
expect(column_order_definition).to be_ascending_order
|
||||
|
|
@ -118,8 +112,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
column_expression: Project.arel_table[:name],
|
||||
order_expression: 'name asc',
|
||||
order_direction: :asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
end.to raise_error(RuntimeError, /Couldn't determine reversed order/)
|
||||
end
|
||||
|
|
@ -140,8 +133,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
order_expression: 'name asc',
|
||||
reversed_order_expression: 'name desc',
|
||||
order_direction: :asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
|
||||
expect(column_order_definition.reverse.order_expression).to eq('name desc')
|
||||
|
|
@ -155,8 +147,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
column_expression: Project.arel_table[:name],
|
||||
order_expression: Project.arel_table[:name].asc.nulls_last,
|
||||
order_direction: :asc,
|
||||
nullable: :nulls_last,
|
||||
distinct: false
|
||||
nullable: :nulls_last
|
||||
)
|
||||
|
||||
expect(column_order_definition.reverse.order_expression).to eq(Project.arel_table[:name].desc.nulls_first)
|
||||
|
|
@ -173,8 +164,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
order_expression: MergeRequest::Metrics.arel_table[:merged_at].desc.nulls_last,
|
||||
reversed_order_expression: MergeRequest::Metrics.arel_table[:merged_at].asc.nulls_first,
|
||||
order_direction: :desc,
|
||||
nullable: :nulls_last, # null values are always last
|
||||
distinct: false
|
||||
nullable: :nulls_last # null values are always last
|
||||
)
|
||||
end
|
||||
|
||||
|
|
@ -194,25 +184,10 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
order_expression: MergeRequest::Metrics.arel_table[:merged_at].desc.nulls_last,
|
||||
reversed_order_expression: MergeRequest::Metrics.arel_table[:merged_at].asc.nulls_first,
|
||||
order_direction: :desc,
|
||||
nullable: true,
|
||||
distinct: false
|
||||
nullable: true
|
||||
)
|
||||
end.to raise_error(RuntimeError, /Invalid `nullable` is given/)
|
||||
end
|
||||
|
||||
it 'raises error when the column is nullable and distinct' do
|
||||
expect do
|
||||
described_class.new(
|
||||
attribute_name: :name,
|
||||
column_expression: Project.arel_table[:name],
|
||||
order_expression: MergeRequest::Metrics.arel_table[:merged_at].desc.nulls_last,
|
||||
reversed_order_expression: MergeRequest::Metrics.arel_table[:merged_at].asc.nulls_first,
|
||||
order_direction: :desc,
|
||||
nullable: :nulls_last,
|
||||
distinct: true
|
||||
)
|
||||
end.to raise_error(RuntimeError, /Invalid column definition/)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -224,8 +199,7 @@ RSpec.describe Gitlab::Pagination::Keyset::ColumnOrderDefinition do
|
|||
order_expression: MergeRequest::Metrics.arel_table[:merged_at].desc.nulls_last,
|
||||
reversed_order_expression: MergeRequest::Metrics.arel_table[:merged_at].asc.nulls_first,
|
||||
order_direction: :desc,
|
||||
nullable: :nulls_last, # null values are always last
|
||||
distinct: false
|
||||
nullable: :nulls_last # null values are always last
|
||||
)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -186,14 +186,12 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
|
|||
order_expression: Issue.arel_table[:relative_position].desc.nulls_last,
|
||||
reversed_order_expression: Issue.arel_table[:relative_position].asc.nulls_first,
|
||||
order_direction: :desc,
|
||||
nullable: :nulls_last,
|
||||
distinct: false
|
||||
nullable: :nulls_last
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: :id,
|
||||
order_expression: Issue.arel_table[:id].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -414,8 +412,7 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
|
|||
attribute_name: 'projects_id',
|
||||
order_expression: Issue.arel_table[:projects_id].asc,
|
||||
sql_type: 'integer',
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: :id,
|
||||
|
|
@ -446,8 +443,7 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
|
|||
attribute_name: 'projects_id',
|
||||
order_expression: Issue.arel_table[:projects_id].desc,
|
||||
sql_type: 'integer',
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: :id,
|
||||
|
|
@ -478,15 +474,13 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
|
|||
attribute_name: 'projects_name',
|
||||
order_expression: Issue.arel_table[:projects_name].asc,
|
||||
sql_type: 'character varying',
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'projects_id',
|
||||
order_expression: Issue.arel_table[:projects_id].asc,
|
||||
sql_type: 'integer',
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: :id,
|
||||
|
|
@ -516,8 +510,7 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
|
|||
attribute_name: 'projects_name',
|
||||
order_expression: Issue.arel_table[:projects_name].asc,
|
||||
sql_type: 'character varying',
|
||||
nullable: :nulls_last,
|
||||
distinct: false
|
||||
nullable: :nulls_last
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: :id,
|
||||
|
|
|
|||
|
|
@ -23,8 +23,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Iterator do
|
|||
order_expression: klass.arel_table[column].public_send(direction).public_send(nulls_position), # rubocop:disable GitlabSecurity/PublicSend
|
||||
reversed_order_expression: klass.arel_table[column].public_send(reverse_direction).public_send(reverse_nulls_position), # rubocop:disable GitlabSecurity/PublicSend
|
||||
order_direction: direction,
|
||||
nullable: nulls_position,
|
||||
distinct: false
|
||||
nullable: nulls_position
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
|
|
|
|||
|
|
@ -132,7 +132,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when ordering by a distinct column' do
|
||||
context 'when ordering by a unique column' do
|
||||
let(:table_data) do
|
||||
<<-SQL
|
||||
VALUES (1, 0, 0),
|
||||
|
|
@ -154,8 +154,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
attribute_name: 'id',
|
||||
column_expression: table['id'],
|
||||
order_expression: table['id'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -177,7 +176,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
it_behaves_like 'order examples'
|
||||
end
|
||||
|
||||
context 'when ordering by two non-nullable columns and a distinct column' do
|
||||
context 'when ordering by two non-nullable columns' do
|
||||
let(:table_data) do
|
||||
<<-SQL
|
||||
VALUES (1, 2010, 2),
|
||||
|
|
@ -199,22 +198,19 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
attribute_name: 'year',
|
||||
column_expression: table['year'],
|
||||
order_expression: table['year'].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'month',
|
||||
column_expression: table['month'],
|
||||
order_expression: table['month'].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
column_expression: table['id'],
|
||||
order_expression: table['id'].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -242,7 +238,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when ordering by nullable columns and a distinct column' do
|
||||
context 'when ordering by nullable columns' do
|
||||
let(:table_data) do
|
||||
<<-SQL
|
||||
VALUES (1, 2010, null),
|
||||
|
|
@ -268,8 +264,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
order_expression: table[:year].asc.nulls_last,
|
||||
reversed_order_expression: table[:year].desc.nulls_first,
|
||||
order_direction: :asc,
|
||||
nullable: :nulls_last,
|
||||
distinct: false
|
||||
nullable: :nulls_last
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'month',
|
||||
|
|
@ -277,15 +272,13 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
order_expression: table[:month].asc.nulls_last,
|
||||
reversed_order_expression: table[:month].desc.nulls_first,
|
||||
order_direction: :asc,
|
||||
nullable: :nulls_last,
|
||||
distinct: false
|
||||
nullable: :nulls_last
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
column_expression: table['id'],
|
||||
order_expression: table['id'].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -309,7 +302,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
it_behaves_like 'order examples'
|
||||
end
|
||||
|
||||
context 'when ordering by nullable columns with nulls first ordering and a distinct column' do
|
||||
context 'when ordering by nullable columns with nulls first ordering' do
|
||||
let(:table_data) do
|
||||
<<-SQL
|
||||
VALUES (1, 2010, null),
|
||||
|
|
@ -335,8 +328,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
order_expression: table[:year].asc.nulls_first,
|
||||
reversed_order_expression: table[:year].desc.nulls_last,
|
||||
order_direction: :asc,
|
||||
nullable: :nulls_first,
|
||||
distinct: false
|
||||
nullable: :nulls_first
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'month',
|
||||
|
|
@ -344,15 +336,13 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
order_expression: table[:month].asc.nulls_first,
|
||||
order_direction: :asc,
|
||||
reversed_order_expression: table[:month].desc.nulls_last,
|
||||
nullable: :nulls_first,
|
||||
distinct: false
|
||||
nullable: :nulls_first
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
column_expression: table['id'],
|
||||
order_expression: table['id'].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -376,7 +366,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
it_behaves_like 'order examples'
|
||||
end
|
||||
|
||||
context 'when ordering by non-nullable columns with mixed directions and a distinct column' do
|
||||
context 'when ordering by non-nullable columns with mixed directions' do
|
||||
let(:table_data) do
|
||||
<<-SQL
|
||||
VALUES (1, 2010, 0),
|
||||
|
|
@ -400,15 +390,13 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
attribute_name: 'year',
|
||||
column_expression: table['year'],
|
||||
order_expression: table['year'].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
column_expression: table['id'],
|
||||
order_expression: table['id'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -458,15 +446,13 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
attribute_name: 'title',
|
||||
column_expression: Arel::Nodes::NamedFunction.new("LOWER", [table['title'].desc]),
|
||||
order_expression: table['title'].lower.desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
column_expression: table['id'],
|
||||
order_expression: table['id'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -500,15 +486,13 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
attribute_name: 'year',
|
||||
column_expression: table['year'],
|
||||
order_expression: table['year'].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
column_expression: table['id'],
|
||||
order_expression: table['id'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -569,8 +553,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
order_expression: Project.arel_table['id'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -585,8 +568,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: :id,
|
||||
order_expression: Project.arel_table['id'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -613,7 +595,6 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
column_expression: Project.arel_table[:created_at],
|
||||
order_expression: Project.arel_table[:created_at].desc,
|
||||
order_direction: :desc,
|
||||
distinct: false,
|
||||
add_to_projections: true
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
|
|
@ -645,8 +626,7 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'traversal_ids',
|
||||
order_expression: Group.arel_table['traversal_ids'].asc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -761,14 +741,12 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
|
|||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'id',
|
||||
order_expression: Project.arel_table['id'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
),
|
||||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'name',
|
||||
order_expression: Project.arel_table['name'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: true
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
|
|||
|
|
@ -16,9 +16,8 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
|
|||
expect(sql_with_order).to end_with('ORDER BY "projects"."id" DESC')
|
||||
end
|
||||
|
||||
it 'sets the column definition distinct and not nullable' do
|
||||
it 'sets the column definition to not nullable' do
|
||||
expect(column_definition).to be_not_nullable
|
||||
expect(column_definition).to be_distinct
|
||||
end
|
||||
|
||||
context "when the order scope's model uses default_scope" do
|
||||
|
|
@ -53,10 +52,9 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
|
|||
expect(sql_with_order).to end_with('ORDER BY "projects"."created_at" ASC, "projects"."id" DESC')
|
||||
end
|
||||
|
||||
it 'sets the column definition for created_at non-distinct and nullable' do
|
||||
it 'sets the column definition for created_at' do
|
||||
expect(column_definition.attribute_name).to eq('created_at')
|
||||
expect(column_definition.nullable?).to eq(true) # be_nullable calls non_null? method for some reason
|
||||
expect(column_definition).not_to be_distinct
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -71,10 +69,9 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
|
|||
context 'when non-nullable column is given' do
|
||||
let(:scope) { Project.where(id: [1, 2, 3]).order(namespace_id: :asc, id: :asc) }
|
||||
|
||||
it 'sets the column definition for namespace_id non-distinct and non-nullable' do
|
||||
it 'sets the column definition for namespace_id to non-nullable' do
|
||||
expect(column_definition.attribute_name).to eq('namespace_id')
|
||||
expect(column_definition).to be_not_nullable
|
||||
expect(column_definition).not_to be_distinct
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -93,10 +90,9 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
|
|||
SQL
|
||||
end
|
||||
|
||||
it 'sets the column definition for name non-distinct and nullable' do
|
||||
it 'sets the column definition for name to nullable' do
|
||||
expect(column_definition.attribute_name).to eq('name')
|
||||
expect(column_definition.nullable?).to be_truthy
|
||||
expect(column_definition).not_to be_distinct
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -108,10 +104,9 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
|
|||
SQL
|
||||
end
|
||||
|
||||
it 'sets the column definition for name non-distinct and non-nullable' do
|
||||
it 'sets the column definition for name to non-nullable' do
|
||||
expect(column_definition.attribute_name).to eq('name')
|
||||
expect(column_definition).to be_not_nullable
|
||||
expect(column_definition).not_to be_distinct
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -52,8 +52,7 @@ RSpec.describe UnnestedInFilters::Rewriter do
|
|||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'user_type',
|
||||
order_expression: User.arel_table['user_type'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
@ -202,8 +201,7 @@ RSpec.describe UnnestedInFilters::Rewriter do
|
|||
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
|
||||
attribute_name: 'user_type',
|
||||
order_expression: User.arel_table['user_type'].desc,
|
||||
nullable: :not_nullable,
|
||||
distinct: false
|
||||
nullable: :not_nullable
|
||||
)
|
||||
])
|
||||
end
|
||||
|
|
|
|||
|
|
@ -759,26 +759,10 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
|
|||
expect(bridge.variables.to_hash)
|
||||
.to eq(bridge.scoped_variables.concat(bridge.pipeline.persisted_variables).to_hash)
|
||||
end
|
||||
|
||||
context 'when bridge is for a trigger request' do
|
||||
let(:trigger) { create(:ci_trigger, project: project) }
|
||||
let(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, trigger: trigger) }
|
||||
|
||||
let(:predefined_trigger_variables) do
|
||||
[{ key: 'CI_PIPELINE_TRIGGERED', value: 'true', public: true, masked: false },
|
||||
{ key: 'CI_TRIGGER_SHORT_TOKEN', value: trigger.short_token, public: true, masked: false }]
|
||||
end
|
||||
|
||||
before do
|
||||
bridge.trigger_request = trigger_request
|
||||
end
|
||||
|
||||
it 'includes the trigger variables' do
|
||||
expect(bridge.variables).to include(*predefined_trigger_variables)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'a triggerable processable', :ci_bridge
|
||||
|
||||
describe '#pipeline_variables' do
|
||||
it 'returns the pipeline variables' do
|
||||
expect(bridge.pipeline_variables).to eq(bridge.pipeline.variables)
|
||||
|
|
|
|||
|
|
@ -133,6 +133,8 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
|
|||
let(:job) { build }
|
||||
end
|
||||
|
||||
it_behaves_like 'a triggerable processable', :ci_build
|
||||
|
||||
describe '.ref_protected' do
|
||||
subject { described_class.ref_protected }
|
||||
|
||||
|
|
@ -3017,22 +3019,6 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration, factory_def
|
|||
end
|
||||
end
|
||||
|
||||
context 'when build is for triggers' do
|
||||
let(:trigger) { create(:ci_trigger, project: project) }
|
||||
let(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, trigger: trigger) }
|
||||
|
||||
let(:predefined_trigger_variables) do
|
||||
[{ key: 'CI_PIPELINE_TRIGGERED', value: 'true', public: true, masked: false },
|
||||
{ key: 'CI_TRIGGER_SHORT_TOKEN', value: trigger.short_token, public: true, masked: false }]
|
||||
end
|
||||
|
||||
before do
|
||||
build.trigger_request = trigger_request
|
||||
end
|
||||
|
||||
it { is_expected.to include(*predefined_trigger_variables) }
|
||||
end
|
||||
|
||||
context 'when pipeline has a variable' do
|
||||
let!(:pipeline_variable) { create(:ci_pipeline_variable, pipeline: pipeline) }
|
||||
|
||||
|
|
|
|||
|
|
@ -6,12 +6,17 @@ RSpec.describe Ci::Processable, feature_category: :continuous_integration do
|
|||
let_it_be(:project) { create(:project) }
|
||||
let_it_be_with_refind(:pipeline) { create(:ci_pipeline, project: project) }
|
||||
|
||||
describe 'associations' do
|
||||
it { is_expected.to belong_to(:trigger_request) }
|
||||
end
|
||||
|
||||
describe 'delegations' do
|
||||
subject { described_class.new }
|
||||
|
||||
it { is_expected.to delegate_method(:merge_request?).to(:pipeline) }
|
||||
it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
|
||||
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
|
||||
it { is_expected.to delegate_method(:trigger_short_token).to(:trigger_request) }
|
||||
end
|
||||
|
||||
describe '#clone' do
|
||||
|
|
|
|||
|
|
@ -729,179 +729,6 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
|
|||
end
|
||||
end
|
||||
|
||||
describe '.last_deployment_group_for_environment' do
|
||||
def subject_method(environment)
|
||||
described_class.last_deployment_group_for_environment(environment)
|
||||
end
|
||||
|
||||
shared_examples_for 'find last deployment group for environment' do
|
||||
context 'when there are no deployments and jobs' do
|
||||
it do
|
||||
expect(subject_method(environment)).to eq(described_class.none)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are no successful jobs' do
|
||||
let(:job) { create(factory_type, :created, project: project, pipeline: pipeline) }
|
||||
|
||||
before do
|
||||
create(:deployment, :created, environment: environment, project: project, deployable: job)
|
||||
end
|
||||
|
||||
it do
|
||||
expect(subject_method(environment)).to eq(described_class.none)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are deployments for multiple pipelines' do
|
||||
let(:job_a) { create(factory_type, :success, project: project, pipeline: pipeline) }
|
||||
let(:job_b) { create(factory_type, :failed, project: project, pipeline: pipeline_b) }
|
||||
let(:job_c) { create(factory_type, :success, project: project, pipeline: pipeline) }
|
||||
let(:job_d) { create(factory_type, :failed, project: project, pipeline: pipeline) }
|
||||
|
||||
# Successful deployments for pipeline
|
||||
let!(:deployment_a) do
|
||||
create(:deployment, :success, project: project, environment: environment, deployable: job_a)
|
||||
end
|
||||
|
||||
let!(:deployment_b) do
|
||||
create(:deployment, :success, project: project, environment: environment, deployable: job_c)
|
||||
end
|
||||
|
||||
before do
|
||||
# Failed deployment for pipeline
|
||||
create(:deployment, :failed, project: project, environment: environment, deployable: job_d)
|
||||
|
||||
# Failed deployment for pipeline_b
|
||||
create(:deployment, :failed, project: project, environment: environment, deployable: job_b)
|
||||
end
|
||||
|
||||
it 'returns the successful deployment jobs for the last deployment pipeline' do
|
||||
expect(subject_method(environment.reload).pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are many environments' do
|
||||
let(:environment_b) { create(:environment, project: project) }
|
||||
|
||||
let(:pipeline_c) { create(:ci_pipeline, project: project) }
|
||||
let(:pipeline_d) { create(:ci_pipeline, project: project) }
|
||||
|
||||
# Builds for first environment: 'environment' with pipeline and pipeline_b
|
||||
let(:job_a) { create(factory_type, :success, project: project, pipeline: pipeline) }
|
||||
let(:job_b) { create(factory_type, :failed, project: project, pipeline: pipeline_b) }
|
||||
let(:job_c) { create(factory_type, :success, project: project, pipeline: pipeline) }
|
||||
let(:job_d) { create(factory_type, :failed, project: project, pipeline: pipeline) }
|
||||
let!(:stop_env_a) do
|
||||
create(factory_type, :manual, project: project, pipeline: pipeline, name: 'stop_env_a')
|
||||
end
|
||||
|
||||
# Builds for second environment: 'environment_b' with pipeline_c and pipeline_d
|
||||
let(:job_e) { create(factory_type, :success, project: project, pipeline: pipeline_c) }
|
||||
let(:job_f) { create(factory_type, :failed, project: project, pipeline: pipeline_d) }
|
||||
let(:job_g) { create(factory_type, :success, project: project, pipeline: pipeline_c) }
|
||||
let(:job_h) { create(factory_type, :failed, project: project, pipeline: pipeline_c) }
|
||||
let!(:stop_env_b) do
|
||||
create(factory_type, :manual, project: project, pipeline: pipeline_c, name: 'stop_env_b')
|
||||
end
|
||||
|
||||
# Successful deployments for 'environment' from pipeline
|
||||
let!(:deployment_a) do
|
||||
create(:deployment, :success, project: project, environment: environment, deployable: job_a)
|
||||
end
|
||||
|
||||
let!(:deployment_b) do
|
||||
create(:deployment, :success,
|
||||
project: project, environment: environment, deployable: job_c, on_stop: 'stop_env_a')
|
||||
end
|
||||
|
||||
# Successful deployments for 'environment_b' from pipeline_c
|
||||
let!(:deployment_c) do
|
||||
create(:deployment, :success, project: project, environment: environment_b, deployable: job_e)
|
||||
end
|
||||
|
||||
let!(:deployment_d) do
|
||||
create(:deployment, :success,
|
||||
project: project, environment: environment_b, deployable: job_g, on_stop: 'stop_env_b')
|
||||
end
|
||||
|
||||
before do
|
||||
# Failed deployment for 'environment' from pipeline and pipeline_b
|
||||
create(:deployment, :failed, project: project, environment: environment, deployable: job_d)
|
||||
create(:deployment, :failed, project: project, environment: environment, deployable: job_b)
|
||||
|
||||
# Failed deployment for 'environment_b' from pipeline_c and pipeline_d
|
||||
create(:deployment, :failed, project: project, environment: environment_b, deployable: job_h)
|
||||
create(:deployment, :failed, project: project, environment: environment_b, deployable: job_f)
|
||||
end
|
||||
|
||||
it 'batch loads for environments' do
|
||||
environments = [environment, environment_b]
|
||||
|
||||
# Loads Batch loader
|
||||
environments.each do |env|
|
||||
subject_method(env)
|
||||
end
|
||||
|
||||
expect(subject_method(environments.first).pluck(:id))
|
||||
.to contain_exactly(deployment_a.id, deployment_b.id)
|
||||
|
||||
expect { subject_method(environments.second).pluck(:id) }.not_to exceed_query_limit(0)
|
||||
|
||||
expect(subject_method(environments.second).pluck(:id))
|
||||
.to contain_exactly(deployment_c.id, deployment_d.id)
|
||||
|
||||
expect(subject_method(environments.first).filter_map(&:stop_action))
|
||||
.to contain_exactly(stop_env_a)
|
||||
|
||||
expect { subject_method(environments.second).map(&:stop_action) }
|
||||
.not_to exceed_query_limit(0)
|
||||
|
||||
expect(subject_method(environments.second).filter_map(&:stop_action))
|
||||
.to contain_exactly(stop_env_b)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when last deployment for environment is a retried job' do
|
||||
let(:environment_b) { environment }
|
||||
|
||||
let(:job_a) do
|
||||
create(factory_type, :success, project: project, pipeline: pipeline, environment: environment.name)
|
||||
end
|
||||
|
||||
let(:job_b) do
|
||||
create(factory_type, :success, project: project, pipeline: pipeline, environment: environment_b.name)
|
||||
end
|
||||
|
||||
let!(:deployment_a) do
|
||||
create(:deployment, :success, project: project, environment: environment, deployable: job_a)
|
||||
end
|
||||
|
||||
let!(:deployment_b) do
|
||||
create(:deployment, :success, project: project, environment: environment_b, deployable: job_b)
|
||||
end
|
||||
|
||||
before do
|
||||
# Retry job_b
|
||||
job_b.update!(retried: true)
|
||||
|
||||
# New successful job after retry.
|
||||
create(factory_type, :success, project: project, pipeline: pipeline, environment: environment_b.name)
|
||||
end
|
||||
|
||||
it { expect(subject_method(environment_b)).not_to be_nil }
|
||||
end
|
||||
end
|
||||
|
||||
it_behaves_like 'find last deployment group for environment' do
|
||||
let(:factory_type) { :ci_build }
|
||||
end
|
||||
|
||||
it_behaves_like 'find last deployment group for environment' do
|
||||
let(:factory_type) { :ci_bridge }
|
||||
end
|
||||
end
|
||||
|
||||
describe '.last_finished_deployment_group_for_environment' do
|
||||
subject { described_class.last_finished_deployment_group_for_environment(environment) }
|
||||
|
||||
|
|
@ -1085,25 +912,6 @@ RSpec.describe Deployment, feature_category: :continuous_delivery do
|
|||
expect(subject_method(environment_2).filter_map(&:stop_action))
|
||||
.to contain_exactly(stop_job_c_success, stop_job_c_failed, stop_job_c_canceled)
|
||||
end
|
||||
|
||||
# last_deployment_group_for_environment and last_finished_deployment_group_for_environment
|
||||
# use BatchLoader.for(environment), with different batch key. This test makes sure that
|
||||
# the different methods are returning different results.
|
||||
context 'when last_deployment_group_for_environment is also called' do
|
||||
def last_deployment_group(env)
|
||||
described_class.last_deployment_group_for_environment(env)
|
||||
end
|
||||
|
||||
it 'returns different results' do
|
||||
successful_deployment_group_for_env_1 = last_deployment_group(environment)
|
||||
expect(successful_deployment_group_for_env_1.pluck(:id)).to contain_exactly(deployment_a_success.id)
|
||||
expect(successful_deployment_group_for_env_1).not_to eq(subject_method(environment))
|
||||
|
||||
successful_deployment_group_for_env_2 = last_deployment_group(environment_2)
|
||||
expect(successful_deployment_group_for_env_2.pluck(:id)).to contain_exactly(deployment_c_success.id)
|
||||
expect(successful_deployment_group_for_env_2).not_to eq(subject_method(environment_2))
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -991,12 +991,9 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
|
|||
|
||||
describe '#stop_actions' do
|
||||
subject do
|
||||
# since we are toggling the jobs being fetched according to the
|
||||
# `environment_stop_actions_include_all_finished_deployments` FF
|
||||
# and the `stop_actions` method is strong_memoized, we need to reload
|
||||
# the environment every time we fetch stop_actions
|
||||
# TODO: switch to a simple `environment.stop_actions` when FF is removed
|
||||
# - https://gitlab.com/gitlab-org/gitlab/-/issues/435132
|
||||
# Environment#stop_actions is strong-memoized,
|
||||
# so we need to reload the `environment` to make sure
|
||||
# that the updated `stop_actions` records are being fetched
|
||||
reloaded_environment = described_class.find(environment.id)
|
||||
reloaded_environment.stop_actions
|
||||
end

@@ -1039,16 +1036,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
)
end

context 'when :environment_stop_actions_include_all_finished_deployments FF is disabled' do
before do
stub_feature_flags(environment_stop_actions_include_all_finished_deployments: false)
end

it 'returns the stop actions of the last successful pipeline' do
expect(subject).to contain_exactly(successful_pipeline_stop)
end
end

context 'when the last finished pipeline has a successful deployment' do
let_it_be(:finished_pipeline_stop_c) { create_deployment_with_stop_action(:success, finished_pipeline, 'finished_pipeline_stop_c') }

@@ -1059,51 +1046,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
finished_pipeline_stop_c
)
end

context 'when :environment_stop_actions_include_all_finished_deployments FF is disabled' do
before do
stub_feature_flags(environment_stop_actions_include_all_finished_deployments: false)
end

it 'returns the stop actions of the successful deployment in the last finished pipeline' do
expect(subject).to contain_exactly(finished_pipeline_stop_c)
end
end
end
end
end

describe '#last_deployment_group' do
subject { environment.last_deployment_group }

context 'when there are no deployments and builds' do
it do
is_expected.to eq(Deployment.none)
end
end

context 'when there are deployments for multiple pipelines' do
let(:pipeline_a) { create(:ci_pipeline, project: project) }
let(:pipeline_b) { create(:ci_pipeline, project: project) }
let(:ci_build_a) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
let(:ci_build_b) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }
let(:ci_build_c) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
let(:ci_build_d) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }

# Successful deployments for pipeline_a
let!(:deployment_a) { create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a) }
let!(:deployment_b) { create(:deployment, :success, project: project, environment: environment, deployable: ci_build_c) }

before do
# Failed deployment for pipeline_a
create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_d)

# Failed deployment for pipeline_b
create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
end

it 'returns the successful deployment jobs for the last deployment pipeline' do
expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
end
end
end

@@ -1281,48 +1223,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_categ
end
end

describe '#last_deployment_pipeline' do
subject { environment.last_deployment_pipeline }

let(:pipeline_a) { create(:ci_pipeline, project: project) }
let(:pipeline_b) { create(:ci_pipeline, project: project) }
let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline_a) }
let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline_b) }

before do
create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
end

it 'does not join across databases' do
with_cross_joins_prevented do
expect(subject.id).to eq(pipeline_a.id)
end
end
end

describe '#latest_successful_jobs' do
subject { environment.latest_successful_jobs }

let(:pipeline_a) { create(:ci_pipeline, project: project) }
let(:pipeline_b) { create(:ci_pipeline, project: project) }
let(:ci_build_a_1) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
let(:ci_build_a_2) { create(:ci_build, :success, project: project, pipeline: pipeline_a) }
let(:ci_build_a_3) { create(:ci_build, :failed, project: project, pipeline: pipeline_a) }
let(:ci_build_b_1) { create(:ci_build, :failed, project: project, pipeline: pipeline_b) }

before do
create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a_1)
create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a_2)
create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_a_3)
create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b_1)
end

it 'fetches the latest successful jobs through the last pipeline with a successful deployment' do
is_expected.to contain_exactly(ci_build_a_1, ci_build_a_2)
end
end

describe '#last_visible_deployment' do
subject { environment.last_visible_deployment }

@@ -4607,6 +4607,36 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end
end

describe 'PUT /user/avatar' do
let(:path) { "/user/avatar" }

it "returns 200 OK on success" do
workhorse_form_with_file(
api(path, user),
method: :put,
file_key: :avatar,
params: { avatar: fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
)

user.reload
expect(user.avatar).to be_present
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['avatar_url']).to include(user.avatar_path)
end

it "returns 400 when avatar file size over 200 KiB" do
workhorse_form_with_file(
api(path, user),
method: :put,
file_key: :avatar,
params: { avatar: fixture_file_upload('spec/fixtures/big-image.png', 'image/png') }
)

expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to include("Avatar is too big (should be at most 200 KiB)")
end
end
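
For context, the new endpoint accepts a standard multipart upload (Workhorse-accelerated on the server side). A hypothetical client-side sketch with plain Net::HTTP follows; the host, token variable, and file name are placeholders, not part of this change:

  # Upload a new avatar for the authenticated user.
  require 'net/http'
  require 'uri'

  uri = URI('https://gitlab.example.com/api/v4/user/avatar')
  request = Net::HTTP::Put.new(uri)
  request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')

  File.open('avatar.png', 'rb') do |file|
    # multipart/form-data body with the file under the `avatar` field
    request.set_form(
      [['avatar', file, { filename: 'avatar.png', content_type: 'image/png' }]],
      'multipart/form-data'
    )
    response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
    puts response.code # 200 on success, 400 if the file is larger than 200 KiB
  end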

describe 'POST /users/:user_id/personal_access_tokens' do
let(:name) { 'new pat' }
let(:expires_at) { 3.days.from_now.to_date.to_s }

@@ -6,7 +6,7 @@ RSpec.describe 'value stream analytics events', feature_category: :team_planning
include CycleAnalyticsHelpers

let(:user) { create(:user) }
let(:project) { create(:project, :repository, public_builds: false) }
let(:project) { create(:project, :repository, public_builds: false, developers: user) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }

describe 'GET /:namespace/:project/value_stream_analytics/events/issues', :sidekiq_inline do

@@ -14,15 +14,15 @@ RSpec.describe 'value stream analytics events', feature_category: :team_planning
let(:first_mr_iid) { project.merge_requests.sort_by_attribute(:created_desc).pick(:iid).to_s }

before do
project.add_developer(user)

3.times do |count|
travel_to(Time.now + count.days) do
create_cycle
end
end

deploy_master(user, project)
travel_to(Time.now + 3.days) do
deploy_master(user, project)
end

login_as(user)
end

@@ -16,14 +16,6 @@ RSpec.describe EnvironmentSerializer, feature_category: :continuous_delivery do

it_behaves_like 'avoid N+1 on environments serialization'

context 'when :environment_stop_actions_include_all_finished_deployments FF is disabled' do
before do
stub_feature_flags(environment_stop_actions_include_all_finished_deployments: false)
end

it_behaves_like 'avoid N+1 on environments serialization'
end

context 'when there is a collection of objects provided' do
let(:resource) { project.environments }

@@ -272,7 +264,6 @@ RSpec.describe EnvironmentSerializer, feature_category: :continuous_delivery do
before do
environments.each do |env|
allow(env).to receive(:last_finished_deployment_group).and_call_original
allow(env).to receive(:last_deployment_group).and_call_original
end

allow(resource).to receive(:to_a).and_return(environments)

@@ -283,18 +274,6 @@ RSpec.describe EnvironmentSerializer, feature_category: :continuous_delivery do

json
end

context 'when :environment_stop_actions_include_all_finished_deployments FF is disabled' do
before do
stub_feature_flags(environment_stop_actions_include_all_finished_deployments: false)
end

it "batch loads each environment's last_deployment_group" do
expect(environments).to all(receive(:last_deployment_group))

json
end
end
end
end

@@ -0,0 +1,27 @@
# frozen_string_literal: true

RSpec.shared_examples 'a triggerable processable' do |factory|
describe '#variables' do
subject { described_instance.variables }

let_it_be(:described_instance) { create(factory) } # rubocop:disable Rails/SaveBang -- This is a factory, not a Rails method call

context 'when trigger_request is present' do
let_it_be(:trigger) { create(:ci_trigger, project: project) }
let_it_be(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, trigger: trigger) }

let(:predefined_trigger_variables) do
[
{ key: 'CI_PIPELINE_TRIGGERED', value: 'true', public: true, masked: false },
{ key: 'CI_TRIGGER_SHORT_TOKEN', value: trigger.short_token, public: true, masked: false }
]
end

before do
described_instance.trigger_request = trigger_request
end

it { is_expected.to include(*predefined_trigger_variables) }
end
end
end
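
A note on usage: these shared examples reference `project` and `pipeline` without defining them, so the including spec has to provide both. A hypothetical sketch of how a processable spec might pull them in (the describe block and factory name below are illustrative, not part of this diff):

  RSpec.describe Ci::Build do
    let_it_be(:project) { create(:project) }
    let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

    # Runs the '#variables' examples above against a :ci_build factory instance.
    it_behaves_like 'a triggerable processable', :ci_build
  end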

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe 'gitlab:web_hook namespace rake tasks', :silence_stdout do
RSpec.describe 'gitlab:web_hook namespace rake tasks', :silence_stdout, feature_category: :webhooks do
let!(:group) { create(:group) }
let!(:project1) { create(:project, namespace: group) }
let!(:project2) { create(:project, namespace: group) }

@@ -153,6 +153,7 @@ func TestAcceleratedUpload(t *testing.T) {
{"PUT", `/api/v4/groups/group%2Fsubgroup`, false},
{"POST", `/api/v4/groups/1/wikis/attachments`, false},
{"POST", `/api/v4/groups/my%2Fsubgroup/wikis/attachments`, false},
{"PUT", `/api/v4/user/avatar`, false},
{"POST", `/api/v4/users`, false},
{"PUT", `/api/v4/users/42`, false},
{"PUT", "/api/v4/projects/9001/packages/nuget/v1/files", true},

@@ -14,6 +14,7 @@ import (
// DefaultObjectStoreTimeout is the timeout for ObjectStore upload operation
const DefaultObjectStoreTimeout = 4 * time.Hour

// ObjectStorageConfig holds configuration details for object storage destinations.
type ObjectStorageConfig struct {
Provider string

@@ -54,7 +55,7 @@ type UploadOpts struct {
// The maximum accepted size in bytes of the upload
MaximumSize int64

//MultipartUpload parameters
// MultipartUpload parameters
// PartSize is the exact size of each uploaded part. Only the last one can be smaller
PartSize int64
// PresignedParts contains the presigned URLs for each part

@@ -140,18 +141,22 @@ func GetOpts(apiResponse *api.Response) (*UploadOpts, error) {
return &opts, nil
}

// IsAWS checks if the object storage provider is AWS S3.
func (c *ObjectStorageConfig) IsAWS() bool {
return strings.EqualFold(c.Provider, "AWS") || strings.EqualFold(c.Provider, "S3")
}

// IsAzure checks if the object storage provider is Azure Blob Storage.
func (c *ObjectStorageConfig) IsAzure() bool {
return strings.EqualFold(c.Provider, "AzureRM")
}

// IsGoCloud checks if the object storage provider is configured to use GoCloud.
func (c *ObjectStorageConfig) IsGoCloud() bool {
return c.GoCloudConfig.URL != ""
}

// IsValid checks if the object storage configuration is valid.
func (c *ObjectStorageConfig) IsValid() bool {
if c.IsAWS() {
return c.S3Config.Bucket != "" && c.s3CredentialsValid()

@@ -119,7 +119,7 @@ func TestGetOpts(t *testing.T) {
if test.customPutHeaders {
require.Equal(t, opts.PutHeaders, apiResponse.RemoteObject.PutHeaders)
} else {
require.Equal(t, opts.PutHeaders, map[string]string{"Content-Type": "application/octet-stream"})
require.Equal(t, map[string]string{"Content-Type": "application/octet-stream"}, opts.PutHeaders)
}

if test.multipart == nil {

@@ -1,3 +1,4 @@
// Package upload provides functionality for handling file uploads and processing image metadata.
package upload

import (

@@ -17,11 +18,12 @@ func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageTy
if err != nil {
return nil, err
}

go func() {
<-ctx.Done()
tmpfile.Close()
_ = tmpfile.Close()
}()
if err := os.Remove(tmpfile.Name()); err != nil {
if err = os.Remove(tmpfile.Name()); err != nil {
return nil, err
}

@@ -30,7 +32,7 @@ func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageTy
return nil, err
}

if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
if _, err = tmpfile.Seek(0, io.SeekStart); err != nil {
return nil, err
}

@@ -42,7 +44,7 @@ func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageTy
isValidType = isTIFF(tmpfile)
}

if _, err := tmpfile.Seek(0, io.SeekStart); err != nil {
if _, err = tmpfile.Seek(0, io.SeekStart); err != nil {
return nil, err
}

@@ -1,3 +1,4 @@
// Package exif provides functionality for extracting EXIF metadata from images.
package exif

import (

@@ -13,6 +14,7 @@ import (
"gitlab.com/gitlab-org/labkit/log"
)

// ErrRemovingExif is an error returned when there is an issue while removing EXIF metadata from an image.
var ErrRemovingExif = errors.New("error while removing EXIF")

type cleaner struct {

@@ -23,14 +25,20 @@ type cleaner struct {
eof bool
}

// FileType represents the type of an image file.
type FileType int

const (
// TypeUnknown represents an unknown file type.
TypeUnknown FileType = iota
// TypeJPEG represents the JPEG image file type.
TypeJPEG
// TypeTIFF represents the TIFF image file type.
TypeTIFF
)

// NewCleaner creates a new EXIF cleaner instance using the provided context and stdin.
// It processes the input from stdin to remove EXIF data from images.
func NewCleaner(ctx context.Context, stdin io.Reader) (io.ReadCloser, error) {
c := &cleaner{ctx: ctx}

@@ -75,7 +83,7 @@ func (c *cleaner) Read(p []byte) (int, error) {
func (c *cleaner) startProcessing(stdin io.Reader) error {
var err error

whitelisted_tags := []string{
whitelistedTags := []string{
"-ResolutionUnit",
"-XResolution",
"-YResolution",

@@ -90,7 +98,7 @@ func (c *cleaner) startProcessing(stdin io.Reader) error {
"-Orientation",
}

args := append([]string{"-all=", "--IPTC:all", "--XMP-iptcExt:all", "-tagsFromFile", "@"}, whitelisted_tags...)
args := append([]string{"-all=", "--IPTC:all", "--XMP-iptcExt:all", "-tagsFromFile", "@"}, whitelistedTags...)
args = append(args, "-")
c.cmd = exec.CommandContext(c.ctx, "exiftool", args...)

@@ -109,6 +117,7 @@ func (c *cleaner) startProcessing(stdin io.Reader) error {
return nil
}

// FileTypeFromSuffix returns the FileType inferred from the filename's suffix.
func FileTypeFromSuffix(filename string) FileType {
if os.Getenv("SKIP_EXIFTOOL") == "1" {
return TypeUnknown

@@ -344,6 +344,7 @@ func configureRoutes(u *upstream) {
u.route("PUT", apiPattern+`v4/groups/[^/]+\z`, tempfileMultipartProxy),

// User Avatar
u.route("PUT", apiPattern+`v4/user/avatar\z`, tempfileMultipartProxy),
u.route("POST", apiPattern+`v4/users\z`, tempfileMultipartProxy),
u.route("PUT", apiPattern+`v4/users/[0-9]+\z`, tempfileMultipartProxy),