Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-01-17 12:48:37 +00:00
parent 958b824875
commit ecb6bda4d4
73 changed files with 1697 additions and 1137 deletions

View File

@ -9,7 +9,6 @@ Gettext/StaticIdentifier:
- 'app/services/security/ci_configuration/base_create_service.rb'
- 'app/services/users/banned_user_base_service.rb'
- 'ee/app/mailers/ee/emails/admin_notification.rb'
- 'ee/app/mailers/emails/namespace_storage_usage_mailer.rb'
- 'ee/app/models/integrations/github.rb'
- 'ee/app/services/ee/projects/create_from_template_service.rb'
- 'ee/app/services/security/security_orchestration_policies/policy_configuration_validation_service.rb'

View File

@ -2630,14 +2630,12 @@ Gitlab/BoundedContexts:
- 'ee/app/mailers/emails/epics.rb'
- 'ee/app/mailers/emails/group_memberships.rb'
- 'ee/app/mailers/emails/merge_commits.rb'
- 'ee/app/mailers/emails/namespace_storage_usage_mailer.rb'
- 'ee/app/mailers/emails/okr.rb'
- 'ee/app/mailers/emails/oncall_rotation.rb'
- 'ee/app/mailers/emails/requirements.rb'
- 'ee/app/mailers/emails/user_cap.rb'
- 'ee/app/mailers/license_mailer.rb'
- 'ee/app/mailers/previews/ci_minutes_usage_mailer_preview.rb'
- 'ee/app/mailers/previews/emails/namespace_storage_usage_mailer_preview.rb'
- 'ee/app/mailers/previews/license_mailer_preview.rb'
- 'ee/app/models/alert_management/alert_payload_field.rb'
- 'ee/app/models/allowed_email_domain.rb'

View File

@ -487,7 +487,6 @@ Layout/LineLength:
- 'ee/app/finders/projects/integrations/jira/by_ids_finder.rb'
- 'ee/app/finders/projects/integrations/jira/issues_finder.rb'
- 'ee/app/finders/security/pipeline_vulnerabilities_finder.rb'
- 'ee/app/finders/security/vulnerabilities_finder.rb'
- 'ee/app/graphql/ee/mutations/boards/lists/create.rb'
- 'ee/app/graphql/mutations/analytics/devops_adoption/enabled_namespaces/bulk_enable.rb'
- 'ee/app/graphql/mutations/audit_events/external_audit_event_destinations/create.rb'
@ -1103,7 +1102,6 @@ Layout/LineLength:
- 'ee/spec/finders/projects/integrations/jira/by_ids_finder_spec.rb'
- 'ee/spec/finders/projects/integrations/jira/issues_finder_spec.rb'
- 'ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerability_reads_finder_spec.rb'
- 'ee/spec/finders/snippets_finder_spec.rb'
- 'ee/spec/finders/template_finder_spec.rb'

View File

@ -183,7 +183,6 @@ RSpec/ContextWording:
- 'ee/spec/finders/productivity_analytics_finder_spec.rb'
- 'ee/spec/finders/scim_finder_spec.rb'
- 'ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerability_reads_finder_spec.rb'
- 'ee/spec/finders/snippets_finder_spec.rb'
- 'ee/spec/finders/template_finder_spec.rb'

View File

@ -426,7 +426,6 @@ RSpec/ExampleWithoutDescription:
- 'spec/models/event_spec.rb'
- 'spec/models/group_group_link_spec.rb'
- 'spec/models/group_spec.rb'
- 'spec/models/hooks/web_hook_spec.rb'
- 'spec/models/incident_management/issuable_escalation_status_spec.rb'
- 'spec/models/incident_management/timeline_event_spec.rb'
- 'spec/models/incident_management/timeline_event_tag_spec.rb'

View File

@ -89,7 +89,6 @@ RSpec/FactoryBot/AvoidCreate:
- 'ee/spec/mailers/ee/emails/projects_spec.rb'
- 'ee/spec/mailers/emails/group_memberships_spec.rb'
- 'ee/spec/mailers/emails/merge_commits_spec.rb'
- 'ee/spec/mailers/emails/namespace_storage_usage_mailer_spec.rb'
- 'ee/spec/mailers/emails/requirements_spec.rb'
- 'ee/spec/mailers/emails/user_cap_spec.rb'
- 'ee/spec/mailers/license_mailer_spec.rb'

View File

@ -81,7 +81,6 @@ RSpec/NamedSubject:
- 'ee/spec/finders/projects/integrations/jira/issues_finder_spec.rb'
- 'ee/spec/finders/security/approval_groups_finder_spec.rb'
- 'ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerability_feedbacks_finder_spec.rb'
- 'ee/spec/finders/security/vulnerability_reads_finder_spec.rb'
- 'ee/spec/finders/snippets_finder_spec.rb'

View File

@ -133,7 +133,6 @@ Style/FormatString:
- 'ee/app/helpers/ee/timeboxes_helper.rb'
- 'ee/app/helpers/vulnerabilities_helper.rb'
- 'ee/app/mailers/ee/emails/admin_notification.rb'
- 'ee/app/mailers/emails/namespace_storage_usage_mailer.rb'
- 'ee/app/models/ci/minutes/notification.rb'
- 'ee/app/models/dast/profile.rb'
- 'ee/app/models/dast/site_profile_secret_variable.rb'

View File

@ -215,7 +215,6 @@ Style/GuardClause:
- 'ee/app/controllers/smartcard_controller.rb'
- 'ee/app/finders/ee/template_finder.rb'
- 'ee/app/finders/iterations_finder.rb'
- 'ee/app/finders/security/vulnerabilities_finder.rb'
- 'ee/app/graphql/mutations/concerns/mutations/shared_epic_arguments.rb'
- 'ee/app/graphql/mutations/iterations/create.rb'
- 'ee/app/graphql/mutations/projects/set_locked.rb'

View File

@ -224,7 +224,6 @@ Style/IfUnlessModifier:
- 'ee/app/controllers/projects/path_locks_controller.rb'
- 'ee/app/controllers/projects/push_rules_controller.rb'
- 'ee/app/finders/security/pipeline_vulnerabilities_finder.rb'
- 'ee/app/finders/security/vulnerabilities_finder.rb'
- 'ee/app/graphql/mutations/audit_events/external_audit_event_destinations/create.rb'
- 'ee/app/graphql/mutations/audit_events/external_audit_event_destinations/destroy.rb'
- 'ee/app/graphql/mutations/boards/scoped_board_mutation.rb'

View File

@ -128,7 +128,7 @@ export default {
const filteredList = sharedAgents.filter((node, index, list) => {
if (!node?.agent) return false;
const isDuplicate = index !== list.findIndex((agent) => agent.id === node.id);
const isDuplicate = index !== list.findIndex((agent) => agent.agent.id === node.agent.id);
const isSameProject = node.agent.project.fullPath === this.projectPath;
return !isDuplicate && !isSameProject;
});

View File

@ -1,11 +1,14 @@
import { ExpandLinesAdapter } from '~/rapid_diffs/expand_lines/adapter';
import { ToggleFileAdapter } from '~/rapid_diffs/toggle_file/adapter';
const RAPID_DIFFS_VIEWERS = {
text_inline: 'text_inline',
text_parallel: 'text_parallel',
};
const COMMON_ADAPTERS = [ExpandLinesAdapter, ToggleFileAdapter];
export const VIEWER_ADAPTERS = {
[RAPID_DIFFS_VIEWERS.text_inline]: [ExpandLinesAdapter],
[RAPID_DIFFS_VIEWERS.text_parallel]: [ExpandLinesAdapter],
[RAPID_DIFFS_VIEWERS.text_inline]: COMMON_ADAPTERS,
[RAPID_DIFFS_VIEWERS.text_parallel]: COMMON_ADAPTERS,
};

View File

@ -0,0 +1,21 @@
// Finds the toggle button complementary to the one that was clicked:
// the "closed" button when the "opened" one was pressed, and vice versa.
// A pressed button carries an empty-string `data-opened` attribute.
function siblingToggleButton(clickedButton) {
  const wasOpened = clickedButton.dataset.opened === '';
  const selector = wasOpened ? 'button[data-closed]' : 'button[data-opened]';
  return clickedButton.parentElement.querySelector(selector);
}

// Rapid-diffs adapter: collapses/expands a diff file's body when one of the
// toggle buttons in the file header is clicked.
export const ToggleFileAdapter = {
  clicks: {
    toggleFile(event) {
      const body = this.diffElement.querySelector('[data-file-body]');
      const clicked = event.target.closest('button');
      const counterpart = siblingToggleButton(clicked);
      body.hidden = !body.hidden;
      // Keep keyboard focus on the opposite toggle button after the swap.
      counterpart.focus();
    },
  },
};

View File

@ -215,10 +215,12 @@ export default {
variables: {
input: {
fullPath: this.fullPath,
parent: {
...this.availableWorkItems?.find(({ id }) => id === this.localSelectedItem),
webUrl: this.parentWebUrl ?? null,
},
parent: this.localSelectedItem
? {
...this.availableWorkItems?.find(({ id }) => id === this.localSelectedItem),
webUrl: this.parentWebUrl ?? null,
}
: null,
workItemType: this.workItemType,
},
},

View File

@ -2,6 +2,11 @@
padding-bottom: $gl-padding;
--rd-diff-file-border-radius: #{calc($gl-border-radius-base - 1px)};
&:has([data-file-body][hidden]) .rd-diff-file-toggle [data-opened],
&:not(:has([data-file-body][hidden])) .rd-diff-file-toggle [data-closed] {
display: none;
}
}
.rd-diff-file-header {
@ -36,3 +41,10 @@
border-radius: inherit;
}
}
[data-file-body][hidden] {
display: block !important;
// https://web.dev/articles/content-visibility#hide_content_with_content-visibility_hidden
// content-visibility: hidden preserves element's rendering state which improves performance for larger diffs
content-visibility: hidden;
}

View File

@ -3,6 +3,8 @@
%diff-file{ id: id, data: server_data }
.rd-diff-file
= render RapidDiffs::DiffFileHeaderComponent.new(diff_file: @diff_file)
.rd-diff-file-body
= render viewer_component.new(diff_file: @diff_file)
-# extra wrapper needed so content-visibility: hidden doesn't require removing border or other styles
%div{ data: { file_body: '' } }
.rd-diff-file-body
= render viewer_component.new(diff_file: @diff_file)
%diff-file-mounted

View File

@ -13,6 +13,9 @@
-# * submodule compare
.rd-diff-file-header{ data: { testid: 'rd-diff-file-header' } }
.rd-diff-file-toggle.gl-mr-2<
= render Pajamas::ButtonComponent.new(category: :tertiary, size: :small, icon: 'chevron-down', button_options: { data: { opened: '', click: 'toggleFile' }, aria: { label: _('Hide file contents') } })
= render Pajamas::ButtonComponent.new(category: :tertiary, size: :small, icon: 'chevron-right', button_options: { data: { closed: '', click: 'toggleFile' }, aria: { label: _('Show file contents') } })
.rd-diff-file-title
- if @diff_file.submodule?
%span

View File

@ -80,6 +80,9 @@ class NotesFinder
{ iid: iid }
end
# the reads finder needs to query by vulnerability_id
return noteables_for_type(type).find_by!(vulnerability_id: query[:id]) if type == 'vulnerability' # rubocop: disable CodeReuse/ActiveRecord
noteables_for_type(type).find_by!(query) # rubocop: disable CodeReuse/ActiveRecord
end

View File

@ -16,7 +16,7 @@ class ApplicationSetting < ApplicationRecord
encrypted_vertex_ai_credentials_iv
encrypted_vertex_ai_access_token
encrypted_vertex_ai_access_token_iv
], remove_with: '17.5', remove_after: '2024-09-19'
], remove_with: '17.10', remove_after: '2025-02-15'
ignore_columns %i[
elasticsearch_aws

View File

@ -42,6 +42,7 @@ module HasUserType
# `service_account` allows instance/namespaces to configure a user for external integrations/automations
# `service_user` is an internal, `gitlab-com`-specific user type for integrations like suggested reviewers
# Changes to these types might have billing implications, https://docs.gitlab.com/ee/subscriptions/gitlab_com/#billable-users
NON_INTERNAL_USER_TYPES = %w[human project_bot service_user service_account].freeze
INTERNAL_USER_TYPES = (USER_TYPES.keys - NON_INTERNAL_USER_TYPES).freeze

View File

@ -0,0 +1,206 @@
# frozen_string_literal: true

module WebHooks
  # Shared behaviour for webhook models (project, group, system and service
  # hooks): encrypted token/URL storage, URL variable interpolation, branch
  # filtering, rate limiting, and execution through WebHookService.
  module Hook
    extend ActiveSupport::Concern

    # Raised by #interpolated_url when the URL template references a variable
    # that is not present in url_variables.
    InterpolationError = Class.new(StandardError)

    # Fixed placeholder returned instead of the real token (see #masked_token).
    SECRET_MASK = '************'

    # Matches `{variable}` references inside the URL template.
    # See app/validators/json_schemas/web_hooks_url_variables.json
    VARIABLE_REFERENCE_RE = /\{([A-Za-z]+[0-9]*(?:[._-][A-Za-z0-9]+)*)\}/

    included do
      include Sortable
      include WebHooks::AutoDisabling

      # Secrets are stored encrypted at rest (AES-256-GCM, per-attribute IV).
      attr_encrypted :token,
        mode: :per_attribute_iv,
        algorithm: 'aes-256-gcm',
        key: Settings.attr_encrypted_db_key_base_32

      attr_encrypted :url,
        mode: :per_attribute_iv,
        algorithm: 'aes-256-gcm',
        key: Settings.attr_encrypted_db_key_base_32

      # Hash attributes are serialized via Gitlab::Json before encryption;
      # ciphertext/IV are stored raw (encode: false).
      attr_encrypted :url_variables,
        mode: :per_attribute_iv,
        key: Settings.attr_encrypted_db_key_base_32,
        algorithm: 'aes-256-gcm',
        marshal: true,
        marshaler: ::Gitlab::Json,
        encode: false,
        encode_iv: false

      attr_encrypted :custom_headers,
        mode: :per_attribute_iv,
        key: Settings.attr_encrypted_db_key_base_32,
        algorithm: 'aes-256-gcm',
        marshal: true,
        marshaler: ::Gitlab::Json,
        encode: false,
        encode_iv: false

      validates :url, presence: true
      # Templated URLs are validated post-interpolation instead (see the
      # :interpolated_url validation below).
      validates :url, public_url: true, if: ->(hook) { hook.validate_public_url? && !hook.url_variables? }
      validates :token, format: { without: /\n/ }

      after_initialize :initialize_url_variables
      after_initialize :initialize_custom_headers

      before_validation :reset_token
      # ServiceHook URLs are managed by integrations, so variables/headers are
      # left intact when their URL changes.
      before_validation :reset_url_variables, unless: ->(hook) { hook.is_a?(ServiceHook) }, on: :update
      before_validation :reset_custom_headers, unless: ->(hook) { hook.is_a?(ServiceHook) }, on: :update
      before_validation :set_branch_filter_nil, if: :branch_filter_strategy_all_branches?

      validates :push_events_branch_filter, untrusted_regexp: true, if: :branch_filter_strategy_regex?
      validates(
        :push_events_branch_filter, "web_hooks/wildcard_branch_filter": true, if: :branch_filter_strategy_wildcard?
      )
      validates :url_variables, json_schema: { filename: 'web_hooks_url_variables' }
      validate :no_missing_url_variables
      validates :interpolated_url, public_url: true, if: ->(hook) { hook.url_variables? && hook.errors.empty? }
      validates :custom_headers, json_schema: { filename: 'web_hooks_custom_headers' }
      validates :custom_webhook_template, length: { maximum: 4096 }

      enum :branch_filter_strategy, {
        wildcard: 0,
        regex: 1,
        all_branches: 2
      }, prefix: true

      # Executes the hook synchronously for the given payload.
      def execute(data, hook_name, idempotency_key: nil, force: false)
        # hook.executable? is checked in WebHookService#execute
        WebHookService.new(self, data, hook_name, idempotency_key: idempotency_key, force: force).execute
      end

      # Enqueues asynchronous execution; no-op when the hook is not executable.
      def async_execute(data, hook_name, idempotency_key: nil)
        WebHookService.new(self, data, hook_name, idempotency_key: idempotency_key).async_execute if executable?
      end

      # Allow urls pointing localhost and the local network
      def allow_local_requests?
        Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
      end

      def help_path
        'user/project/integrations/webhooks'
      end

      # @return [Boolean] Whether or not the WebHook is currently throttled.
      def rate_limited?
        rate_limiter.rate_limited?
      end

      # @return [Integer] The rate limit for the WebHook. `0` for no limit.
      def rate_limit
        rate_limiter.limit
      end

      # Returns the associated Project or Group for the WebHook if one exists.
      # Overridden by inheriting classes.
      def parent; end

      # Custom attributes to be included in the worker context.
      def application_context
        { related_class: self.class.to_s }
      end

      # Exclude binary columns by default - they have no sensible JSON encoding
      def serializable_hash(options = nil)
        options = options.try(:dup) || {}
        options[:except] = Array(options[:except]).dup
        options[:except].concat [:encrypted_url_variables, :encrypted_url_variables_iv]

        super
      end

      # Substitutes every `{var}` reference in the URL with its value from
      # url_variables.
      #
      # @raise [InterpolationError] when a referenced variable is missing
      def interpolated_url(url = self.url, url_variables = self.url_variables)
        return url unless url.include?('{')

        vars = url_variables
        url.gsub(VARIABLE_REFERENCE_RE) do |match|
          vars.fetch(match.delete_prefix('{').delete_suffix('}'))
        end
      rescue KeyError => e
        raise InterpolationError, "Invalid URL template. Missing key #{e.key}"
      end

      # Never exposes the stored token; callers see a fixed mask (or nil).
      def masked_token
        token.present? ? SECRET_MASK : nil
      end

      # Hook classes may override this to opt out of the public-url
      # validation (e.g. SystemHook).
      def validate_public_url?
        true
      end

      private

      # A changed URL invalidates the stored token unless the token itself
      # was also assigned in the same update.
      def reset_token
        self.token = nil if url_changed? && !encrypted_token_changed?
      end

      # Clears URL variables that may no longer be trusted once the effective
      # (interpolated) URL has changed.
      def reset_url_variables
        return if url_variables_were.blank? || !interpolated_url_changed?

        self.url_variables = {} if url_variables_were.keys.intersection(url_variables.keys).any?
        self.url_variables = {} if url_changed? && url_variables_were.to_a.intersection(url_variables.to_a).any?
      end

      def reset_custom_headers
        return if url.nil? # checking interpolated_url with a nil url causes errors
        return unless interpolated_url_changed?

        self.custom_headers = {}
      rescue InterpolationError
        # ignore -- record is invalid and won't be saved. no need to reset custom_headers
      end

      # Compares the previously-persisted interpolated URL against the
      # current one.
      def interpolated_url_changed?
        interpolated_url_was = interpolated_url(decrypt_url_was, url_variables_were)

        interpolated_url_was != interpolated_url
      end

      # The URL IV is Base64-encoded for this attribute (no encode: false on
      # :url above), hence the decode before decrypting.
      def decrypt_url_was
        self.class.decrypt_url(encrypted_url_was, iv: Base64.decode64(encrypted_url_iv_was))
      end

      def url_variables_were
        self.class.decrypt_url_variables(encrypted_url_variables_was, iv: encrypted_url_variables_iv_was)
      end

      def initialize_url_variables
        self.url_variables = {} if encrypted_url_variables.nil?
      end

      def initialize_custom_headers
        self.custom_headers = {} if encrypted_custom_headers.nil?
      end

      def rate_limiter
        @rate_limiter ||= Gitlab::WebHooks::RateLimiter.new(self)
      end

      # Validation: every `{var}` referenced in the URL must be declared in
      # url_variables.
      def no_missing_url_variables
        return if url.nil?

        variable_names = url_variables.keys
        used_variables = url.scan(VARIABLE_REFERENCE_RE).map(&:first)

        missing = used_variables - variable_names
        return if missing.empty?

        errors.add(:url, "Invalid URL template. Missing keys: #{missing}")
      end

      def set_branch_filter_nil
        self.push_events_branch_filter = nil
      end
    end
  end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true

# Temporary twin of SystemHook backed by the standalone `system_hooks` table
# instead of the STI `web_hooks` table, used while system hooks are migrated.
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175729
class NoStiSystemHook < SystemHook # rubocop:disable Gitlab/BoundedContexts, Gitlab/NamespacedClass -- Copied from SystemHook
  self.table_name = "system_hooks"

  # The inherited web_hook_logs association is keyed to the web_hooks table,
  # so it does not apply here.
  undef :web_hook_logs

  # Re-declare the encrypted attributes: unlike the web_hooks-backed models,
  # this table stores ciphertext and IVs raw (encode/encode_iv: false).
  attr_encrypted :token,
    mode: :per_attribute_iv,
    algorithm: 'aes-256-gcm',
    key: Settings.attr_encrypted_db_key_base_32,
    encode: false,
    encode_iv: false

  attr_encrypted :url,
    mode: :per_attribute_iv,
    algorithm: 'aes-256-gcm',
    key: Settings.attr_encrypted_db_key_base_32,
    encode: false,
    encode_iv: false

  # Overrides the inherited helper: the URL IV is stored unencoded here, so
  # no Base64 decode step is needed before decrypting.
  def decrypt_url_was
    self.class.decrypt_url(encrypted_url_was, iv: encrypted_url_iv_was)
  end
end

View File

@ -28,6 +28,8 @@ class ProjectHook < WebHook
self.limit_scope = :project
has_many :web_hook_logs, foreign_key: 'web_hook_id', inverse_of: :web_hook
belongs_to :project
validates :project, presence: true

View File

@ -7,6 +7,8 @@ class ServiceHook < WebHook
self.allow_legacy_sti_class = true
has_many :web_hook_logs, foreign_key: 'web_hook_id', inverse_of: :web_hook
belongs_to :integration
validates :integration, presence: true

View File

@ -1,10 +1,16 @@
# frozen_string_literal: true
# This model is being migrated to the NoStiSystemHook model temporarily.
# Please ensure all changes here are reflected in the new model.
# More info here: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175729
class SystemHook < WebHook
extend ::Gitlab::Utils::Override
include TriggerableHooks
self.allow_legacy_sti_class = true
has_many :web_hook_logs, foreign_key: 'web_hook_id', inverse_of: :web_hook
triggerable_hooks [
:repository_update_hooks,
:push_hooks,
@ -16,7 +22,7 @@ class SystemHook < WebHook
attribute :repository_update_events, default: true
attribute :merge_requests_events, default: false
validates :url, system_hook_url: true
validates :url, system_hook_url: true, unless: ->(hook) { hook.url_variables? }
# Allow urls pointing localhost and the local network
def allow_local_requests?
@ -30,4 +36,9 @@ class SystemHook < WebHook
def help_path
'administration/system_hooks'
end
override :validate_public_url?
def validate_public_url?
false
end
end

View File

@ -1,199 +1,5 @@
# frozen_string_literal: true
class WebHook < ApplicationRecord
include Sortable
include WebHooks::AutoDisabling
InterpolationError = Class.new(StandardError)
SECRET_MASK = '************'
attr_encrypted :token,
mode: :per_attribute_iv,
algorithm: 'aes-256-gcm',
key: Settings.attr_encrypted_db_key_base_32
attr_encrypted :url,
mode: :per_attribute_iv,
algorithm: 'aes-256-gcm',
key: Settings.attr_encrypted_db_key_base_32
attr_encrypted :url_variables,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_32,
algorithm: 'aes-256-gcm',
marshal: true,
marshaler: ::Gitlab::Json,
encode: false,
encode_iv: false
attr_encrypted :custom_headers,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_32,
algorithm: 'aes-256-gcm',
marshal: true,
marshaler: ::Gitlab::Json,
encode: false,
encode_iv: false
has_many :web_hook_logs
validates :url, presence: true
validates :url, public_url: true, unless: ->(hook) { hook.is_a?(SystemHook) || hook.url_variables? }
validates :token, format: { without: /\n/ }
after_initialize :initialize_url_variables
after_initialize :initialize_custom_headers
before_validation :reset_token
before_validation :reset_url_variables, unless: ->(hook) { hook.is_a?(ServiceHook) }, on: :update
before_validation :reset_custom_headers, unless: ->(hook) { hook.is_a?(ServiceHook) }, on: :update
before_validation :set_branch_filter_nil, if: :branch_filter_strategy_all_branches?
validates :push_events_branch_filter, untrusted_regexp: true, if: :branch_filter_strategy_regex?
validates :push_events_branch_filter, "web_hooks/wildcard_branch_filter": true, if: :branch_filter_strategy_wildcard?
validates :url_variables, json_schema: { filename: 'web_hooks_url_variables' }
validate :no_missing_url_variables
validates :interpolated_url, public_url: true, if: ->(hook) { hook.url_variables? && hook.errors.empty? }
validates :custom_headers, json_schema: { filename: 'web_hooks_custom_headers' }
validates :custom_webhook_template, length: { maximum: 4096 }
enum branch_filter_strategy: {
wildcard: 0,
regex: 1,
all_branches: 2
}, _prefix: true
# rubocop: disable CodeReuse/ServiceClass
def execute(data, hook_name, idempotency_key: nil, force: false)
# hook.executable? is checked in WebHookService#execute
WebHookService.new(self, data, hook_name, idempotency_key: idempotency_key, force: force).execute
end
# rubocop: enable CodeReuse/ServiceClass
# rubocop: disable CodeReuse/ServiceClass
def async_execute(data, hook_name, idempotency_key: nil)
WebHookService.new(self, data, hook_name, idempotency_key: idempotency_key).async_execute if executable?
end
# rubocop: enable CodeReuse/ServiceClass
# Allow urls pointing localhost and the local network
def allow_local_requests?
Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
def help_path
'user/project/integrations/webhooks'
end
# @return [Boolean] Whether or not the WebHook is currently throttled.
def rate_limited?
rate_limiter.rate_limited?
end
# @return [Integer] The rate limit for the WebHook. `0` for no limit.
def rate_limit
rate_limiter.limit
end
# Returns the associated Project or Group for the WebHook if one exists.
# Overridden by inheriting classes.
def parent; end
# Custom attributes to be included in the worker context.
def application_context
{ related_class: type }
end
# Exclude binary columns by default - they have no sensible JSON encoding
def serializable_hash(options = nil)
options = options.try(:dup) || {}
options[:except] = Array(options[:except]).dup
options[:except].concat [:encrypted_url_variables, :encrypted_url_variables_iv]
super(options)
end
# See app/validators/json_schemas/web_hooks_url_variables.json
VARIABLE_REFERENCE_RE = /\{([A-Za-z]+[0-9]*(?:[._-][A-Za-z0-9]+)*)\}/
def interpolated_url(url = self.url, url_variables = self.url_variables)
return url unless url.include?('{')
vars = url_variables
url.gsub(VARIABLE_REFERENCE_RE) do
vars.fetch(_1.delete_prefix('{').delete_suffix('}'))
end
rescue KeyError => e
raise InterpolationError, "Invalid URL template. Missing key #{e.key}"
end
def masked_token
token.present? ? SECRET_MASK : nil
end
private
def reset_token
self.token = nil if url_changed? && !encrypted_token_changed?
end
def reset_url_variables
return if url_variables_were.blank? || !interpolated_url_changed?
self.url_variables = {} if url_variables_were.keys.intersection(url_variables.keys).any?
self.url_variables = {} if url_changed? && url_variables_were.to_a.intersection(url_variables.to_a).any?
end
def reset_custom_headers
return if url.nil? # checking interpolated_url with a nil url causes errors
return unless interpolated_url_changed?
self.custom_headers = {}
rescue InterpolationError
# ignore -- record is invalid and won't be saved. no need to reset custom_headers
end
def interpolated_url_changed?
interpolated_url_was = interpolated_url(decrypt_url_was, url_variables_were)
interpolated_url_was != interpolated_url
end
def decrypt_url_was
self.class.decrypt_url(encrypted_url_was, iv: Base64.decode64(encrypted_url_iv_was))
end
def url_variables_were
self.class.decrypt_url_variables(encrypted_url_variables_was, iv: encrypted_url_variables_iv_was)
end
def initialize_url_variables
self.url_variables = {} if encrypted_url_variables.nil?
end
def initialize_custom_headers
self.custom_headers = {} if encrypted_custom_headers.nil?
end
def rate_limiter
@rate_limiter ||= Gitlab::WebHooks::RateLimiter.new(self)
end
def no_missing_url_variables
return if url.nil?
variable_names = url_variables.keys
used_variables = url.scan(VARIABLE_REFERENCE_RE).map(&:first)
missing = used_variables - variable_names
return if missing.empty?
errors.add(:url, "Invalid URL template. Missing keys: #{missing}")
end
def set_branch_filter_nil
self.push_events_branch_filter = nil
end
include WebHooks::Hook
end

View File

@ -6,8 +6,6 @@ module Users
self.table_name = 'user_credit_card_validations'
ignore_columns %i[last_digits network holder_name expiration_date], remove_with: '16.9', remove_after: '2024-01-22'
attr_accessor :last_digits, :network, :holder_name, :expiration_date
belongs_to :user

View File

@ -16,7 +16,7 @@ module WebHooks
# treat this worker as idempotent. Currently this is set to
# the Job ID (jid) of the parent worker.
def perform(hook_id, log_data, response_category, _unique_by)
hook = WebHook.find_by_id(hook_id)
hook = ::WebHook.find_by_id(hook_id)
return unless hook # hook has been deleted before we could run.

View File

@ -66,7 +66,6 @@ domains:
description: Authorization layer
feature_categories:
- permissions
- system_access
Backup:
description: Backup and restore
@ -177,7 +176,7 @@ domains:
feature_categories:
- user_management
- groups_and_projects
- system_access
- permissions
MergeRequests:
description: Code collaboration and review including diffs, MR widgets and mergeability checks

View File

@ -18,7 +18,14 @@ raw_config = if File.exist?(Rails.root.join('config/session_store.yml'))
{}
end
session_cookie_token_prefix = raw_config.fetch(:session_cookie_token_prefix, "")
cell_id = Gitlab.config.cell.id
session_cookie_token_prefix = if raw_config.fetch(:session_cookie_token_prefix, '').present?
raw_config.fetch(:session_cookie_token_prefix)
elsif cell_id.present?
"cell-#{cell_id}"
else
""
end
cookie_key = if Rails.env.development?
cookie_key_prefix = raw_config.fetch(:cookie_key, "_gitlab_session")

View File

@ -5,4 +5,4 @@ feature_category: continuous_integration
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/169208
milestone: '17.6'
queued_migration_version: 20241014081026
finalized_by: # version of the migration that finalized this BBM
finalized_by: 20250113060954

View File

@ -9,14 +9,6 @@ description: Information about Designs, image files under management by the Desi
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/9801
milestone: '11.10'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
namespace_id:
references: namespaces
backfill_via:
parent:
foreign_key: project_id
table: projects
sharding_key: namespace_id
belongs_to: project
desired_sharding_key_migration_job_name: BackfillDesignManagementDesignsNamespaceId
table_size: small
sharding_key:
namespace_id: namespaces

View File

@ -17,5 +17,4 @@ desired_sharding_key:
table: design_management_designs
sharding_key: namespace_id
belongs_to: design
awaiting_backfill_on_parent: true
table_size: small

View File

@ -17,5 +17,4 @@ desired_sharding_key:
table: design_management_designs
sharding_key: namespace_id
belongs_to: design
awaiting_backfill_on_parent: true
table_size: small

10
db/docs/system_hooks.yml Normal file
View File

@ -0,0 +1,10 @@
---
table_name: system_hooks
classes:
- NoStiSystemHook
feature_categories:
- webhooks
description: Webhooks data for system hooks. NoStiSystemHook is a temporary model whilst migrating system hooks from web_hooks to system_hooks.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/175729
milestone: '17.9'
gitlab_schema: gitlab_main_clusterwide

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true

# Creates the standalone `system_hooks` table used by the temporary
# NoStiSystemHook model while system hooks migrate off the STI web_hooks
# table. Columns deliberately mirror the web_hooks schema so existing rows
# can be copied over unchanged.
class CreateSystemHooks < Gitlab::Database::Migration[2.2]
  milestone '17.9'

  def change
    create_table :system_hooks do |t|
      t.timestamps null: true # rubocop:disable Migration/Timestamps -- Needs to match web_hooks table
      # Auto-disabling bookkeeping (see WebHooks::AutoDisabling).
      t.datetime_with_timezone :disabled_until
      t.integer :recent_failures, limit: 2, default: 0, null: false
      t.integer :backoff_count, limit: 2, default: 0, null: false
      t.integer :branch_filter_strategy, limit: 2, default: 0, null: false
      t.boolean :push_events, default: true, null: false
      t.boolean :merge_requests_events, default: false, null: false
      t.boolean :tag_push_events, default: false
      t.boolean :enable_ssl_verification, default: true
      t.boolean :repository_update_events, default: false, null: false
      t.text :push_events_branch_filter, limit: 5000
      t.text :name, limit: 255
      t.text :description, limit: 2048
      t.text :custom_webhook_template, limit: 4096
      # Encrypted attributes stored as raw binary (encode: false on the
      # model), hence ciphertext + IV column pairs.
      t.binary :encrypted_token
      t.binary :encrypted_token_iv
      t.binary :encrypted_url
      t.binary :encrypted_url_iv
      t.binary :encrypted_url_variables
      t.binary :encrypted_url_variables_iv
      t.binary :encrypted_custom_headers
      t.binary :encrypted_custom_headers_iv
    end
  end
end

View File

@ -0,0 +1,14 @@
# frozen_string_literal: true

# Enforces NOT NULL on design_management_designs.namespace_id now that the
# sharding-key backfill has completed (namespace_id is listed as the table's
# sharding key).
class AddDesignManagementDesignsNamespaceIdNotNullConstraint < Gitlab::Database::Migration[2.2]
  # Constraint is added NOT VALID then validated, which cannot run inside a
  # transaction.
  disable_ddl_transaction!
  milestone '17.9'

  def up
    add_not_null_constraint :design_management_designs, :namespace_id
  end

  def down
    remove_not_null_constraint :design_management_designs, :namespace_id
  end
end

View File

@ -0,0 +1,32 @@
# frozen_string_literal: true

# Finalizes the BackfillCiPipelineMessagesProjectId batched background
# migration before the NOT NULL constraint on project_id can be validated.
class EnsureBackfillForPipelineMessages < Gitlab::Database::Migration[2.2]
  milestone '17.9'

  disable_ddl_transaction!

  restrict_gitlab_migration gitlab_schema: :gitlab_ci

  MIGRATION = "BackfillCiPipelineMessagesProjectId"
  TABLE = :ci_pipeline_messages
  PRIMARY_KEY = :id
  # Order matters: must match the job_arguments the BBM was enqueued with
  # (backfill column, parent table, parent column, FK column, partition column).
  ARGUMENTS = %i[
    project_id
    p_ci_pipelines
    project_id
    pipeline_id
    partition_id
  ]

  def up
    # finalize: true runs any remaining batches synchronously.
    ensure_batched_background_migration_is_finished(
      job_class_name: MIGRATION,
      table_name: TABLE,
      column_name: PRIMARY_KEY,
      job_arguments: ARGUMENTS,
      finalize: true
    )
  end

  def down
    # no-op
  end
end

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true

# Adds the project_id index on ci_pipeline_messages (supporting the new
# sharding key / FK lookups), built concurrently to avoid locking the table.
class SyncIndexesForCiPipelineMessagesProjectId < Gitlab::Database::Migration[2.2]
  milestone '17.9'
  # add_concurrent_index requires running outside a transaction.
  disable_ddl_transaction!

  TABLE_NAME = :ci_pipeline_messages
  INDEX_NAME = :index_ci_pipeline_messages_on_project_id

  def up
    add_concurrent_index(TABLE_NAME, :project_id, name: INDEX_NAME)
  end

  def down
    remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME)
  end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true

# Adds a NOT VALID NOT NULL check constraint on
# ci_pipeline_messages.project_id and schedules its validation to run
# asynchronously (validation of existing rows is expensive on large tables).
class AddPrepareNotNullConstraintForPipelineMessagesProjectId < Gitlab::Database::Migration[2.2]
  milestone '17.9'
  disable_ddl_transaction!

  TABLE = :ci_pipeline_messages
  COLUMN = :project_id
  CONSTRAINT = :check_fe8ee122a2

  def up
    # validate: false adds the constraint without checking existing rows;
    # the async validation is queued separately.
    add_not_null_constraint(TABLE, COLUMN, constraint_name: CONSTRAINT, validate: false)
    prepare_async_check_constraint_validation(TABLE, name: CONSTRAINT)
  end

  def down
    unprepare_async_check_constraint_validation(TABLE, name: CONSTRAINT)
    drop_constraint(TABLE, CONSTRAINT)
  end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true

# Validates the previously-added (NOT VALID) NOT NULL constraint on
# projects.organization_id, making it fully enforced.
class ValidateProjectsOrganizationIdNotNullConstraint < Gitlab::Database::Migration[2.2]
  milestone '17.9'

  def up
    validate_not_null_constraint(:projects, :organization_id)
  end

  def down
    # no-op -- constraint validation cannot be meaningfully reverted.
  end
end

View File

@ -0,0 +1 @@
99d9003c96c9706ba3b96fcd758418684af607775eeecc79a34810a211940425

View File

@ -0,0 +1 @@
2705a1ff207a6d3d19030445e3ddb015188ff9129d54b6d41424998e2a02cd87

View File

@ -0,0 +1 @@
cb9dce644dab1213ece0db2d5bd013208f25016f9110c7306b3e836d218f82fa

View File

@ -0,0 +1 @@
2956d1d9d67f7dca3c1a8f24528876e356122e3b142240f2cb077dc2b807a516

View File

@ -0,0 +1 @@
c2b012d1fda5f54ab3392c717eb4d780875ebfbc98c0ed9334efc9d0434530f2

View File

@ -0,0 +1 @@
52007ce16202a4d5dc18538fbe9e3ffa0b5edbf2cdb338c759b50bfba69a1552

View File

@ -296,7 +296,8 @@ CREATE TABLE projects (
suggestion_commit_message character varying(255),
project_namespace_id bigint,
hidden boolean DEFAULT false NOT NULL,
organization_id bigint
organization_id bigint,
CONSTRAINT check_1a6f946a8a CHECK ((organization_id IS NOT NULL))
);
CREATE FUNCTION find_projects_by_id(projects_id bigint) RETURNS projects
@ -12312,7 +12313,8 @@ CREATE TABLE design_management_designs (
namespace_id bigint,
CONSTRAINT check_07155e2715 CHECK ((char_length((filename)::text) <= 255)),
CONSTRAINT check_aaf9fa6ae5 CHECK ((char_length(description) <= 1000000)),
CONSTRAINT check_cfb92df01a CHECK ((iid IS NOT NULL))
CONSTRAINT check_cfb92df01a CHECK ((iid IS NOT NULL)),
CONSTRAINT check_ed4c70e3f1 CHECK ((namespace_id IS NOT NULL))
);
CREATE SEQUENCE design_management_designs_id_seq
@ -21388,6 +21390,46 @@ CREATE SEQUENCE system_access_microsoft_graph_access_tokens_id_seq
ALTER SEQUENCE system_access_microsoft_graph_access_tokens_id_seq OWNED BY system_access_microsoft_graph_access_tokens.id;
CREATE TABLE system_hooks (
id bigint NOT NULL,
created_at timestamp(6) without time zone,
updated_at timestamp(6) without time zone,
disabled_until timestamp with time zone,
recent_failures smallint DEFAULT 0 NOT NULL,
backoff_count smallint DEFAULT 0 NOT NULL,
branch_filter_strategy smallint DEFAULT 0 NOT NULL,
push_events boolean DEFAULT true NOT NULL,
merge_requests_events boolean DEFAULT false NOT NULL,
tag_push_events boolean DEFAULT false,
enable_ssl_verification boolean DEFAULT true,
repository_update_events boolean DEFAULT false NOT NULL,
push_events_branch_filter text,
name text,
description text,
custom_webhook_template text,
encrypted_token bytea,
encrypted_token_iv bytea,
encrypted_url bytea,
encrypted_url_iv bytea,
encrypted_url_variables bytea,
encrypted_url_variables_iv bytea,
encrypted_custom_headers bytea,
encrypted_custom_headers_iv bytea,
CONSTRAINT check_32d89afab7 CHECK ((char_length(push_events_branch_filter) <= 5000)),
CONSTRAINT check_6439bc2682 CHECK ((char_length(name) <= 255)),
CONSTRAINT check_6e64a69bc5 CHECK ((char_length(custom_webhook_template) <= 4096)),
CONSTRAINT check_f6fffb36bd CHECK ((char_length(description) <= 2048))
);
CREATE SEQUENCE system_hooks_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE system_hooks_id_seq OWNED BY system_hooks.id;
CREATE TABLE system_note_metadata (
commit_count integer,
action character varying,
@ -25171,6 +25213,8 @@ ALTER TABLE ONLY system_access_microsoft_applications ALTER COLUMN id SET DEFAUL
ALTER TABLE ONLY system_access_microsoft_graph_access_tokens ALTER COLUMN id SET DEFAULT nextval('system_access_microsoft_graph_access_tokens_id_seq'::regclass);
ALTER TABLE ONLY system_hooks ALTER COLUMN id SET DEFAULT nextval('system_hooks_id_seq'::regclass);
ALTER TABLE ONLY system_note_metadata ALTER COLUMN id SET DEFAULT nextval('system_note_metadata_id_seq'::regclass);
ALTER TABLE ONLY taggings ALTER COLUMN id SET DEFAULT nextval('taggings_id_seq'::regclass);
@ -26271,9 +26315,6 @@ ALTER TABLE ONLY chat_names
ALTER TABLE ONLY chat_teams
ADD CONSTRAINT chat_teams_pkey PRIMARY KEY (id);
ALTER TABLE projects
ADD CONSTRAINT check_1a6f946a8a CHECK ((organization_id IS NOT NULL)) NOT VALID;
ALTER TABLE workspaces
ADD CONSTRAINT check_2a89035b04 CHECK ((personal_access_token_id IS NOT NULL)) NOT VALID;
@ -26307,6 +26348,9 @@ ALTER TABLE events
ALTER TABLE projects
ADD CONSTRAINT check_fa75869cb1 CHECK ((project_namespace_id IS NOT NULL)) NOT VALID;
ALTER TABLE ci_pipeline_messages
ADD CONSTRAINT check_fe8ee122a2 CHECK ((project_id IS NOT NULL)) NOT VALID;
ALTER TABLE ONLY ci_build_needs
ADD CONSTRAINT ci_build_needs_pkey PRIMARY KEY (id);
@ -28035,6 +28079,9 @@ ALTER TABLE ONLY system_access_microsoft_applications
ALTER TABLE ONLY system_access_microsoft_graph_access_tokens
ADD CONSTRAINT system_access_microsoft_graph_access_tokens_pkey PRIMARY KEY (id);
ALTER TABLE ONLY system_hooks
ADD CONSTRAINT system_hooks_pkey PRIMARY KEY (id);
ALTER TABLE ONLY system_note_metadata
ADD CONSTRAINT system_note_metadata_pkey PRIMARY KEY (id);
@ -30976,6 +31023,8 @@ CREATE INDEX index_ci_pipeline_chat_data_on_project_id ON ci_pipeline_chat_data
CREATE INDEX index_ci_pipeline_messages_on_pipeline_id ON ci_pipeline_messages USING btree (pipeline_id);
CREATE INDEX index_ci_pipeline_messages_on_project_id ON ci_pipeline_messages USING btree (project_id);
CREATE INDEX index_ci_pipeline_metadata_on_project_id ON ci_pipeline_metadata USING btree (project_id);
CREATE INDEX index_ci_pipeline_schedule_variables_on_project_id ON ci_pipeline_schedule_variables USING btree (project_id);

View File

@ -56,34 +56,50 @@ or **Accept Next Line Of Inline Suggestion**:
## Switch GitLab accounts in VS Code
If you use multiple GitLab accounts (such as personal and work), the extension uses your `git remote` URL
to determine which account to use. In some cases, the extension can't determine which account to use, and
must ask you to select which project and account to use. This can happen:
The GitLab Workflow extension uses one account for each [VS Code Workspace](https://code.visualstudio.com/docs/editor/workspaces) (window). The extension automatically selects the account when:
- If you have a single remote URL `git@gitlab.com:gitlab-org/gitlab-vscode-extension.git`, but two accounts for
`gitlab.com` (like `@sidney` and `@sidney_work`).
- If you have a single GitLab account (for example `@sidney`), but you have multiple remotes, like:
- `origin`: `git@gitlab.com:gitlab-org/gitlab-vscode-extension.git`
- `personal-fork`: `git@gitlab.com:myusername/gitlab-vscode-extension.git`
- You have added only one GitLab account to the extension.
- All workspaces in your VS Code window use the same GitLab account, based on the `git remote` configuration.
In other cases, you must select a GitLab account for the active VS Code window.
To change the account selection:
1. Open the Command Palette:
- For macOS, press <kbd>Command</kbd>+<kbd>Shift</kbd>+<kbd>P</kbd>.
- For Windows or Linux, press <kbd>Ctrl</kbd>+<kbd>Shift</kbd>+<kbd>P</kbd>.
1. Run the command `GitLab: Select Account for this Workspace`.
1. Select your desired account from the list.
You can also change accounts by selecting the GitLab account status bar item.
## Select your GitLab project
When your Git repository can be associated with multiple GitLab projects, the extension cannot determine which project to use. This can happen when you have multiple remotes, for example:
- `origin`: `git@gitlab.com:gitlab-org/gitlab-vscode-extension.git`
- `personal-fork`: `git@gitlab.com:myusername/gitlab-vscode-extension.git`
In these cases, the extension adds a **(multiple projects)** label to show you must choose an account.
To select an account:
1. On the vertical menu bar, select **GitLab Workflow** (**{tanuki}**) to display the extension sidebar.
1. Expand **Issues and Merge Requests**.
1. Select the line containing **(multiple projects)** to expand the list of accounts.
1. Select the option you want to use:
1. Select your desired project:
![select project-account combination](../img/select-project-account_v17_7.png)
The **Issues and Merge requests** list updates with your information.
The **Issues and Merge requests** list updates with your selected project's information.
### Change your selection
To change your account selection for a project:
To change your project selection:
1. On the vertical menu bar, select **GitLab Workflow** (**{tanuki}**) to display the extension sidebar.
1. Expand **Issues and Merge Requests** to show the project list.
1. Right-click the project's name, and select **Clear selected project**.
1. Right-click the project's name.
1. Select **Clear selected project**.
## Use slash commands

View File

@ -29,16 +29,20 @@ module CsvBuilder
# * +header_to_value_hash+ - A hash of 'Column Heading' => 'value_method'.
# * +associations_to_preload+ - An array of records to preload with a batch of records.
# * +replace_newlines+ - default: false - If true, replaces newline characters with a literal "\n"
# * +order_hint+ - default: :created_at - The column used to order the rows
#
# The value method will be called once for each object in the collection, to
# determine the value for that row. It can either be the name of a method on
# the object, or a lambda to call passing in the object.
def self.new(collection, header_to_value_hash, associations_to_preload = [], replace_newlines: false)
def self.new(
collection, header_to_value_hash, associations_to_preload = [], replace_newlines: false,
order_hint: :created_at)
CsvBuilder::Builder.new(
collection,
header_to_value_hash,
associations_to_preload,
replace_newlines: replace_newlines
replace_newlines: replace_newlines,
order_hint: order_hint
)
end
end

View File

@ -6,13 +6,16 @@ module CsvBuilder
attr_reader :rows_written
def initialize(collection, header_to_value_hash, associations_to_preload = [], replace_newlines: false)
def initialize(
collection, header_to_value_hash, associations_to_preload = [], replace_newlines: false,
order_hint: :created_at)
@header_to_value_hash = header_to_value_hash
@collection = collection
@truncated = false
@rows_written = 0
@associations_to_preload = associations_to_preload
@replace_newlines = replace_newlines
@order_hint = order_hint
end
# Renders the csv to a string
@ -57,7 +60,7 @@ module CsvBuilder
def each(&block)
if @associations_to_preload&.any? && @collection.respond_to?(:each_batch)
@collection.each_batch(order_hint: :created_at) do |relation|
@collection.each_batch(order_hint: @order_hint) do |relation|
relation.preload(@associations_to_preload).order(:id).each(&block)
end
elsif @collection.respond_to?(:find_each)

View File

@ -36884,13 +36884,10 @@ msgstr[1] ""
msgid "NamespaceStorageSize|%{namespace_name} is now read-only. Your ability to write new data to this namespace is restricted. %{read_only_link_start}Which actions are restricted?%{link_end}"
msgstr ""
msgid "NamespaceStorageSize|For more information about storage limits, see our %{faq_link_start}FAQ%{link_end}."
msgid "NamespaceStorageSize|If %{namespace_name} exceeds the %{storage_docs_link_start}storage quota%{link_end}, your ability to write new data to this namespace will be restricted. %{read_only_link_start}Which actions become restricted?%{link_end}"
msgstr ""
msgid "NamespaceStorageSize|If %{namespace_name} exceeds the storage quota, your ability to write new data to this namespace will be restricted. %{read_only_link_start}Which actions become restricted?%{link_end}"
msgstr ""
msgid "NamespaceStorageSize|If a project reaches 100%% of the storage quota (%{free_size_limit}) the project will be in a read-only state, and you won't be able to push to your repository or add large files."
msgid "NamespaceStorageSize|If a project reaches 100%% of the %{storage_docs_link_start}storage quota%{link_end} (%{free_size_limit}) the project will be in a read-only state, and you won't be able to push to your repository or add large files."
msgstr ""
msgid "NamespaceStorageSize|To manage your usage and prevent your projects from being placed in a read-only state, you should immediately %{manage_storage_link_start}reduce storage%{link_end}, or %{support_link_start}contact support%{link_end} to help you manage your usage."
@ -36920,7 +36917,7 @@ msgstr ""
msgid "NamespaceStorageSize|We've noticed an unusually high storage usage on %{namespace_name}"
msgstr ""
msgid "NamespaceStorageSize|You have consumed all available storage and you can't push or add large files to projects over the free tier limit (%{free_size_limit})."
msgid "NamespaceStorageSize|You have consumed all available %{storage_docs_link_start}storage%{link_end} and you can't push or add large files to projects over the free tier limit (%{free_size_limit})."
msgstr ""
msgid "NamespaceStorageSize|You have reached the free storage limit of %{free_size_limit} for %{namespace_name}"
@ -52397,6 +52394,9 @@ msgstr ""
msgid "SecurityReports|New feature: Grouping"
msgstr ""
msgid "SecurityReports|New status must be different than current status."
msgstr ""
msgid "SecurityReports|No identifiers found."
msgstr ""
@ -62995,6 +62995,9 @@ msgstr ""
msgid "VulnerabilityManagement|Dismiss as..."
msgstr ""
msgid "VulnerabilityManagement|Dismissed: %{dismissalReason}"
msgstr ""
msgid "VulnerabilityManagement|Enter a name"
msgstr ""

View File

@ -50,7 +50,6 @@ module QA
#
# @return [Boolean]
def quarantine_changes?
return false if mr_diff.empty?
return false if mr_diff.any? { |change| change[:new_file] || change[:deleted_file] }
files_count = 0

View File

@ -8,7 +8,7 @@ namespace :ci do
include Task::Helpers::Util
include QA::Tools::Ci::Helpers
logger.info("*** Analyzing merge request changes*** ")
logger.info("*** Analyzing which E2E tests to execute based on MR changes or Scheduled pipeline ***")
pipeline_path = args[:pipeline_path] || "tmp"
run_all_label_present = mr_labels.include?("pipeline:run-all-e2e")
@ -30,32 +30,37 @@ namespace :ci do
end
diff = mr_diff
qa_changes = QA::Tools::Ci::QaChanges.new(diff)
if qa_changes.quarantine_changes?
logger.info("Merge request contains only quarantine changes, e2e test execution will be skipped!")
next pipeline_creator.create_noop
end
if qa_changes.only_spec_removal?
logger.info("Merge request contains only e2e spec removal, e2e test execution will be skipped!")
next pipeline_creator.create_noop
end
feature_flags_changes = QA::Tools::Ci::FfChanges.new(diff).fetch
# on run-all label or framework changes do not infer specific tests
run_all_tests = run_all_label_present || qa_changes.framework_changes? ||
!feature_flags_changes.nil?
tests = run_all_tests ? [] : qa_changes.qa_tests
if run_all_label_present
logger.info("Merge request has pipeline:run-all-e2e label, full test suite will be executed")
elsif qa_changes.framework_changes? # run all tests when framework changes detected
logger.info("Merge request contains qa framework changes, full test suite will be executed")
elsif tests.any?
logger.info("Following specs were selected for execution: '#{tests}'")
if diff.empty?
logger.info("No specific specs to execute detected, running full test suites")
else
logger.info("No specific specs to execute detected, full test suite will be executed")
qa_changes = QA::Tools::Ci::QaChanges.new(diff)
if qa_changes.quarantine_changes?
logger.info("Merge request contains only quarantine changes, e2e test execution will be skipped!")
next pipeline_creator.create_noop
end
if qa_changes.only_spec_removal?
logger.info("Merge request contains only e2e spec removal, e2e test execution will be skipped!")
next pipeline_creator.create_noop
end
feature_flags_changes = QA::Tools::Ci::FfChanges.new(diff).fetch
# on run-all label or framework changes do not infer specific tests
run_all_tests = run_all_label_present || qa_changes.framework_changes? ||
!feature_flags_changes.nil?
tests = run_all_tests ? [] : qa_changes.qa_tests
if run_all_label_present
logger.info("Merge request has pipeline:run-all-e2e label, full test suite will be executed")
elsif qa_changes.framework_changes? # run all tests when framework changes detected
logger.info("Merge request contains qa framework changes, full test suite will be executed")
elsif tests.any?
logger.info("Following specs were selected for execution: '#{tests}'")
else
logger.info("No specific specs to execute detected, full test suite will be executed")
end
end
creator_args = {

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Factory for :no_sti_system_hook records (the non-STI system hook model,
# judging by the factory name — used by the NoStiSystemHook specs).
FactoryBot.define do
factory :no_sti_system_hook do
url { generate(:url) }
name { generate(:name) }
description { "Description of #{name}" }
# Adds an authentication token to the hook.
trait :token do
token { generate(:token) }
end
# Populates interpolation variables used in the hook URL.
trait :url_variables do
url_variables { { 'abc' => 'supers3cret', 'def' => 'foobar' } }
end
end
end

View File

@ -4,5 +4,13 @@ FactoryBot.define do
factory :service_hook do
url { generate(:url) }
integration
trait :url_variables do
url_variables { { 'abc' => 'supers3cret', 'def' => 'foobar' } }
end
trait :token do
token { generate(:token) }
end
end
end

View File

@ -5,5 +5,13 @@ FactoryBot.define do
url { generate(:url) }
name { generate(:name) }
description { "Description of #{name}" }
trait :url_variables do
url_variables { { 'abc' => 'supers3cret', 'def' => 'foobar' } }
end
trait :token do
token { generate(:token) }
end
end
end

View File

@ -234,6 +234,61 @@ describe('Agents', () => {
});
});
describe('sharedAgentsList computed property', () => {
const ciAccessAgent = sharedAgentsResponse.data.project.ciAccessAuthorizedAgents.nodes[0];
const userAccessAgent = sharedAgentsResponse.data.project.userAccessAuthorizedAgents.nodes[0];
const createSharedAgentsResponse = (ciAgents, userAgents) => ({
data: {
project: {
id: projectId,
ciAccessAuthorizedAgents: { nodes: ciAgents },
userAccessAuthorizedAgents: { nodes: userAgents },
},
},
});
it('filters out agents from the same project', async () => {
const sameProjectAgent = {
agent: {
...userAccessAgent.agent,
project: { id: projectId, fullPath: provideData.projectPath },
},
};
const updatedResponse = createSharedAgentsResponse([ciAccessAgent], [sameProjectAgent]);
createWrapper({
sharedAgentsQueryResponse: jest.fn().mockResolvedValue(updatedResponse),
});
await waitForPromises();
expect(findTab()).toHaveLength(2);
expect(findTab().at(1).attributes('title')).toBe('Shared agents');
expect(findTab().at(1).findComponent(AgentTable).props('agents')).toHaveLength(1);
});
it('filters out agents duplicates', async () => {
const updatedResponse = createSharedAgentsResponse(
[ciAccessAgent],
[ciAccessAgent, userAccessAgent],
);
createWrapper({
sharedAgentsQueryResponse: jest.fn().mockResolvedValue(updatedResponse),
});
await waitForPromises();
expect(findTab()).toHaveLength(2);
expect(findTab().at(1).attributes('title')).toBe('Shared agents');
expect(findTab().at(1).findComponent(AgentTable).props('agents')).toHaveLength(2);
});
});
describe('agent list update', () => {
const initialResponse = { ...clusterAgentsResponse };
const newAgent = {

View File

@ -318,14 +318,22 @@ const ciAccessAuthorizedAgentsNodes = [
userAccessAuthorizations: null,
connections: null,
tokens: null,
project: agentProject,
project: { id: '2', fullPath: 'path/to/another/project' },
},
},
];
const userAccessAuthorizedAgentsNodes = [
{
agent: {
...agents[0],
__typename: 'ClusterAgent',
id: '4',
name: 'user-access-agent-1',
webPath: 'shared-project/agent-1',
createdAt: timestamp,
userAccessAuthorizations: null,
connections: null,
tokens: null,
project: { id: '2', fullPath: 'path/to/another/project' },
},
},
];
@ -346,12 +354,12 @@ export const sharedAgentsResponse = {
data: {
project: {
id: 'gid://gitlab/Project/1',
},
ciAccessAuthorizedAgents: {
nodes: ciAccessAuthorizedAgentsNodes,
},
userAccessAuthorizedAgents: {
nodes: userAccessAuthorizedAgentsNodes,
ciAccessAuthorizedAgents: {
nodes: ciAccessAuthorizedAgentsNodes,
},
userAccessAuthorizedAgents: {
nodes: userAccessAuthorizedAgentsNodes,
},
},
},
};

View File

@ -0,0 +1,66 @@
import { DiffFile } from '~/rapid_diffs/diff_file';
import { ToggleFileAdapter } from '~/rapid_diffs/toggle_file/adapter';
describe('Diff File Toggle Behavior', () => {
  // In our version of Jest/JSDOM we cannot use
  //
  // - CSS "&" nesting (baseline 2023)
  // - Element.checkVisibility (baseline 2024)
  // - :has (baseline 2023)
  //
  // so this cannot test CSS (which is a majority of our behavior), and must assume that
  // browser CSS is working as documented when we tweak HTML attributes
  //
  // NOTE: a stray `<` previously terminated the rd-diff-file-toggle div's open
  // tag line, injecting bogus text content into the fixture DOM; it is removed.
  const html = `
    <diff-file data-viewer="any">
      <div class="rd-diff-file">
        <div class="rd-diff-file-header" data-testid="rd-diff-file-header">
          <div class="rd-diff-file-toggle gl-mr-2">
            <button data-opened="" data-click="toggleFile" aria-label="Hide file contents" type="button"></button>
            <button data-closed="" data-click="toggleFile" aria-label="Show file contents" type="button"></button>
          </div>
        </div>
        <div data-file-body=""><!-- body content --></div>
        <diff-file-mounted></diff-file-mounted>
    </diff-file>
  `;

  // Lazy accessors into the fixture DOM; returns undefined for unknown keys.
  function get(element) {
    const elements = {
      file: () => document.querySelector('diff-file'),
      hide: () => get('file').querySelector('button[data-opened]'),
      show: () => get('file').querySelector('button[data-closed]'),
      body: () => get('file').querySelector('[data-file-body]'),
    };
    return elements[element]?.();
  }

  // Installs the adapter under test for the "any" viewer configured above.
  function assignAdapter(customAdapter) {
    get('file').adapterConfig = { any: [customAdapter] };
  }

  beforeAll(() => {
    customElements.define('diff-file', DiffFile);
  });

  beforeEach(() => {
    document.body.innerHTML = html;
    assignAdapter(ToggleFileAdapter);
    get('file').mount();
  });

  it('starts with the file body visible', () => {
    expect(get('body').hidden).toEqual(false);
  });

  it('marks the body hidden and focuses the other button when the hide button is clicked', () => {
    const show = get('show');
    const hide = get('hide');
    const body = get('body');

    hide.click();

    expect(body.hidden).toEqual(true);
    expect(document.activeElement).toEqual(show);
  });
});

View File

@ -7,6 +7,7 @@ import {
WIDGET_TYPE_ASSIGNEES,
WIDGET_TYPE_LABELS,
WIDGET_TYPE_DESCRIPTION,
WIDGET_TYPE_HIERARCHY,
} from '~/work_items/constants';
import { createWorkItemQueryResponse } from '../mock_data';
@ -143,6 +144,52 @@ describe('work items graphql resolvers', () => {
});
});
describe('with parent input', () => {
it('updates parent if set', async () => {
await mutate({
parent: {
confidential: false,
id: 'gid://gitlab/WorkItem/1259',
iid: '56',
title: 'PARENT',
webUrl: 'http://127.0.0.1:3000/groups/flightjs/-/epics/56',
workItemType: {
id: 'gid://gitlab/WorkItems::Type/8',
name: 'Epic',
iconName: 'issue-type-epic',
__typename: 'WorkItemType',
},
__typename: 'WorkItem',
},
});
const queryResult = await query(WIDGET_TYPE_HIERARCHY);
expect(queryResult).toMatchObject({
parent: {
confidential: false,
id: 'gid://gitlab/WorkItem/1259',
iid: '56',
title: 'PARENT',
webUrl: 'http://127.0.0.1:3000/groups/flightjs/-/epics/56',
workItemType: {
id: 'gid://gitlab/WorkItems::Type/8',
name: 'Epic',
iconName: 'issue-type-epic',
__typename: 'WorkItemType',
},
__typename: 'WorkItem',
},
});
});
it('updates parent if cleared', async () => {
await mutate({ parent: null });
const queryResult = await query(WIDGET_TYPE_HIERARCHY);
expect(queryResult).toMatchObject({ parent: null });
});
});
it('updates the local storage with every mutation', async () => {
const AUTO_SAVE_KEY = `autosave/new-fullPath-issue-draft`;

View File

@ -14,18 +14,46 @@ RSpec.describe 'Session initializer for GitLab' do
end
describe 'config#session_store' do
it 'initialized as a redis_store with a proper servers configuration' do
expect(subject).to receive(:session_store).with(
Gitlab::Sessions::RedisStore,
a_hash_including(
redis_server: Gitlab::Redis::Sessions.params.merge(
namespace: Gitlab::Redis::Sessions::SESSION_NAMESPACE,
serializer: Gitlab::Sessions::RedisStoreSerializer
context 'when cell.id is configured' do
before do
stub_config(cell: { id: 1 })
end
it 'initialized as a `redis_store` with session cookies prefix that includes cell id' do
expect(subject).to receive(:session_store).with(
Gitlab::Sessions::RedisStore,
a_hash_including(
redis_server: Gitlab::Redis::Sessions.params.merge(
namespace: Gitlab::Redis::Sessions::SESSION_NAMESPACE,
serializer: Gitlab::Sessions::RedisStoreSerializer
),
session_cookie_token_prefix: 'cell-1'
)
)
)
load_session_store
load_session_store
end
end
context 'when cell.id is not configured' do
before do
stub_config(cell: { id: nil })
end
it 'initialized as a `redis_store` with empty session cookie prefix' do
expect(subject).to receive(:session_store).with(
Gitlab::Sessions::RedisStore,
a_hash_including(
redis_server: Gitlab::Redis::Sessions.params.merge(
namespace: Gitlab::Redis::Sessions::SESSION_NAMESPACE,
serializer: Gitlab::Sessions::RedisStoreSerializer
),
session_cookie_token_prefix: ''
)
)
load_session_store
end
end
end
end

View File

@ -1,89 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillPartitionIdCiPipelineMessage,
:suppress_partitioning_routing_analyzer,
feature_category: :continuous_integration do
let(:ci_pipelines_table) { table(:ci_pipelines, primary_key: :id, database: :ci) }
let(:ci_pipeline_messages_table) { table(:ci_pipeline_messages, database: :ci) }
let!(:pipeline_1) { ci_pipelines_table.create!(id: 1, partition_id: 100, project_id: 1) }
let!(:pipeline_2) { ci_pipelines_table.create!(id: 2, partition_id: 101, project_id: 1) }
let!(:pipeline_3) { ci_pipelines_table.create!(id: 3, partition_id: 100, project_id: 1) }
let!(:ci_pipeline_messages_100) do
ci_pipeline_messages_table.create!(
content: 'content',
pipeline_id: pipeline_1.id,
partition_id: pipeline_1.partition_id
)
end
let!(:ci_pipeline_messages_101) do
ci_pipeline_messages_table.create!(
content: 'content',
pipeline_id: pipeline_2.id,
partition_id: pipeline_2.partition_id
)
end
let!(:invalid_ci_pipeline_messages) do
ci_pipeline_messages_table.create!(
content: 'content',
pipeline_id: pipeline_3.id,
partition_id: pipeline_3.partition_id
)
end
let(:migration_attrs) do
{
start_id: ci_pipeline_messages_table.minimum(:id),
end_id: ci_pipeline_messages_table.maximum(:id),
batch_table: :ci_pipeline_messages,
batch_column: :id,
sub_batch_size: 1,
pause_ms: 0,
connection: connection
}
end
let!(:migration) { described_class.new(**migration_attrs) }
let(:connection) { Ci::ApplicationRecord.connection }
around do |example|
connection.transaction do
connection.execute(<<~SQL)
ALTER TABLE ci_pipelines DISABLE TRIGGER ALL;
SQL
example.run
connection.execute(<<~SQL)
ALTER TABLE ci_pipelines ENABLE TRIGGER ALL;
SQL
end
end
describe '#perform' do
context 'when there are no invalid records' do
it 'does not execute the migration' do
expect { migration.perform }
.not_to change { invalid_ci_pipeline_messages.reload.partition_id }
end
end
context 'when second partition exists' do
before do
pipeline_3.update!(partition_id: 101)
end
it 'fixes invalid records in the wrong the partition' do
expect { migration.perform }
.to not_change { ci_pipeline_messages_100.reload.partition_id }
.and not_change { ci_pipeline_messages_101.reload.partition_id }
.and change { invalid_ci_pipeline_messages.reload.partition_id }
.from(100)
.to(101)
end
end
end
end

View File

@ -192,7 +192,6 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
work_in_progress = {
"namespaces" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476209',
"organization_users" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476210',
"projects" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476211',
"push_rules" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476212',
"snippets" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/476216',
"topics" => 'https://gitlab.com/gitlab-org/gitlab/-/issues/463254',

View File

@ -0,0 +1,92 @@
# frozen_string_literal: true
require "spec_helper"
# Model specs for NoStiSystemHook — per its name, the system hook variant
# that does not use STI (assumption from naming; confirm against the model).
RSpec.describe NoStiSystemHook, feature_category: :webhooks do
it_behaves_like 'a webhook', factory: :no_sti_system_hook, auto_disabling: false
# System hooks are never auto-disabled after delivery failures.
it_behaves_like 'a hook that does not get automatically disabled on failure' do
let(:hook) { build(:no_sti_system_hook) }
let(:hook_factory) { :no_sti_system_hook }
let(:default_factory_arguments) { {} }
def find_hooks
described_class.all
end
end
describe 'default attributes' do
let(:no_sti_system_hook) { described_class.new }
it 'sets defined default parameters' do
attrs = {
push_events: false,
repository_update_events: true,
merge_requests_events: false
}
expect(no_sti_system_hook).to have_attributes(attrs)
end
end
describe 'associations' do
# This model intentionally does not expose web_hook_logs.
it { is_expected.not_to respond_to(:web_hook_logs) }
end
describe 'validations' do
describe 'url' do
let(:url) { 'http://localhost:9000' }
# Local URLs are rejected unless the application setting below allows them.
it { is_expected.not_to allow_value(url).for(:url) }
it 'is valid if application settings allow local requests from system hooks' do
settings = ApplicationSetting.new(allow_local_requests_from_system_hooks: true)
allow(ApplicationSetting).to receive(:current).and_return(settings)
is_expected.to allow_value(url).for(:url)
end
end
end
describe '.repository_update_hooks' do
it 'returns hooks for repository update events only' do
hook = create(:no_sti_system_hook, repository_update_events: true)
create(:no_sti_system_hook, repository_update_events: false)
expect(described_class.repository_update_hooks).to eq([hook])
end
end
describe 'execute WebHookService' do
let(:hook) { build(:no_sti_system_hook) }
let(:data) { { key: 'value' } }
let(:hook_name) { 'no_sti_system_hook' }
let(:web_hook_service) { instance_double(WebHookService, execute: true) }
# Synchronous execution passes force: false; async omits it.
it '#execute' do
expect(WebHookService).to receive(:new).with(hook, data, hook_name, idempotency_key: anything, force: false)
.and_return(web_hook_service)
expect(web_hook_service).to receive(:execute)
hook.execute(data, hook_name)
end
it '#async_execute' do
expect(WebHookService).to receive(:new).with(hook, data, hook_name, idempotency_key: anything)
.and_return(web_hook_service)
expect(web_hook_service).to receive(:async_execute)
hook.async_execute(data, hook_name)
end
end
describe '#application_context' do
let(:hook) { build(:no_sti_system_hook) }
it 'includes the type' do
expect(hook.application_context).to eq(
related_class: 'NoStiSystemHook'
)
end
end
end

View File

@ -17,6 +17,16 @@ RSpec.describe ProjectHook, feature_category: :webhooks do
describe 'associations' do
it { is_expected.to belong_to :project }
it { is_expected.to have_many(:web_hook_logs) }
end
describe '#destroy' do
it 'does not cascade to web_hook_logs' do
web_hook = create(:project_hook)
create_list(:web_hook_log, 3, web_hook: web_hook)
expect { web_hook.destroy! }.not_to change { web_hook.web_hook_logs.count }
end
end
describe 'validations' do

View File

@ -16,6 +16,16 @@ RSpec.describe ServiceHook, feature_category: :webhooks do
describe 'associations' do
it { is_expected.to belong_to(:integration) }
it { is_expected.to have_many(:web_hook_logs) }
end
describe '#destroy' do
it 'does not cascade to web_hook_logs' do
web_hook = create(:service_hook)
create_list(:web_hook_log, 3, web_hook: web_hook)
expect { web_hook.destroy! }.not_to change { web_hook.web_hook_logs.count }
end
end
describe 'validations' do

View File

@ -41,6 +41,19 @@ RSpec.describe SystemHook, feature_category: :webhooks do
end
end
describe 'associations' do
it { is_expected.to have_many(:web_hook_logs) }
end
describe '#destroy' do
it 'does not cascade to web_hook_logs' do
web_hook = create(:system_hook)
create_list(:web_hook_log, 3, web_hook: web_hook)
expect { web_hook.destroy! }.not_to change { web_hook.web_hook_logs.count }
end
end
describe "execute", :sidekiq_might_not_need_inline do
let_it_be(:system_hook) { create(:system_hook) }
let_it_be(:user) { create(:user) }
@ -228,4 +241,16 @@ RSpec.describe SystemHook, feature_category: :webhooks do
)
end
end
describe '#pluralized_name' do
subject { build(:no_sti_system_hook).pluralized_name }
it { is_expected.to eq('System hooks') }
end
describe '#help_path' do
subject { build(:no_sti_system_hook).help_path }
it { is_expected.to eq('administration/system_hooks') }
end
end

View File

@ -3,742 +3,5 @@
require 'spec_helper'

# Model specs for WebHook, the base class for project/service/system hooks.
# Uses a :project_hook as the concrete subject throughout; subclass-specific
# behavior is covered by the 'a webhook' shared examples included at the bottom.
RSpec.describe WebHook, feature_category: :webhooks do
  include AfterNextHelpers

  let_it_be(:project) { create(:project) }
  let(:hook) { build(:project_hook, project: project) }

  # Time is frozen for every example so that backoff/disabled_until assertions
  # can compare against e.g. `1.minute.from_now` exactly. Examples tagged
  # :skip_freeze_time opt out (they use travel_to to move past the backoff window).
  around do |example|
    if example.metadata[:skip_freeze_time]
      example.run
    else
      freeze_time { example.run }
    end
  end

  describe 'associations' do
    it { is_expected.to have_many(:web_hook_logs) }
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of(:url) }
    it { is_expected.to validate_length_of(:custom_webhook_template).is_at_most(4096) }

    # url_variables keys must look like identifiers (letters/digits with single
    # '-', '_' or '.' separators), max 20 entries, values non-empty and <= 2048 chars.
    describe 'url_variables' do
      it { is_expected.to allow_value({}).for(:url_variables) }
      it { is_expected.to allow_value({ 'foo' => 'bar' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'FOO' => 'bar' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'MY_TOKEN' => 'bar' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'foo2' => 'bar' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'x' => 'y' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'x' => ('a' * 2048) }).for(:url_variables) }
      it { is_expected.to allow_value({ 'foo' => 'bar', 'bar' => 'baz' }).for(:url_variables) }
      it { is_expected.to allow_value((1..20).to_h { ["k#{_1}", 'value'] }).for(:url_variables) }
      it { is_expected.to allow_value({ 'MY-TOKEN' => 'bar' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'my_secr3t-token' => 'bar' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'x-y-z' => 'bar' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'x_y_z' => 'bar' }).for(:url_variables) }
      it { is_expected.to allow_value({ 'f.o.o' => 'bar' }).for(:url_variables) }
      it { is_expected.not_to allow_value([]).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'foo' => 1 }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'bar' => :baz }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'bar' => nil }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'foo' => '' }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'foo' => ('a' * 2049) }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'has spaces' => 'foo' }).for(:url_variables) }
      it { is_expected.not_to allow_value({ '' => 'foo' }).for(:url_variables) }
      it { is_expected.not_to allow_value({ '1foo' => 'foo' }).for(:url_variables) }
      it { is_expected.not_to allow_value((1..21).to_h { ["k#{_1}", 'value'] }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'MY--TOKEN' => 'foo' }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'MY__SECRET' => 'foo' }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'x-_y' => 'foo' }).for(:url_variables) }
      it { is_expected.not_to allow_value({ 'x..y' => 'foo' }).for(:url_variables) }
    end

    # custom_headers share the same key/value constraints as url_variables
    # (the example matrix below mirrors the one above intentionally).
    describe 'custom_headers' do
      it { is_expected.to allow_value({}).for(:custom_headers) }
      it { is_expected.to allow_value({ 'foo' => 'bar' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'FOO' => 'bar' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'MY_TOKEN' => 'bar' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'foo2' => 'bar' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'x' => 'y' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'x' => ('a' * 2048) }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'foo' => 'bar', 'bar' => 'baz' }).for(:custom_headers) }
      it { is_expected.to allow_value((1..20).to_h { ["k#{_1}", 'value'] }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'MY-TOKEN' => 'bar' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'my_secr3t-token' => 'bar' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'x-y-z' => 'bar' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'x_y_z' => 'bar' }).for(:custom_headers) }
      it { is_expected.to allow_value({ 'f.o.o' => 'bar' }).for(:custom_headers) }
      it { is_expected.not_to allow_value([]).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'foo' => 1 }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'bar' => :baz }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'bar' => nil }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'foo' => '' }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'foo' => ('a' * 2049) }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'has spaces' => 'foo' }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ '' => 'foo' }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ '1foo' => 'foo' }).for(:custom_headers) }
      it { is_expected.not_to allow_value((1..21).to_h { ["k#{_1}", 'value'] }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'MY--TOKEN' => 'foo' }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'MY__SECRET' => 'foo' }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'x-_y' => 'foo' }).for(:custom_headers) }
      it { is_expected.not_to allow_value({ 'x..y' => 'foo' }).for(:custom_headers) }
    end

    # Only http(s) URLs are accepted; surrounding whitespace is stripped before save.
    describe 'url' do
      it { is_expected.to allow_value('http://example.com').for(:url) }
      it { is_expected.to allow_value('https://example.com').for(:url) }
      it { is_expected.to allow_value(' https://example.com ').for(:url) }
      it { is_expected.to allow_value('http://test.com/api').for(:url) }
      it { is_expected.to allow_value('http://test.com/api?key=abc').for(:url) }
      it { is_expected.to allow_value('http://test.com/api?key=abc&type=def').for(:url) }
      it { is_expected.not_to allow_value('example.com').for(:url) }
      it { is_expected.not_to allow_value('ftp://example.com').for(:url) }
      it { is_expected.not_to allow_value('herp-and-derp').for(:url) }

      # Local URLs are rejected unless the instance setting explicitly allows
      # local requests from webhooks (SSRF protection).
      context 'when url is local' do
        let(:url) { 'http://localhost:9000' }

        it { is_expected.not_to allow_value(url).for(:url) }

        it 'is valid if application settings allow local requests from web hooks' do
          settings = ApplicationSetting.new(allow_local_requests_from_web_hooks_and_services: true)
          allow(ApplicationSetting).to receive(:current).and_return(settings)

          is_expected.to allow_value(url).for(:url)
        end
      end

      it 'strips :url before saving it' do
        hook.url = ' https://example.com '
        hook.save!

        expect(hook.url).to eq('https://example.com')
      end

      # With url_variables present, `{name}` placeholders in the URL must each
      # resolve to a defined variable; unknown placeholders invalidate the hook.
      context 'when there are URL variables' do
        subject { hook }

        before do
          hook.url_variables = { 'one' => 'a', 'two' => 'b', 'url' => 'http://example.com' }
        end

        it { is_expected.to allow_value('http://example.com').for(:url) }
        it { is_expected.to allow_value('http://example.com/{one}/{two}').for(:url) }
        it { is_expected.to allow_value('http://example.com/{one}').for(:url) }
        it { is_expected.to allow_value('http://example.com/{two}').for(:url) }
        it { is_expected.to allow_value('http://user:s3cret@example.com/{two}').for(:url) }
        it { is_expected.to allow_value('http://{one}:{two}@example.com').for(:url) }
        it { is_expected.to allow_value('http://{one}').for(:url) }
        it { is_expected.to allow_value('{url}').for(:url) }
        it { is_expected.not_to allow_value('http://example.com/{one}/{two}/{three}').for(:url) }
        it { is_expected.not_to allow_value('http://example.com/{foo}').for(:url) }
        it { is_expected.not_to allow_value('http:{user}:{pwd}//example.com/{foo}').for(:url) }

        it 'mentions all missing variable names' do
          hook.url = 'http://example.com/{one}/{foo}/{two}/{three}'

          expect(hook).to be_invalid
          expect(hook.errors[:url].to_sentence).to eq "Invalid URL template. Missing keys: [\"foo\", \"three\"]"
        end
      end
    end

    describe 'token' do
      it { is_expected.to allow_value("foobar").for(:token) }
      # Newlines are rejected because the token is sent as an HTTP header value.
      it { is_expected.not_to allow_values("foo\nbar", "foo\r\nbar").for(:token) }
    end

    # Branch filter validation depends on the selected strategy: no constraints
    # for all_branches, ref-name rules for wildcard, and regex syntax for regex.
    describe 'push_events_branch_filter' do
      before do
        subject.branch_filter_strategy = strategy
      end

      context 'with "all branches" strategy' do
        let(:strategy) { 'all_branches' }

        it {
          is_expected.to allow_values(
            "good_branch_name",
            "another/good-branch_name",
            "good branch name",
            "good~branchname",
            "good_branchname(",
            "good_branchname[",
            ""
          ).for(:push_events_branch_filter)
        }
      end

      context 'with "wildcard" strategy' do
        let(:strategy) { 'wildcard' }

        it {
          is_expected.to allow_values(
            "good_branch_name",
            "another/good-branch_name",
            "good_branch_name(",
            ""
          ).for(:push_events_branch_filter)
        }

        it {
          is_expected.not_to allow_values(
            "bad branch name",
            "bad~branchname",
            "bad_branch_name["
          ).for(:push_events_branch_filter)
        }

        it 'gets rid of whitespace' do
          hook.push_events_branch_filter = ' branch '
          hook.save!

          expect(hook.push_events_branch_filter).to eq('branch')
        end

        it 'stores whitespace only as empty' do
          hook.push_events_branch_filter = ' '
          hook.save!

          expect(hook.push_events_branch_filter).to eq('')
        end
      end

      context 'with "regex" strategy' do
        let(:strategy) { 'regex' }

        it {
          is_expected.to allow_values(
            "good_branch_name",
            "another/good-branch_name",
            "good branch name",
            "good~branch~name",
            ""
          ).for(:push_events_branch_filter)
        }

        # Unbalanced brackets/parens are invalid regex syntax.
        it { is_expected.not_to allow_values("bad_branch_name(", "bad_branch_name[").for(:push_events_branch_filter) }
      end
    end

    # Changing the URL clears the token unless the caller re-supplies one,
    # so a secret is never silently sent to a new destination.
    describe 'before_validation :reset_token' do
      subject(:hook) { build_stubbed(:project_hook, :token, project: project) }

      it 'resets token if url changed' do
        hook.url = 'https://webhook.example.com/new-hook'

        expect(hook).to be_valid
        expect(hook.token).to be_nil
      end

      it 'does not reset token if new url is set together with the same token' do
        hook.url = 'https://webhook.example.com/new-hook'
        current_token = hook.token
        hook.token = current_token

        expect(hook).to be_valid
        expect(hook.token).to eq(current_token)
        expect(hook.url).to eq('https://webhook.example.com/new-hook')
      end

      it 'does not reset token if new url is set together with a new token' do
        hook.url = 'https://webhook.example.com/new-hook'
        hook.token = 'token'

        expect(hook).to be_valid
        expect(hook.token).to eq('token')
        expect(hook.url).to eq('https://webhook.example.com/new-hook')
      end
    end

    # Same protection for url_variables: a URL change wipes them unless both
    # url and url_variables are updated together in the same change set.
    describe 'before_validation :reset_url_variables' do
      subject(:hook) { build_stubbed(:project_hook, :url_variables, project: project, url: 'http://example.com/{abc}') }

      it 'resets url variables if url changed' do
        hook.url = 'http://example.com/new-hook'

        expect(hook).to be_valid
        expect(hook.url_variables).to eq({})
      end

      it 'resets url variables if url is changed but url variables stayed the same' do
        hook.url = 'http://test.example.com/{abc}'

        expect(hook).not_to be_valid
        expect(hook.url_variables).to eq({})
      end

      it 'resets url variables if url is changed and url variables are appended' do
        hook.url = 'http://suspicious.example.com/{abc}/{foo}'
        hook.url_variables = hook.url_variables.merge('foo' => 'bar')

        expect(hook).not_to be_valid
        expect(hook.url_variables).to eq({})
      end

      it 'resets url variables if url is changed and url variables are removed' do
        hook.url = 'http://suspicious.example.com/{abc}'
        hook.url_variables = hook.url_variables.except("def")

        expect(hook).not_to be_valid
        expect(hook.url_variables).to eq({})
      end

      it 'resets url variables if url variables are overwritten' do
        hook.url_variables = hook.url_variables.merge('abc' => 'baz')

        expect(hook).not_to be_valid
        expect(hook.url_variables).to eq({})
      end

      it 'does not reset url variables if both url and url variables are changed' do
        hook.url = 'http://example.com/{one}/{two}'
        hook.url_variables = { 'one' => 'foo', 'two' => 'bar' }

        expect(hook).to be_valid
        expect(hook.url_variables).to eq({ 'one' => 'foo', 'two' => 'bar' })
      end

      context 'without url variables' do
        subject(:hook) { build_stubbed(:project_hook, project: project, url: 'http://example.com', url_variables: nil) }

        it 'does not reset url variables' do
          hook.url = 'http://example.com/{one}/{two}'
          hook.url_variables = { 'one' => 'foo', 'two' => 'bar' }

          expect(hook).to be_valid
          expect(hook.url_variables).to eq({ 'one' => 'foo', 'two' => 'bar' })
        end
      end
    end

    # custom_headers are cleared whenever the *interpolated* URL changes; they
    # survive a URL edit only when the effective destination stays the same.
    describe 'before_validation :reset_custom_headers' do
      subject(:hook) { build_stubbed(:project_hook, :url_variables, project: project, url: 'http://example.com/{abc}', custom_headers: { test: 'blub' }) }

      it 'resets custom headers if url changed' do
        hook.url = 'http://example.com/new-hook'

        expect(hook).to be_valid
        expect(hook.custom_headers).to eq({})
      end

      it 'resets custom headers if url and url variables changed' do
        hook.url = 'http://example.com/{something}'
        hook.url_variables = { 'something' => 'testing-around' }

        expect(hook).to be_valid
        expect(hook.custom_headers).to eq({})
      end

      it 'does not reset custom headers if url stayed the same' do
        hook.url = 'http://example.com/{abc}'

        expect(hook).to be_valid
        expect(hook.custom_headers).to eq({ test: 'blub' })
      end

      it 'does not reset custom headers if url and url variables changed and evaluate to the same url' do
        hook.url = 'http://example.com/{def}'
        hook.url_variables = { 'def' => 'supers3cret' }

        expect(hook).to be_valid
        expect(hook.custom_headers).to eq({ test: 'blub' })
      end
    end

    # Guards the enum so new strategies can't be added without updating specs.
    it "only consider these branch filter strategies are valid" do
      expected_valid_types = %w[all_branches regex wildcard]
      expect(described_class.branch_filter_strategies.keys).to contain_exactly(*expected_valid_types)
    end
  end

  describe 'encrypted attributes' do
    subject { described_class.attr_encrypted_attributes.keys }

    it { is_expected.to contain_exactly(:token, :url, :url_variables, :custom_headers) }
  end

  # #execute delegates to WebHookService, forwarding force and idempotency_key.
  describe 'execute' do
    let(:data) { { key: 'value' } }
    let(:hook_name) { 'project hook' }

    it '#execute' do
      expect_next(WebHookService).to receive(:execute)

      hook.execute(data, hook_name)
    end

    it 'passes force: false to the web hook service by default' do
      expect(WebHookService)
        .to receive(:new).with(hook, data, hook_name, idempotency_key: anything,
          force: false).and_return(double(execute: :done))

      expect(hook.execute(data, hook_name)).to eq :done
    end

    it 'passes force: true to the web hook service if required' do
      expect(WebHookService)
        .to receive(:new).with(hook, data, hook_name, idempotency_key: anything,
          force: true).and_return(double(execute: :forced))

      expect(hook.execute(data, hook_name, force: true)).to eq :forced
    end

    it 'forwards the idempotency key to the WebHook service when present' do
      idempotency_key = SecureRandom.uuid

      expect(WebHookService)
        .to receive(:new)
        .with(anything, anything, anything, idempotency_key: idempotency_key, force: anything)
        .and_return(double(execute: :done))

      expect(hook.execute(data, hook_name, idempotency_key: idempotency_key)).to eq :done
    end

    it 'forwards a nil idempotency key to the WebHook service when not supplied' do
      expect(WebHookService)
        .to receive(:new).with(anything, anything, anything, idempotency_key: nil,
          force: anything).and_return(double(execute: :done))

      expect(hook.execute(data, hook_name)).to eq :done
    end
  end

  # #async_execute mirrors #execute but skips hooks that are not executable
  # (e.g. auto-disabled ones) instead of enqueueing them.
  describe 'async_execute' do
    let(:data) { { key: 'value' } }
    let(:hook_name) { 'project hook' }

    it '#async_execute' do
      expect_next(WebHookService).to receive(:async_execute)

      hook.async_execute(data, hook_name)
    end

    it 'forwards the idempotency key to the WebHook service when present' do
      idempotency_key = SecureRandom.uuid

      expect(WebHookService)
        .to receive(:new)
        .with(anything, anything, anything, idempotency_key: idempotency_key)
        .and_return(double(async_execute: :done))

      expect(hook.async_execute(data, hook_name, idempotency_key: idempotency_key)).to eq :done
    end

    it 'forwards a nil idempotency key to the WebHook service when not supplied' do
      expect(WebHookService)
        .to receive(:new).with(anything, anything, anything,
          idempotency_key: nil).and_return(double(async_execute: :done))

      expect(hook.async_execute(data, hook_name)).to eq :done
    end

    it 'does not async execute non-executable hooks' do
      allow(hook).to receive(:executable?).and_return(false)

      expect(WebHookService).not_to receive(:new)

      hook.async_execute(data, hook_name)
    end
  end

  # Logs are intentionally retained after the hook is destroyed
  # (they are cleaned up separately, not via a dependent: option).
  describe '#destroy' do
    it 'does not cascade to web_hook_logs' do
      web_hook = create(:project_hook)
      create_list(:web_hook_log, 3, web_hook: web_hook)

      expect { web_hook.destroy! }.not_to change(web_hook.web_hook_logs, :count)
    end
  end

  # Backoff grows exponentially from INITIAL_BACKOFF, doubling per backoff_count,
  # and is capped at MAX_BACKOFF once MAX_BACKOFF_COUNT is reached.
  describe '#next_backoff' do
    before do
      hook.backoff_count = backoff_count
    end

    context 'when there was no last backoff' do
      let(:backoff_count) { 0 }

      it 'is the initial value' do
        expect(hook.next_backoff).to eq(WebHooks::AutoDisabling::INITIAL_BACKOFF)
      end
    end

    context 'when we have backed off once' do
      let(:backoff_count) { 1 }

      it 'is twice the initial value' do
        expect(hook.next_backoff).to eq(2 * WebHooks::AutoDisabling::INITIAL_BACKOFF)
      end
    end

    context 'when the next backoff is just before the max backoff limit' do
      let(:backoff_count) { WebHooks::AutoDisabling::MAX_BACKOFF_COUNT - 1 }

      it 'is an exponential of the initial backoff' do
        expect(hook.next_backoff).to eq((2**backoff_count) * WebHooks::AutoDisabling::INITIAL_BACKOFF)
      end

      it 'is not yet capped at the max limit' do
        expect(hook.next_backoff).to be < WebHooks::AutoDisabling::MAX_BACKOFF
      end
    end

    describe 'when next_backoff has reached the MAX_BACKOFF limit' do
      let(:backoff_count) { WebHooks::AutoDisabling::MAX_BACKOFF_COUNT }

      it 'does not exceed the max backoff value' do
        expect(hook.next_backoff).to eq(WebHooks::AutoDisabling::MAX_BACKOFF)
      end
    end
  end

  # Rate limiting is fully delegated to Gitlab::WebHooks::RateLimiter.
  describe '#rate_limited?' do
    it 'is false when hook has not been rate limited' do
      expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
        expect(rate_limiter).to receive(:rate_limited?).and_return(false)
      end

      expect(hook).not_to be_rate_limited
    end

    it 'is true when hook has been rate limited' do
      expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
        expect(rate_limiter).to receive(:rate_limited?).and_return(true)
      end

      expect(hook).to be_rate_limited
    end
  end

  describe '#rate_limit' do
    it 'returns the hook rate limit' do
      expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
        expect(rate_limiter).to receive(:limit).and_return(10)
      end

      expect(hook.rate_limit).to eq(10)
    end
  end

  # Serialization must not leak encrypted/binary columns.
  describe '#to_json' do
    it 'does not error' do
      expect { hook.to_json }.not_to raise_error
    end

    it 'does not contain binary attributes' do
      expect(hook.to_json).not_to include('encrypted_url_variables')
    end
  end

  # interpolated_url substitutes {var} placeholders from url_variables.
  describe '#interpolated_url' do
    subject(:hook) { build(:project_hook, project: project) }

    context 'when the hook URL does not contain variables' do
      before do
        hook.url = 'http://example.com'
      end

      it { is_expected.to have_attributes(interpolated_url: hook.url) }
    end

    # Percent-style format directives in the URL must not be interpreted
    # (guards against format-string abuse in user-controlled URLs).
    it 'is not vulnerable to malicious input' do
      hook.url = 'something%{%<foo>2147483628G}'
      hook.url_variables = { 'foo' => '1234567890.12345678' }

      expect(hook).to have_attributes(interpolated_url: hook.url)
    end

    context 'when the hook URL contains variables' do
      before do
        hook.url = 'http://example.com/{path}/resource?token={token}'
        hook.url_variables = { 'path' => 'abc', 'token' => 'xyz' }
      end

      it { is_expected.to have_attributes(interpolated_url: 'http://example.com/abc/resource?token=xyz') }

      context 'when a variable is missing' do
        before do
          hook.url_variables = { 'path' => 'present' }
        end

        it 'raises an error' do
          # We expect validations to prevent this entirely - this is not user-error
          expect { hook.interpolated_url }
            .to raise_error(described_class::InterpolationError, include('Missing key token'))
        end
      end

      context 'when the URL appears to include percent formatting' do
        before do
          hook.url = 'http://example.com/%{path}/resource?token=%{token}'
        end

        it 'succeeds, interpolates correctly' do
          # Only the literal {path}/{token} parts interpolate; the '%' stays as-is.
          expect(hook.interpolated_url).to eq 'http://example.com/%abc/resource?token=%xyz'
        end
      end
    end
  end

  describe '#masked_token' do
    it { expect(hook.masked_token).to be_nil }

    context 'with a token' do
      let(:hook) { build(:project_hook, :token, project: project) }

      it { expect(hook.masked_token).to eq described_class::SECRET_MASK }
    end
  end

  # backoff! records a failure and, past FAILURE_THRESHOLD, temporarily
  # disables the hook with an exponentially growing disabled_until window.
  describe '#backoff!' do
    context 'when we have not backed off before' do
      it 'increments the recent_failures count but does not disable the hook yet' do
        expect { hook.backoff! }.to change(hook, :recent_failures).to(1)
        expect(hook.class.executable).to include(hook)
      end
    end

    context 'when hook is at the failure threshold' do
      before do
        WebHooks::AutoDisabling::FAILURE_THRESHOLD.times { hook.backoff! }
      end

      it 'is not yet disabled' do
        expect(hook.class.executable).to include(hook)

        expect(hook).to have_attributes(
          recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD,
          backoff_count: 0,
          disabled_until: nil
        )
      end

      context 'when hook is next told to backoff' do
        before do
          hook.backoff!
        end

        it 'causes the hook to become disabled for initial backoff period' do
          expect(hook.class.executable).not_to include(hook)

          expect(hook).to have_attributes(
            recent_failures: (WebHooks::AutoDisabling::FAILURE_THRESHOLD + 1),
            backoff_count: 1,
            disabled_until: 1.minute.from_now
          )
        end

        context 'when the backoff time has elapsed', :skip_freeze_time do
          it 'is no longer disabled' do
            travel_to(hook.disabled_until + 1.minute) do
              expect(hook.class.executable).to include(hook)
            end
          end

          context 'when the hook is next told to backoff' do
            it 'disables the hook again, increasing the backoff time exponentially' do
              travel_to(hook.disabled_until + 1.minute) do
                hook.backoff!

                expect(hook.class.executable).not_to include(hook)

                expect(hook).to have_attributes(
                  recent_failures: (WebHooks::AutoDisabling::FAILURE_THRESHOLD + 2),
                  backoff_count: 2,
                  disabled_until: 2.minutes.from_now
                )
              end
            end
          end
        end
      end
    end

    # Already-disabled hooks must be no-ops: verified by asserting zero SQL.
    it 'does not do anything if the hook is currently temporarily disabled' do
      allow(hook).to receive(:temporarily_disabled?).and_return(true)

      sql_count = ActiveRecord::QueryRecorder.new { hook.backoff! }.count

      expect(sql_count).to eq(0)
    end

    it 'does not do anything if the hook is currently permanently disabled' do
      allow(hook).to receive(:permanently_disabled?).and_return(true)

      sql_count = ActiveRecord::QueryRecorder.new { hook.backoff! }.count

      expect(sql_count).to eq(0)
    end

    # Counters that somehow exceeded MAX_FAILURES are clamped back down.
    context 'when the counter are above MAX_FAILURES' do
      let(:max_failures) { WebHooks::AutoDisabling::MAX_FAILURES }

      before do
        hook.update!(
          recent_failures: (max_failures + 1),
          backoff_count: (max_failures + 1),
          disabled_until: 1.hour.ago
        )
      end

      it 'reduces the counter to MAX_FAILURES' do
        hook.backoff!

        expect(hook).to have_attributes(
          recent_failures: max_failures,
          backoff_count: max_failures
        )
      end
    end
  end

  # failed! counts failures without ever setting a backoff window
  # (contrast with #backoff!, which also disables the hook temporarily).
  describe '#failed!' do
    it 'increments the recent_failures count but does not disable the hook yet' do
      expect { hook.failed! }.to change(hook, :recent_failures).to(1)
      expect(hook.class.executable).to include(hook)
    end

    context 'when hook is at the failure threshold' do
      before do
        WebHooks::AutoDisabling::FAILURE_THRESHOLD.times { hook.failed! }
      end

      it 'is not yet disabled' do
        expect(hook.class.executable).to include(hook)

        expect(hook).to have_attributes(
          recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD,
          backoff_count: 0,
          disabled_until: nil
        )
      end

      context 'when hook is next failed' do
        before do
          hook.failed!
        end

        it 'causes the hook to become disabled' do
          expect(hook.class.executable).not_to include(hook)

          expect(hook).to have_attributes(
            recent_failures: (WebHooks::AutoDisabling::FAILURE_THRESHOLD + 1),
            backoff_count: 0,
            disabled_until: nil
          )
        end
      end
    end

    it 'does not do anything if recent_failures is at MAX_FAILURES' do
      hook.recent_failures = WebHooks::AutoDisabling::MAX_FAILURES

      sql_count = ActiveRecord::QueryRecorder.new { hook.failed! }.count

      expect(sql_count).to eq(0)
    end
  end

  # Shared behavior common to all WebHook subclasses.
  it_behaves_like 'a webhook', factory: :project_hook
end

View File

@ -73,3 +73,4 @@
- UserGroupNotificationSettingsFinder
- UserGroupsCounter
- Ai::FeatureSettings::FeatureSettingFinder
- Autocomplete::VulnerabilitiesAutocompleteFinder

View File

@ -331,7 +331,6 @@
- './ee/spec/finders/security/findings_finder_spec.rb'
- './ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
- './ee/spec/finders/security/scan_execution_policies_finder_spec.rb'
- './ee/spec/finders/security/vulnerabilities_finder_spec.rb'
- './ee/spec/finders/security/vulnerability_feedbacks_finder_spec.rb'
- './ee/spec/finders/snippets_finder_spec.rb'
- './ee/spec/finders/template_finder_spec.rb'
@ -1144,7 +1143,6 @@
- './ee/spec/mailers/emails/epics_spec.rb'
- './ee/spec/mailers/emails/group_memberships_spec.rb'
- './ee/spec/mailers/emails/merge_commits_spec.rb'
- './ee/spec/mailers/emails/namespace_storage_usage_mailer_spec.rb'
- './ee/spec/mailers/emails/requirements_spec.rb'
- './ee/spec/mailers/emails/user_cap_spec.rb'
- './ee/spec/mailers/license_mailer_spec.rb'

View File

@ -0,0 +1,705 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.shared_examples 'a webhook' do |factory:, auto_disabling: true|
include AfterNextHelpers
let(:hook) { build(factory) }
around do |example|
if example.metadata[:skip_freeze_time]
example.run
else
freeze_time { example.run }
end
end
describe 'validations' do
it { is_expected.to validate_presence_of(:url) }
it { is_expected.to validate_length_of(:custom_webhook_template).is_at_most(4096) }
# Per the examples below, url_variables must be a Hash of at most 20 entries;
# keys are identifier-like (letter first, single '-'/'_'/'.' separators, no
# doubled separators), values are non-empty Strings of at most 2048 chars.
describe 'url_variables' do
it { is_expected.to allow_value({}).for(:url_variables) }
it { is_expected.to allow_value({ 'foo' => 'bar' }).for(:url_variables) }
it { is_expected.to allow_value({ 'FOO' => 'bar' }).for(:url_variables) }
it { is_expected.to allow_value({ 'MY_TOKEN' => 'bar' }).for(:url_variables) }
it { is_expected.to allow_value({ 'foo2' => 'bar' }).for(:url_variables) }
it { is_expected.to allow_value({ 'x' => 'y' }).for(:url_variables) }
it { is_expected.to allow_value({ 'x' => ('a' * 2048) }).for(:url_variables) }
it { is_expected.to allow_value({ 'foo' => 'bar', 'bar' => 'baz' }).for(:url_variables) }
it { is_expected.to allow_value((1..20).to_h { |i| ["k#{i}", 'value'] }).for(:url_variables) }
it { is_expected.to allow_value({ 'MY-TOKEN' => 'bar' }).for(:url_variables) }
it { is_expected.to allow_value({ 'my_secr3t-token' => 'bar' }).for(:url_variables) }
it { is_expected.to allow_value({ 'x-y-z' => 'bar' }).for(:url_variables) }
it { is_expected.to allow_value({ 'x_y_z' => 'bar' }).for(:url_variables) }
it { is_expected.to allow_value({ 'f.o.o' => 'bar' }).for(:url_variables) }
it { is_expected.not_to allow_value([]).for(:url_variables) }
it { is_expected.not_to allow_value({ 'foo' => 1 }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'bar' => :baz }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'bar' => nil }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'foo' => '' }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'foo' => ('a' * 2049) }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'has spaces' => 'foo' }).for(:url_variables) }
it { is_expected.not_to allow_value({ '' => 'foo' }).for(:url_variables) }
it { is_expected.not_to allow_value({ '1foo' => 'foo' }).for(:url_variables) }
it { is_expected.not_to allow_value((1..21).to_h { |i| ["k#{i}", 'value'] }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'MY--TOKEN' => 'foo' }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'MY__SECRET' => 'foo' }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'x-_y' => 'foo' }).for(:url_variables) }
it { is_expected.not_to allow_value({ 'x..y' => 'foo' }).for(:url_variables) }
end
# custom_headers is validated with exactly the same key/value/size rules as
# url_variables above — the two example sets are intentionally parallel.
describe 'custom_headers' do
it { is_expected.to allow_value({}).for(:custom_headers) }
it { is_expected.to allow_value({ 'foo' => 'bar' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'FOO' => 'bar' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'MY_TOKEN' => 'bar' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'foo2' => 'bar' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'x' => 'y' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'x' => ('a' * 2048) }).for(:custom_headers) }
it { is_expected.to allow_value({ 'foo' => 'bar', 'bar' => 'baz' }).for(:custom_headers) }
it { is_expected.to allow_value((1..20).to_h { |i| ["k#{i}", 'value'] }).for(:custom_headers) }
it { is_expected.to allow_value({ 'MY-TOKEN' => 'bar' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'my_secr3t-token' => 'bar' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'x-y-z' => 'bar' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'x_y_z' => 'bar' }).for(:custom_headers) }
it { is_expected.to allow_value({ 'f.o.o' => 'bar' }).for(:custom_headers) }
it { is_expected.not_to allow_value([]).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'foo' => 1 }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'bar' => :baz }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'bar' => nil }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'foo' => '' }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'foo' => ('a' * 2049) }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'has spaces' => 'foo' }).for(:custom_headers) }
it { is_expected.not_to allow_value({ '' => 'foo' }).for(:custom_headers) }
it { is_expected.not_to allow_value({ '1foo' => 'foo' }).for(:custom_headers) }
it { is_expected.not_to allow_value((1..21).to_h { |i| ["k#{i}", 'value'] }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'MY--TOKEN' => 'foo' }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'MY__SECRET' => 'foo' }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'x-_y' => 'foo' }).for(:custom_headers) }
it { is_expected.not_to allow_value({ 'x..y' => 'foo' }).for(:custom_headers) }
end
# url must be http(s); surrounding whitespace is tolerated on assignment and
# stripped before save. Local URLs are rejected unless the application
# setting allow_local_requests_from_web_hooks_and_services is enabled.
describe 'url' do
it { is_expected.to allow_value('http://example.com').for(:url) }
it { is_expected.to allow_value('https://example.com').for(:url) }
it { is_expected.to allow_value(' https://example.com ').for(:url) }
it { is_expected.to allow_value('http://test.com/api').for(:url) }
it { is_expected.to allow_value('http://test.com/api?key=abc').for(:url) }
it { is_expected.to allow_value('http://test.com/api?key=abc&type=def').for(:url) }
it { is_expected.not_to allow_value('example.com').for(:url) }
it { is_expected.not_to allow_value('ftp://example.com').for(:url) }
it { is_expected.not_to allow_value('herp-and-derp').for(:url) }
context 'when url is local' do
let(:url) { 'http://localhost:9000' }
it { is_expected.not_to allow_value(url).for(:url) }
it 'is valid if application settings allow local requests from web hooks' do
settings = ApplicationSetting.new(allow_local_requests_from_web_hooks_and_services: true)
allow(ApplicationSetting).to receive(:current).and_return(settings)
is_expected.to allow_value(url).for(:url)
end
end
it 'strips :url before saving it' do
hook.url = ' https://example.com '
hook.save!
expect(hook.url).to eq('https://example.com')
end
# With url_variables set, the URL may be a template; every {placeholder}
# must have a matching variable, and validation reports all missing keys.
context 'when there are URL variables' do
subject { hook }
before do
hook.url_variables = { 'one' => 'a', 'two' => 'b', 'url' => 'http://example.com' }
end
it { is_expected.to allow_value('http://example.com').for(:url) }
it { is_expected.to allow_value('http://example.com/{one}/{two}').for(:url) }
it { is_expected.to allow_value('http://example.com/{one}').for(:url) }
it { is_expected.to allow_value('http://example.com/{two}').for(:url) }
it { is_expected.to allow_value('http://user:s3cret@example.com/{two}').for(:url) }
it { is_expected.to allow_value('http://{one}:{two}@example.com').for(:url) }
it { is_expected.to allow_value('http://{one}').for(:url) }
it { is_expected.to allow_value('{url}').for(:url) }
it { is_expected.not_to allow_value('http://example.com/{one}/{two}/{three}').for(:url) }
it { is_expected.not_to allow_value('http://example.com/{foo}').for(:url) }
it { is_expected.not_to allow_value('http:{user}:{pwd}//example.com/{foo}').for(:url) }
it 'mentions all missing variable names' do
hook.url = 'http://example.com/{one}/{foo}/{two}/{three}'
expect(hook).to be_invalid
expect(hook.errors[:url].to_sentence).to eq "Invalid URL template. Missing keys: [\"foo\", \"three\"]"
end
end
end
# token must be a single line — CR/LF characters are rejected.
describe 'token' do
it { is_expected.to allow_value("foobar").for(:token) }
it { is_expected.not_to allow_values("foo\nbar", "foo\r\nbar").for(:token) }
end
# push_events_branch_filter validation depends on branch_filter_strategy:
# 'all_branches' accepts anything, 'wildcard' rejects regex metacharacters,
# 'regex' requires a compilable pattern. Whitespace is stripped on save.
describe 'push_events_branch_filter' do
before do
subject.branch_filter_strategy = strategy
end
context 'with "all branches" strategy' do
let(:strategy) { 'all_branches' }
let(:allowed_values) do
["good_branch_name", "another/good-branch_name", "good branch name", "good~branchname", "good_branchname(",
"good_branchname[", ""]
end
it { is_expected.to allow_values(*allowed_values).for(:push_events_branch_filter) }
end
context 'with "wildcard" strategy' do
let(:strategy) { 'wildcard' }
let(:allowed_values) { ["good_branch_name", "another/good-branch_name", "good_branch_name(", ""] }
let(:disallowed_values) { ["bad branch name", "bad~branchname", "bad_branch_name["] }
it { is_expected.to allow_values(*allowed_values).for(:push_events_branch_filter) }
it { is_expected.not_to allow_values(*disallowed_values).for(:push_events_branch_filter) }
it 'gets rid of whitespace' do
hook.push_events_branch_filter = ' branch '
hook.save!
expect(hook.push_events_branch_filter).to eq('branch')
end
it 'stores whitespace only as empty' do
hook.push_events_branch_filter = ' '
hook.save!
expect(hook.push_events_branch_filter).to eq('')
end
end
context 'with "regex" strategy' do
let(:strategy) { 'regex' }
let(:allowed_values) do
["good_branch_name", "another/good-branch_name", "good branch name", "good~branch~name", ""]
end
it { is_expected.to allow_values(*allowed_values).for(:push_events_branch_filter) }
it { is_expected.not_to allow_values("bad_branch_name(", "bad_branch_name[").for(:push_events_branch_filter) }
end
end
# Changing the URL clears the stored token unless a token (same or new) is
# explicitly assigned alongside the new URL.
describe 'before_validation :reset_token' do
subject(:hook) { build_stubbed(factory, :token) }
it 'resets token if url changed' do
hook.url = 'https://webhook.example.com/new-hook'
expect(hook).to be_valid
expect(hook.token).to be_nil
end
it 'does not reset token if new url is set together with the same token' do
hook.url = 'https://webhook.example.com/new-hook'
current_token = hook.token
hook.token = current_token
expect(hook).to be_valid
expect(hook.token).to eq(current_token)
expect(hook.url).to eq('https://webhook.example.com/new-hook')
end
it 'does not reset token if new url is set together with a new token' do
hook.url = 'https://webhook.example.com/new-hook'
hook.token = 'token'
expect(hook).to be_valid
expect(hook.token).to eq('token')
expect(hook.url).to eq('https://webhook.example.com/new-hook')
end
end
# Changing the URL clears url_variables unless a full replacement set of
# variables is assigned in the same change; partial edits (append, remove,
# overwrite) are wiped and leave the record invalid.
describe 'before_validation :reset_url_variables' do
subject(:hook) { build_stubbed(factory, :url_variables, url: 'http://example.com/{abc}') }
it 'resets url variables if url changed' do
hook.url = 'http://example.com/new-hook'
expect(hook).to be_valid
expect(hook.url_variables).to eq({})
end
it 'resets url variables if url is changed but url variables stayed the same' do
hook.url = 'http://test.example.com/{abc}'
expect(hook).not_to be_valid
expect(hook.url_variables).to eq({})
end
it 'resets url variables if url is changed and url variables are appended' do
hook.url = 'http://suspicious.example.com/{abc}/{foo}'
hook.url_variables = hook.url_variables.merge('foo' => 'bar')
expect(hook).not_to be_valid
expect(hook.url_variables).to eq({})
end
it 'resets url variables if url is changed and url variables are removed' do
hook.url = 'http://suspicious.example.com/{abc}'
hook.url_variables = hook.url_variables.except("def")
expect(hook).not_to be_valid
expect(hook.url_variables).to eq({})
end
it 'resets url variables if url variables are overwritten' do
hook.url_variables = hook.url_variables.merge('abc' => 'baz')
expect(hook).not_to be_valid
expect(hook.url_variables).to eq({})
end
it 'does not reset url variables if both url and url variables are changed' do
hook.url = 'http://example.com/{one}/{two}'
hook.url_variables = { 'one' => 'foo', 'two' => 'bar' }
expect(hook).to be_valid
expect(hook.url_variables).to eq({ 'one' => 'foo', 'two' => 'bar' })
end
context 'without url variables' do
subject(:hook) { build_stubbed(factory, url: 'http://example.com', url_variables: nil) }
it 'does not reset url variables' do
hook.url = 'http://example.com/{one}/{two}'
hook.url_variables = { 'one' => 'foo', 'two' => 'bar' }
expect(hook).to be_valid
expect(hook.url_variables).to eq({ 'one' => 'foo', 'two' => 'bar' })
end
end
end
# custom_headers are cleared whenever the effective (interpolated) URL
# changes, but kept when the URL template change evaluates to the same URL.
describe 'before_validation :reset_custom_headers' do
subject(:hook) { build_stubbed(factory, :url_variables, url: 'http://example.com/{abc}', custom_headers: { test: 'blub' }) }
it 'resets custom headers if url changed' do
hook.url = 'http://example.com/new-hook'
expect(hook).to be_valid
expect(hook.custom_headers).to eq({})
end
it 'resets custom headers if url and url variables changed' do
hook.url = 'http://example.com/{something}'
hook.url_variables = { 'something' => 'testing-around' }
expect(hook).to be_valid
expect(hook.custom_headers).to eq({})
end
it 'does not reset custom headers if url stayed the same' do
hook.url = 'http://example.com/{abc}'
expect(hook).to be_valid
expect(hook.custom_headers).to eq({ test: 'blub' })
end
it 'does not reset custom headers if url and url variables changed and evaluate to the same url' do
hook.url = 'http://example.com/{def}'
hook.url_variables = { 'def' => 'supers3cret' }
expect(hook).to be_valid
expect(hook.custom_headers).to eq({ test: 'blub' })
end
end
# Guard against new enum values being added without updating the filter logic.
it "only consider these branch filter strategies are valid" do
expected_valid_types = %w[all_branches regex wildcard]
expect(described_class.branch_filter_strategies.keys).to match_array(expected_valid_types)
end
end
# Sensitive columns must all be attr_encrypted — exactly these four, so the
# spec fails if an attribute is added or dropped from encryption.
describe 'encrypted attributes' do
subject { described_class.attr_encrypted_attributes.keys }
it { is_expected.to contain_exactly(:token, :url, :url_variables, :custom_headers) }
end
# #execute delegates synchronously to WebHookService, forwarding the
# idempotency_key (nil when absent) and force flag (false by default).
describe 'execute' do
let(:data) { { key: 'value' } }
let(:hook_name) { 'the hook name' }
it '#execute' do
expect_next(WebHookService).to receive(:execute)
hook.execute(data, hook_name)
end
it 'passes force: false to the web hook service by default' do
expect(WebHookService)
.to receive(:new).with(hook, data, hook_name, idempotency_key: anything,
force: false).and_return(instance_double(WebHookService, execute: :done))
expect(hook.execute(data, hook_name)).to eq :done
end
it 'passes force: true to the web hook service if required' do
expect(WebHookService)
.to receive(:new).with(hook, data, hook_name, idempotency_key: anything,
force: true).and_return(instance_double(WebHookService, execute: :forced))
expect(hook.execute(data, hook_name, force: true)).to eq :forced
end
it 'forwards the idempotency key to the WebHook service when present' do
idempotency_key = SecureRandom.uuid
expect(WebHookService)
.to receive(:new)
.with(anything, anything, anything, idempotency_key: idempotency_key, force: anything)
.and_return(instance_double(WebHookService, execute: :done))
expect(hook.execute(data, hook_name, idempotency_key: idempotency_key)).to eq :done
end
it 'forwards a nil idempotency key to the WebHook service when not supplied' do
expect(WebHookService)
.to receive(:new).with(anything, anything, anything, idempotency_key: nil,
force: anything).and_return(instance_double(WebHookService, execute: :done))
expect(hook.execute(data, hook_name)).to eq :done
end
end
# #async_execute mirrors #execute via WebHookService#async_execute, but is
# skipped entirely for hooks that are not executable?.
describe 'async_execute' do
let(:data) { { key: 'value' } }
let(:hook_name) { 'the hook name' }
it '#async_execute' do
expect_next(WebHookService).to receive(:async_execute)
hook.async_execute(data, hook_name)
end
it 'forwards the idempotency key to the WebHook service when present' do
idempotency_key = SecureRandom.uuid
expect(WebHookService)
.to receive(:new)
.with(anything, anything, anything, idempotency_key: idempotency_key)
.and_return(instance_double(WebHookService, async_execute: :done))
expect(hook.async_execute(data, hook_name, idempotency_key: idempotency_key)).to eq :done
end
it 'forwards a nil idempotency key to the WebHook service when not supplied' do
expect(WebHookService)
.to receive(:new).with(anything, anything, anything,
idempotency_key: nil).and_return(instance_double(WebHookService, async_execute: :done))
expect(hook.async_execute(data, hook_name)).to eq :done
end
it 'does not async execute non-executable hooks' do
allow(hook).to receive(:executable?).and_return(false)
expect(WebHookService).not_to receive(:new)
hook.async_execute(data, hook_name)
end
end
# next_backoff doubles per backoff_count (INITIAL_BACKOFF * 2**count) and is
# capped at MAX_BACKOFF once backoff_count reaches MAX_BACKOFF_COUNT.
describe '#next_backoff' do
before do
hook.backoff_count = backoff_count
end
context 'when there was no last backoff' do
let(:backoff_count) { 0 }
it 'is the initial value' do
expect(hook.next_backoff).to eq(WebHooks::AutoDisabling::INITIAL_BACKOFF)
end
end
context 'when we have backed off once' do
let(:backoff_count) { 1 }
it 'is twice the initial value' do
expect(hook.next_backoff).to eq(2 * WebHooks::AutoDisabling::INITIAL_BACKOFF)
end
end
context 'when the next backoff is just before the max backoff limit' do
let(:backoff_count) { WebHooks::AutoDisabling::MAX_BACKOFF_COUNT - 1 }
it 'is an exponential of the initial backoff' do
expect(hook.next_backoff).to eq((2**backoff_count) * WebHooks::AutoDisabling::INITIAL_BACKOFF)
end
it 'is not yet capped at the max limit' do
expect(hook.next_backoff).to be < WebHooks::AutoDisabling::MAX_BACKOFF
end
end
describe 'when next_backoff has reached the MAX_BACKOFF limit' do
let(:backoff_count) { WebHooks::AutoDisabling::MAX_BACKOFF_COUNT }
it 'does not exceed the max backoff value' do
expect(hook.next_backoff).to eq(WebHooks::AutoDisabling::MAX_BACKOFF)
end
end
end
# #rate_limited? delegates to Gitlab::WebHooks::RateLimiter#rate_limited?.
describe '#rate_limited?' do
it 'is false when hook has not been rate limited' do
expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
expect(rate_limiter).to receive(:rate_limited?).and_return(false)
end
expect(hook).not_to be_rate_limited
end
it 'is true when hook has been rate limited' do
expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
expect(rate_limiter).to receive(:rate_limited?).and_return(true)
end
expect(hook).to be_rate_limited
end
end
# #rate_limit delegates to Gitlab::WebHooks::RateLimiter#limit.
describe '#rate_limit' do
it 'returns the hook rate limit' do
expect_next_instance_of(Gitlab::WebHooks::RateLimiter) do |rate_limiter|
expect(rate_limiter).to receive(:limit).and_return(10)
end
expect(hook.rate_limit).to eq(10)
end
end
# Serialization must not raise and must not leak encrypted binary columns
# (checked via the encrypted_url_variables column name).
describe '#to_json' do
it 'does not error' do
expect { hook.to_json }.not_to raise_error
end
it 'does not contain binary attributes' do
expect(hook.to_json).not_to include('encrypted_url_variables')
end
end
# #interpolated_url substitutes {placeholder} segments from url_variables.
# URLs without braces pass through untouched (including %-format lookalikes
# crafted as malicious input); a placeholder with no matching variable raises
# InterpolationError.
describe '#interpolated_url' do
subject(:hook) { build(factory) }
context 'when the hook URL does not contain variables' do
before do
hook.url = 'http://example.com'
end
it { is_expected.to have_attributes(interpolated_url: hook.url) }
end
it 'is not vulnerable to malicious input' do
hook.url = 'something%{%<foo>2147483628G}'
hook.url_variables = { 'foo' => '1234567890.12345678' }
expect(hook).to have_attributes(interpolated_url: hook.url)
end
context 'when the hook URL contains variables' do
before do
hook.url = 'http://example.com/{path}/resource?token={token}'
hook.url_variables = { 'path' => 'abc', 'token' => 'xyz' }
end
it { is_expected.to have_attributes(interpolated_url: 'http://example.com/abc/resource?token=xyz') }
context 'when a variable is missing' do
before do
hook.url_variables = { 'path' => 'present' }
end
it 'raises an error' do
# We expect validations to prevent this entirely - this is not user-error
expect { hook.interpolated_url }
.to raise_error(described_class::InterpolationError, include('Missing key token'))
end
end
context 'when the URL appears to include percent formatting' do
before do
hook.url = 'http://example.com/%{path}/resource?token=%{token}'
end
# Literal '%' characters survive; only {…} segments are interpolated.
it 'succeeds, interpolates correctly' do
expect(hook.interpolated_url).to eq 'http://example.com/%abc/resource?token=%xyz'
end
end
end
end
# #masked_token is nil without a token, otherwise the constant SECRET_MASK
# (the real token value is never exposed).
describe '#masked_token' do
it { expect(hook.masked_token).to be_nil }
context 'with a token' do
let(:hook) { build(factory, :token) }
it { expect(hook.masked_token).to eq described_class::SECRET_MASK }
end
end
# #backoff! — only relevant when the model supports auto-disabling. Failures
# accumulate until FAILURE_THRESHOLD; past that, each call disables the hook
# for an exponentially growing window (see #next_backoff). No-op while the
# hook is already temporarily/permanently disabled, and counters are clamped
# back down to MAX_FAILURES.
describe '#backoff!', if: auto_disabling do
context 'when we have not backed off before' do
it 'increments the recent_failures count but does not disable the hook yet' do
expect { hook.backoff! }.to change { hook.recent_failures }.to(1)
expect(hook.class.executable).to include(hook)
end
end
context 'when hook is at the failure threshold' do
before do
WebHooks::AutoDisabling::FAILURE_THRESHOLD.times { hook.backoff! }
end
it 'is not yet disabled' do
expect(hook.class.executable).to include(hook)
expect(hook).to have_attributes(
recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD,
backoff_count: 0,
disabled_until: nil
)
end
context 'when hook is next told to backoff' do
before do
hook.backoff!
end
it 'causes the hook to become disabled for initial backoff period' do
expect(hook.class.executable).not_to include(hook)
expect(hook).to have_attributes(
recent_failures: (WebHooks::AutoDisabling::FAILURE_THRESHOLD + 1),
backoff_count: 1,
disabled_until: 1.minute.from_now
)
end
# Needs real time progression, so freeze_time is skipped for this branch
# (see the :skip_freeze_time handling in the around hook).
context 'when the backoff time has elapsed', :skip_freeze_time do
it 'is no longer disabled' do
travel_to(hook.disabled_until + 1.minute) do
expect(hook.class.executable).to include(hook)
end
end
context 'when the hook is next told to backoff' do
it 'disables the hook again, increasing the backoff time exponentially' do
travel_to(hook.disabled_until + 1.minute) do
hook.backoff!
expect(hook.class.executable).not_to include(hook)
expect(hook).to have_attributes(
recent_failures: (WebHooks::AutoDisabling::FAILURE_THRESHOLD + 2),
backoff_count: 2,
disabled_until: 2.minutes.from_now
)
end
end
end
end
end
end
# QueryRecorder proves the no-op paths issue zero SQL statements.
it 'does not do anything if the hook is currently temporarily disabled' do
allow(hook).to receive(:temporarily_disabled?).and_return(true)
sql_count = ActiveRecord::QueryRecorder.new { hook.backoff! }.count
expect(sql_count).to eq(0)
end
it 'does not do anything if the hook is currently permanently disabled' do
allow(hook).to receive(:permanently_disabled?).and_return(true)
sql_count = ActiveRecord::QueryRecorder.new { hook.backoff! }.count
expect(sql_count).to eq(0)
end
context 'when the counter are above MAX_FAILURES' do
let(:max_failures) { WebHooks::AutoDisabling::MAX_FAILURES }
before do
hook.update!(
recent_failures: (max_failures + 1),
backoff_count: (max_failures + 1),
disabled_until: 1.hour.ago
)
end
it 'reduces the counter to MAX_FAILURES' do
hook.backoff!
expect(hook).to have_attributes(
recent_failures: max_failures,
backoff_count: max_failures
)
end
end
end
# #failed! — auto-disabling models only. Like #backoff! it counts failures
# and disables past FAILURE_THRESHOLD, but it never schedules a backoff
# window (backoff_count and disabled_until stay untouched), and it is a
# SQL-free no-op once recent_failures reaches MAX_FAILURES.
describe '#failed!', if: auto_disabling do
it 'increments the recent_failures count but does not disable the hook yet' do
expect { hook.failed! }.to change { hook.recent_failures }.to(1)
expect(hook.class.executable).to include(hook)
end
context 'when hook is at the failure threshold' do
before do
WebHooks::AutoDisabling::FAILURE_THRESHOLD.times { hook.failed! }
end
it 'is not yet disabled' do
expect(hook.class.executable).to include(hook)
expect(hook).to have_attributes(
recent_failures: WebHooks::AutoDisabling::FAILURE_THRESHOLD,
backoff_count: 0,
disabled_until: nil
)
end
context 'when hook is next failed' do
before do
hook.failed!
end
it 'causes the hook to become disabled' do
expect(hook.class.executable).not_to include(hook)
expect(hook).to have_attributes(
recent_failures: (WebHooks::AutoDisabling::FAILURE_THRESHOLD + 1),
backoff_count: 0,
disabled_until: nil
)
end
end
end
it 'does not do anything if recent_failures is at MAX_FAILURES' do
hook.recent_failures = WebHooks::AutoDisabling::MAX_FAILURES
sql_count = ActiveRecord::QueryRecorder.new { hook.failed! }.count
expect(sql_count).to eq(0)
end
end
end