Add latest changes from gitlab-org/gitlab@master

parent 3310ea451f
commit 43b92e0ec4
@@ -48,11 +48,6 @@ Include the following detail as necessary:
 /label ~"documentation"
-/label ~"docs-only"

-<!-- Keep one docs:: label, or leave as is to apply ~"docs::improvement"-->
-/label ~"docs::feature"
-/label ~"docs::fix"
-/label ~"docs::improvement"

 /label ~"type::maintenance" ~"maintenance::refactor"

 /milestone %Backlog

@@ -49,9 +49,5 @@ If you aren't sure which tech writer to ask, use [roulette](https://gitlab-org.g
 /label ~documentation
-/label ~"docs-only"

-/label ~"docs::feature"
-/label ~"docs::fix"
-/label ~"docs::improvement"

 /label ~"type::maintenance" ~"maintenance::refactor"
 /assign me

@@ -524,6 +524,7 @@ Gitlab/AvoidCurrentOrganization:
     - 'ee/lib/api/**/*'
     - 'app/graphql/**/*'
     - 'ee/app/graphql/**/*'
+    - 'lib/gitlab/current_settings.rb'
     - 'lib/gitlab/middleware/organizations/current.rb'
     - 'spec/**/*'
     - 'ee/spec/**/*'

@@ -2,5 +2,4 @@
 Gitlab/AvoidCurrentOrganization:
   Details: grace period
   Exclude:
-    - 'lib/gitlab/current_settings.rb'
     - 'lib/gitlab/github_gists_import/importer/gist_importer.rb'

@@ -2682,7 +2682,6 @@ Gitlab/BoundedContexts:
     - 'ee/app/mailers/ee/emails/projects.rb'
     - 'ee/app/mailers/ee/notify.rb'
    - 'ee/app/mailers/ee/preview/notify_preview.rb'
-    - 'ee/app/mailers/emails/abandoned_trial_notification.rb'
     - 'ee/app/mailers/emails/block_seat_overages.rb'
     - 'ee/app/mailers/emails/ci_runner_usage_by_project.rb'
     - 'ee/app/mailers/emails/compliance_frameworks.rb'

@@ -262,7 +262,6 @@ Layout/LineEndStringConcatenationIndentation:
     - 'ee/spec/lib/gitlab/usage/metrics/instrumentations/protected_environments_required_approvals_average_metric_spec.rb'
     - 'ee/spec/lib/gitlab/vulnerability_scanning/security_report_builder_spec.rb'
     - 'ee/spec/lib/security/scan_result_policies/policy_violation_details_spec.rb'
-    - 'ee/spec/mailers/emails/abandoned_trial_notification_spec.rb'
     - 'ee/spec/mailers/emails/block_seat_overages_spec.rb'
     - 'ee/spec/models/dependency_proxy/packages/setting_spec.rb'
     - 'ee/spec/models/members/member_role_spec.rb'

Gemfile (2 changed lines)

@@ -501,7 +501,7 @@ group :development, :test do
   gem 'awesome_print', require: false # rubocop:todo Gemfile/MissingFeatureCategory

   gem 'database_cleaner-active_record', '~> 2.2.0', feature_category: :database
-  gem 'rspec-rails', '~> 6.1.1', feature_category: :shared
+  gem 'rspec-rails', '~> 6.1.5', feature_category: :shared
   gem 'factory_bot_rails', '~> 6.4.3', feature_category: :tooling

   # Prevent occasions where minitest is not bundled in packaged versions of ruby (see #3826)
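
The pessimistic constraint `'~> 6.1.5'` allows patch-level updates within the 6.1 series only. A quick illustration using RubyGems' own requirement class (standard RubyGems semantics, not anything specific to this commit):

```ruby
require 'rubygems'

# '~> 6.1.5' is equivalent to '>= 6.1.5' and '< 6.2'.
req = Gem::Requirement.new('~> 6.1.5')
req.satisfied_by?(Gem::Version.new('6.1.9')) # => true
req.satisfied_by?(Gem::Version.new('6.2.0')) # => false
```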

@@ -594,17 +594,17 @@
 {"name":"rouge","version":"4.3.0","platform":"ruby","checksum":"9ee3d9ec53338e78c03fff0cbcd08881d80d69152349b046761e48ccf2de581c"},
 {"name":"rqrcode","version":"2.2.0","platform":"ruby","checksum":"23eea88bb44c7ee6d6cab9354d08c287f7ebcdc6112e1fe7bcc2d010d1ffefc1"},
 {"name":"rqrcode_core","version":"1.2.0","platform":"ruby","checksum":"cf4989dc82d24e2877984738c4ee569308625fed2a810960f1b02d68d0308d1a"},
-{"name":"rspec","version":"3.12.0","platform":"ruby","checksum":"ccc41799a43509dc0be84070e3f0410ac95cbd480ae7b6c245543eb64162399c"},
+{"name":"rspec","version":"3.13.0","platform":"ruby","checksum":"d490914ac1d5a5a64a0e1400c1d54ddd2a501324d703b8cfe83f458337bab993"},
 {"name":"rspec-benchmark","version":"0.6.0","platform":"ruby","checksum":"1014adb57ec2599a2455c63884229f367a2fff6a63a77fd68ce5d804c83dd6cf"},
-{"name":"rspec-core","version":"3.12.2","platform":"ruby","checksum":"155b54480f28e2b2813185077fe435c2d663031616360ed3b179a9d6a55d2551"},
-{"name":"rspec-expectations","version":"3.12.3","platform":"ruby","checksum":"093d18e2e7e0a2c619ef8f7343d442fc6c0793fb7897d56f16f26c8a9d244416"},
-{"name":"rspec-mocks","version":"3.12.6","platform":"ruby","checksum":"de51a4148ba2ce6f1c1646a2a03e9df2f52da9a42b164f2e7467b2cbe37e07bf"},
+{"name":"rspec-core","version":"3.13.1","platform":"ruby","checksum":"9daa4ff29812e620193ebc8952e032f031fe167a9f6daf7ea3d29dc31d47c868"},
+{"name":"rspec-expectations","version":"3.13.3","platform":"ruby","checksum":"0e6b5af59b900147698ea0ff80456c4f2e69cac4394fbd392fbd1ca561f66c58"},
+{"name":"rspec-mocks","version":"3.13.1","platform":"ruby","checksum":"087189899c337937bcf1d66a50dc3fc999ac88335bbeba4d385c2a38c87d7b38"},
 {"name":"rspec-parameterized","version":"1.0.2","platform":"ruby","checksum":"b456dec0091924175ac13963e173cdbaa2ab3e1581a405a948addc34e3f3f4c2"},
 {"name":"rspec-parameterized-core","version":"1.0.0","platform":"ruby","checksum":"287b494985e79821160af63aba4f91db8dbfa9a21cb200db34ba38f40e16ccc1"},
 {"name":"rspec-parameterized-table_syntax","version":"1.0.0","platform":"ruby","checksum":"d7df951eff9c5dd367ca7d5f9ae4853bb7ab7941f9d5b35bba361d112704988c"},
-{"name":"rspec-rails","version":"6.1.1","platform":"ruby","checksum":"bd949e61f89379f410ea1e43133163282f8d977c683ce6d10bf5aef6b1e995b2"},
+{"name":"rspec-rails","version":"6.1.5","platform":"ruby","checksum":"d11afce893ceb6e2c3c11db280f83dee6d0120d150228cef6b989d37c7394c4b"},
 {"name":"rspec-retry","version":"0.6.2","platform":"ruby","checksum":"6101ba23a38809811ae3484acde4ab481c54d846ac66d5037ccb40131a60d858"},
-{"name":"rspec-support","version":"3.12.0","platform":"ruby","checksum":"dd4d44b247ff679b95b5607ac5641d197a5f9b1d33f916123cb98fc5f917c58b"},
+{"name":"rspec-support","version":"3.13.1","platform":"ruby","checksum":"48877d4f15b772b7538f3693c22225f2eda490ba65a0515c4e7cf6f2f17de70f"},
 {"name":"rspec_junit_formatter","version":"0.6.0","platform":"ruby","checksum":"40dde674e6ae4e6cc0ff560da25497677e34fefd2338cc467a8972f602b62b15"},
 {"name":"rspec_profiling","version":"0.0.9","platform":"ruby","checksum":"6199be2daeaa14bac3d10d704dbb0a8df052cf046332c505603263aea24f7590"},
 {"name":"rubocop","version":"1.62.1","platform":"ruby","checksum":"aeb1ec501aef5833617b3b6a1512303806218c349c28ce5b3ea72e3782ad4a35"},

Gemfile.lock (34 changed lines)

@@ -1580,23 +1580,23 @@ GEM
       chunky_png (~> 1.0)
       rqrcode_core (~> 1.0)
     rqrcode_core (1.2.0)
-    rspec (3.12.0)
-      rspec-core (~> 3.12.0)
-      rspec-expectations (~> 3.12.0)
-      rspec-mocks (~> 3.12.0)
+    rspec (3.13.0)
+      rspec-core (~> 3.13.0)
+      rspec-expectations (~> 3.13.0)
+      rspec-mocks (~> 3.13.0)
     rspec-benchmark (0.6.0)
       benchmark-malloc (~> 0.2)
       benchmark-perf (~> 0.6)
       benchmark-trend (~> 0.4)
       rspec (>= 3.0)
-    rspec-core (3.12.2)
-      rspec-support (~> 3.12.0)
-    rspec-expectations (3.12.3)
+    rspec-core (3.13.1)
+      rspec-support (~> 3.13.0)
+    rspec-expectations (3.13.3)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.12.0)
-    rspec-mocks (3.12.6)
+      rspec-support (~> 3.13.0)
+    rspec-mocks (3.13.1)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.12.0)
+      rspec-support (~> 3.13.0)
     rspec-parameterized (1.0.2)
       rspec-parameterized-core (< 2)
       rspec-parameterized-table_syntax (< 2)

@@ -1608,17 +1608,17 @@ GEM
     rspec-parameterized-table_syntax (1.0.0)
       binding_of_caller
       rspec-parameterized-core (< 2)
-    rspec-rails (6.1.1)
+    rspec-rails (6.1.5)
       actionpack (>= 6.1)
       activesupport (>= 6.1)
       railties (>= 6.1)
-      rspec-core (~> 3.12)
-      rspec-expectations (~> 3.12)
-      rspec-mocks (~> 3.12)
-      rspec-support (~> 3.12)
+      rspec-core (~> 3.13)
+      rspec-expectations (~> 3.13)
+      rspec-mocks (~> 3.13)
+      rspec-support (~> 3.13)
     rspec-retry (0.6.2)
       rspec-core (> 3.3)
-    rspec-support (3.12.0)
+    rspec-support (3.13.1)
     rspec_junit_formatter (0.6.0)
       rspec-core (>= 2, < 4, != 2.12.0)
     rspec_profiling (0.0.9)

@@ -2259,7 +2259,7 @@ DEPENDENCIES
   rqrcode (~> 2.2)
   rspec-benchmark (~> 0.6.0)
   rspec-parameterized (~> 1.0, >= 1.0.2)
-  rspec-rails (~> 6.1.1)
+  rspec-rails (~> 6.1.5)
   rspec-retry (~> 0.6.2)
   rspec_junit_formatter
   rspec_profiling (~> 0.0.9)

@@ -607,17 +607,17 @@
 {"name":"rouge","version":"4.3.0","platform":"ruby","checksum":"9ee3d9ec53338e78c03fff0cbcd08881d80d69152349b046761e48ccf2de581c"},
 {"name":"rqrcode","version":"2.2.0","platform":"ruby","checksum":"23eea88bb44c7ee6d6cab9354d08c287f7ebcdc6112e1fe7bcc2d010d1ffefc1"},
 {"name":"rqrcode_core","version":"1.2.0","platform":"ruby","checksum":"cf4989dc82d24e2877984738c4ee569308625fed2a810960f1b02d68d0308d1a"},
-{"name":"rspec","version":"3.12.0","platform":"ruby","checksum":"ccc41799a43509dc0be84070e3f0410ac95cbd480ae7b6c245543eb64162399c"},
+{"name":"rspec","version":"3.13.0","platform":"ruby","checksum":"d490914ac1d5a5a64a0e1400c1d54ddd2a501324d703b8cfe83f458337bab993"},
 {"name":"rspec-benchmark","version":"0.6.0","platform":"ruby","checksum":"1014adb57ec2599a2455c63884229f367a2fff6a63a77fd68ce5d804c83dd6cf"},
-{"name":"rspec-core","version":"3.12.2","platform":"ruby","checksum":"155b54480f28e2b2813185077fe435c2d663031616360ed3b179a9d6a55d2551"},
-{"name":"rspec-expectations","version":"3.12.3","platform":"ruby","checksum":"093d18e2e7e0a2c619ef8f7343d442fc6c0793fb7897d56f16f26c8a9d244416"},
-{"name":"rspec-mocks","version":"3.12.6","platform":"ruby","checksum":"de51a4148ba2ce6f1c1646a2a03e9df2f52da9a42b164f2e7467b2cbe37e07bf"},
+{"name":"rspec-core","version":"3.13.1","platform":"ruby","checksum":"9daa4ff29812e620193ebc8952e032f031fe167a9f6daf7ea3d29dc31d47c868"},
+{"name":"rspec-expectations","version":"3.13.3","platform":"ruby","checksum":"0e6b5af59b900147698ea0ff80456c4f2e69cac4394fbd392fbd1ca561f66c58"},
+{"name":"rspec-mocks","version":"3.13.1","platform":"ruby","checksum":"087189899c337937bcf1d66a50dc3fc999ac88335bbeba4d385c2a38c87d7b38"},
 {"name":"rspec-parameterized","version":"1.0.2","platform":"ruby","checksum":"b456dec0091924175ac13963e173cdbaa2ab3e1581a405a948addc34e3f3f4c2"},
 {"name":"rspec-parameterized-core","version":"1.0.0","platform":"ruby","checksum":"287b494985e79821160af63aba4f91db8dbfa9a21cb200db34ba38f40e16ccc1"},
 {"name":"rspec-parameterized-table_syntax","version":"1.0.0","platform":"ruby","checksum":"d7df951eff9c5dd367ca7d5f9ae4853bb7ab7941f9d5b35bba361d112704988c"},
-{"name":"rspec-rails","version":"6.1.1","platform":"ruby","checksum":"bd949e61f89379f410ea1e43133163282f8d977c683ce6d10bf5aef6b1e995b2"},
+{"name":"rspec-rails","version":"6.1.5","platform":"ruby","checksum":"d11afce893ceb6e2c3c11db280f83dee6d0120d150228cef6b989d37c7394c4b"},
 {"name":"rspec-retry","version":"0.6.2","platform":"ruby","checksum":"6101ba23a38809811ae3484acde4ab481c54d846ac66d5037ccb40131a60d858"},
-{"name":"rspec-support","version":"3.12.0","platform":"ruby","checksum":"dd4d44b247ff679b95b5607ac5641d197a5f9b1d33f916123cb98fc5f917c58b"},
+{"name":"rspec-support","version":"3.13.1","platform":"ruby","checksum":"48877d4f15b772b7538f3693c22225f2eda490ba65a0515c4e7cf6f2f17de70f"},
 {"name":"rspec_junit_formatter","version":"0.6.0","platform":"ruby","checksum":"40dde674e6ae4e6cc0ff560da25497677e34fefd2338cc467a8972f602b62b15"},
 {"name":"rspec_profiling","version":"0.0.9","platform":"ruby","checksum":"6199be2daeaa14bac3d10d704dbb0a8df052cf046332c505603263aea24f7590"},
 {"name":"rubocop","version":"1.62.1","platform":"ruby","checksum":"aeb1ec501aef5833617b3b6a1512303806218c349c28ce5b3ea72e3782ad4a35"},

@@ -1606,23 +1606,23 @@ GEM
       chunky_png (~> 1.0)
       rqrcode_core (~> 1.0)
     rqrcode_core (1.2.0)
-    rspec (3.12.0)
-      rspec-core (~> 3.12.0)
-      rspec-expectations (~> 3.12.0)
-      rspec-mocks (~> 3.12.0)
+    rspec (3.13.0)
+      rspec-core (~> 3.13.0)
+      rspec-expectations (~> 3.13.0)
+      rspec-mocks (~> 3.13.0)
     rspec-benchmark (0.6.0)
       benchmark-malloc (~> 0.2)
       benchmark-perf (~> 0.6)
       benchmark-trend (~> 0.4)
       rspec (>= 3.0)
-    rspec-core (3.12.2)
-      rspec-support (~> 3.12.0)
-    rspec-expectations (3.12.3)
+    rspec-core (3.13.1)
+      rspec-support (~> 3.13.0)
+    rspec-expectations (3.13.3)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.12.0)
-    rspec-mocks (3.12.6)
+      rspec-support (~> 3.13.0)
+    rspec-mocks (3.13.1)
       diff-lcs (>= 1.2.0, < 2.0)
-      rspec-support (~> 3.12.0)
+      rspec-support (~> 3.13.0)
     rspec-parameterized (1.0.2)
       rspec-parameterized-core (< 2)
       rspec-parameterized-table_syntax (< 2)

@@ -1634,17 +1634,17 @@ GEM
     rspec-parameterized-table_syntax (1.0.0)
       binding_of_caller
       rspec-parameterized-core (< 2)
-    rspec-rails (6.1.1)
+    rspec-rails (6.1.5)
       actionpack (>= 6.1)
       activesupport (>= 6.1)
       railties (>= 6.1)
-      rspec-core (~> 3.12)
-      rspec-expectations (~> 3.12)
-      rspec-mocks (~> 3.12)
-      rspec-support (~> 3.12)
+      rspec-core (~> 3.13)
+      rspec-expectations (~> 3.13)
+      rspec-mocks (~> 3.13)
+      rspec-support (~> 3.13)
     rspec-retry (0.6.2)
       rspec-core (> 3.3)
-    rspec-support (3.12.0)
+    rspec-support (3.13.1)
     rspec_junit_formatter (0.6.0)
       rspec-core (>= 2, < 4, != 2.12.0)
     rspec_profiling (0.0.9)

@@ -2286,7 +2286,7 @@ DEPENDENCIES
   rqrcode (~> 2.2)
   rspec-benchmark (~> 0.6.0)
   rspec-parameterized (~> 1.0, >= 1.0.2)
-  rspec-rails (~> 6.1.1)
+  rspec-rails (~> 6.1.5)
   rspec-retry (~> 0.6.2)
   rspec_junit_formatter
   rspec_profiling (~> 0.0.9)

@@ -42,7 +42,7 @@ export default {
 </script>
 <template>
   <div class="!gl-my-4">
-    <table ref="table" class="!gl-mb-2 !gl-mt-0">
+    <table ref="table" class="!gl-mb-2 !gl-mt-0 gl-overflow-y-hidden">
       <thead>
         <tr v-if="table">
           <th-resizable v-for="(field, fieldIndex) in fields" :key="field.key" :table="table">

@@ -53,7 +53,7 @@ export const JIRA_USER_REQUIREMENTS_DOC_LINK = helpPagePath(
 export const FAILED_TO_UPDATE_DOC_LINK = helpPagePath(
   'administration/settings/jira_cloud_app_troubleshooting',
   {
-    anchor: 'failed-to-update-the-gitlab-instance',
+    anchor: 'error-failed-to-update-the-gitlab-instance',
   },
 );

@@ -170,6 +170,10 @@ export default {
       return getLocationHash();
     },
+    noteUrl() {
+      const routeParamType = this.$route?.params?.type;
+      if (routeParamType && !this.note.url.includes(routeParamType)) {
+        return this.note.url.replace('work_items', routeParamType);
+      }
+      return this.note.url;
+    },
     hasAwardEmojiPermission() {

@@ -368,7 +372,7 @@ export default {
       :author="author"
       :created-at="note.createdAt"
       :note-id="note.id"
-      :note-url="note.url"
+      :note-url="noteUrl"
       :is-internal-note="note.internal"
     >
       <span v-if="note.createdAt" class="gl-hidden sm:gl-inline">·</span>

@@ -20,6 +20,7 @@ class ApplicationSetting < ApplicationRecord
     encrypted_vertex_ai_access_token_iv
   ], remove_with: '17.5', remove_after: '2024-09-19'
+  ignore_columns %i[toggle_security_policy_custom_ci lock_toggle_security_policy_custom_ci], remove_with: '17.6', remove_after: '2024-10-17'
   ignore_column :runners_registration_token, remove_with: '17.7', remove_after: '2024-11-22'

   INSTANCE_REVIEW_MIN_USERS = 50
   GRAFANA_URL_ERROR_MESSAGE = 'Please check your Grafana URL setting in ' \

@@ -39,7 +39,9 @@ class ProjectFeature < ApplicationRecord
     metrics_dashboard: Gitlab::Access::REPORTER,
     container_registry: Gitlab::Access::REPORTER,
     package_registry: Gitlab::Access::REPORTER,
-    environments: Gitlab::Access::REPORTER
+    environments: Gitlab::Access::REPORTER,
+    model_experiments: Gitlab::Access::REPORTER,
+    model_registry: Gitlab::Access::REPORTER
   }.freeze
   PRIVATE_FEATURES_MIN_ACCESS_LEVEL_FOR_PRIVATE_PROJECT = { repository: Gitlab::Access::REPORTER }.freeze

@@ -27,7 +27,9 @@ module VirtualRegistries
       length: { maximum: 255 }
     validates :file_final_path, length: { maximum: 1024 }
     validates :downloads_count, numericality: { greater_than: 0, only_integer: true }
-    validates :relative_path, uniqueness: { scope: :upstream_id }, if: :upstream
+    validates :relative_path,
+      uniqueness: { scope: [:upstream_id, :status] },
+      if: -> { upstream.present? && default? }
     validates :file, presence: true

     mount_file_store_uploader ::VirtualRegistries::CachedResponseUploader

@@ -51,7 +53,11 @@ module VirtualRegistries
     # safe_find_or_create_by.
     # We are using the check existence and rescue alternative.
     def self.create_or_update_by!(upstream:, group_id:, relative_path:, updates: {})
-      find_or_initialize_by(upstream: upstream, group_id: group_id, relative_path: relative_path).tap do |record|
+      default.find_or_initialize_by(
+        upstream: upstream,
+        group_id: group_id,
+        relative_path: relative_path
+      ).tap do |record|
        record.increment(:downloads_count) if record.persisted?
         record.update!(**updates)
       end
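
The change scopes the find-or-create to `default` (non-errored) cached responses. A minimal usage sketch under stated assumptions: the model class name and the attribute values below are hypothetical illustrations based on the table name in the migrations later in this commit, not code from the diff.

```ruby
# Hypothetical illustration only; the real callers live elsewhere in the codebase.
cached = VirtualRegistries::Packages::Maven::CachedResponse.create_or_update_by!(
  upstream: upstream,                                       # an existing upstream record
  group_id: upstream.group_id,
  relative_path: '/com/example/app/1.0/app-1.0.jar',        # hypothetical path
  updates: { downloads_count: 1 }
)
# On a repeat call for the same default-status row, the record is found
# instead of created and downloads_count is incremented before update!.
```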

@@ -1019,7 +1019,7 @@ class ProjectPolicy < BasePolicy
     enable :read_namespace_catalog
   end

-  rule { reporter & model_registry_enabled }.policy do
+  rule { model_registry_enabled }.policy do
     enable :read_model_registry
   end

@@ -1027,7 +1027,7 @@ class ProjectPolicy < BasePolicy
     enable :write_model_registry
   end

-  rule { reporter & model_experiments_enabled }.policy do
+  rule { model_experiments_enabled }.policy do
     enable :read_model_experiments
   end
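
Together with the `ProjectFeature` change above, read access is now governed by the feature's configured access level (REPORTER by default) rather than a hard-coded `reporter` condition in the policy. A hedged sketch of checking the resulting permission with GitLab's standard policy API; the user and project lookups are placeholders:

```ruby
# Sketch only — assumes a project whose model registry feature is enabled.
user = User.find_by(username: 'some-user')              # hypothetical user
project = Project.find_by_full_path('group/project')    # hypothetical project

Ability.allowed?(user, :read_model_registry, project)
Ability.allowed?(user, :read_model_experiments, project)
```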

@@ -46,7 +46,7 @@ module VirtualRegistries
       def cached_response
         # TODO change this to support multiple upstreams
         # https://gitlab.com/gitlab-org/gitlab/-/issues/480461
-        registry.upstream.cached_responses.find_by_relative_path(relative_path)
+        registry.upstream.cached_responses.default.find_by_relative_path(relative_path)
       end
       strong_memoize_attr :cached_response

@@ -129,6 +129,11 @@ InitializerConnections.raise_if_new_database_connection do
     end
   end

+  # HTTP Router
+  # Creating a black hole for /-/http_router/version since it is taken by the
+  # cloudflare worker, see: https://gitlab.com/gitlab-org/cells/http-router/-/issues/47
+  match '/http_router/version', to: proc { [204, {}, ['']] }, via: :all
+
   # '/-/health' implemented by BasicHealthCheck middleware
   get 'liveness' => 'health#liveness'
   get 'readiness' => 'health#readiness'
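
The new route answers every HTTP method with an empty `204 No Content`. A hedged request-spec sketch of what that behavior implies; the spec file, description, and feature category below are assumptions for illustration and are not part of this commit:

```ruby
# spec/requests/http_router_version_spec.rb — hypothetical spec.
require 'spec_helper'

RSpec.describe 'HTTP router version black hole', feature_category: :shared do
  it 'returns 204 with an empty body' do
    get '/-/http_router/version'

    expect(response).to have_gitlab_http_status(:no_content)
    expect(response.body).to be_empty
  end
end
```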

@@ -0,0 +1,9 @@
+---
+migration_job_name: BackfillPackagesComposerMetadataProjectId
+description: Backfills sharding key `packages_composer_metadata.project_id` from `packages_packages`.
+feature_category: package_registry
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/167444
+milestone: '17.5'
+queued_migration_version: 20240927123742
+finalize_after: '2024-10-22'
+finalized_by: # version of the migration that finalized this BBM

@@ -19,3 +19,4 @@ desired_sharding_key:
     table: packages_packages
     sharding_key: project_id
     belongs_to: package
+desired_sharding_key_migration_job_name: BackfillPackagesComposerMetadataProjectId

@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+class UpdateUniqueIndexOnVirtualRegistriesPackagesMavenCachedResponses < Gitlab::Database::Migration[2.2]
+  milestone '17.5'
+  disable_ddl_transaction!
+
+  TABLE_NAME = :virtual_registries_packages_maven_cached_responses
+  OLD_INDEX_NAME = 'idx_vregs_pkgs_mvn_cached_resp_on_uniq_upstrm_id_and_rel_path'
+  NEW_INDEX_NAME = 'idx_vregs_pkgs_mvn_cached_resp_on_uniq_default_upt_id_relpath'
+  ADDITIONAL_INDEX_NAME = 'idx_vregs_pkgs_mvn_cached_resp_on_upst_id_status_id'
+
+  def up
+    add_concurrent_index(
+      TABLE_NAME,
+      [:upstream_id, :relative_path],
+      unique: true,
+      name: NEW_INDEX_NAME,
+      where: 'status = 0' # status: :default
+    )
+    add_concurrent_index(
+      TABLE_NAME,
+      [:upstream_id, :status, :id],
+      name: ADDITIONAL_INDEX_NAME
+    )
+    remove_concurrent_index_by_name(TABLE_NAME, OLD_INDEX_NAME)
+  end
+
+  def down
+    add_concurrent_index(
+      TABLE_NAME,
+      [:upstream_id, :relative_path],
+      unique: true,
+      name: OLD_INDEX_NAME
+    )
+    remove_concurrent_index_by_name(TABLE_NAME, ADDITIONAL_INDEX_NAME)
+    remove_concurrent_index_by_name(TABLE_NAME, NEW_INDEX_NAME)
+  end
+end

@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddProjectIdToPackagesComposerMetadata < Gitlab::Database::Migration[2.2]
+  milestone '17.5'
+
+  def change
+    add_column :packages_composer_metadata, :project_id, :bigint
+  end
+end

@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class IndexPackagesComposerMetadataOnProjectId < Gitlab::Database::Migration[2.2]
+  milestone '17.5'
+  disable_ddl_transaction!
+
+  INDEX_NAME = 'index_packages_composer_metadata_on_project_id'
+
+  def up
+    add_concurrent_index :packages_composer_metadata, :project_id, name: INDEX_NAME
+  end
+
+  def down
+    remove_concurrent_index_by_name :packages_composer_metadata, INDEX_NAME
+  end
+end

@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+class AddPackagesComposerMetadataProjectIdFk < Gitlab::Database::Migration[2.2]
+  milestone '17.5'
+  disable_ddl_transaction!
+
+  def up
+    add_concurrent_foreign_key :packages_composer_metadata, :projects, column: :project_id, on_delete: :cascade
+  end
+
+  def down
+    with_lock_retries do
+      remove_foreign_key :packages_composer_metadata, column: :project_id
+    end
+  end
+end

@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+class AddPackagesComposerMetadataProjectIdTrigger < Gitlab::Database::Migration[2.2]
+  milestone '17.5'
+
+  def up
+    install_sharding_key_assignment_trigger(
+      table: :packages_composer_metadata,
+      sharding_key: :project_id,
+      parent_table: :packages_packages,
+      parent_sharding_key: :project_id,
+      foreign_key: :package_id
+    )
+  end
+
+  def down
+    remove_sharding_key_assignment_trigger(
+      table: :packages_composer_metadata,
+      sharding_key: :project_id,
+      parent_table: :packages_packages,
+      parent_sharding_key: :project_id,
+      foreign_key: :package_id
+    )
+  end
+end

@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+class QueueBackfillPackagesComposerMetadataProjectId < Gitlab::Database::Migration[2.2]
+  milestone '17.5'
+  restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
+
+  MIGRATION = "BackfillPackagesComposerMetadataProjectId"
+  DELAY_INTERVAL = 2.minutes
+  BATCH_SIZE = 1000
+  SUB_BATCH_SIZE = 100
+
+  def up
+    queue_batched_background_migration(
+      MIGRATION,
+      :packages_composer_metadata,
+      :package_id,
+      :project_id,
+      :packages_packages,
+      :project_id,
+      :package_id,
+      job_interval: DELAY_INTERVAL,
+      batch_size: BATCH_SIZE,
+      sub_batch_size: SUB_BATCH_SIZE
+    )
+  end
+
+  def down
+    delete_batched_background_migration(
+      MIGRATION,
+      :packages_composer_metadata,
+      :package_id,
+      [
+        :project_id,
+        :packages_packages,
+        :project_id,
+        :package_id
+      ]
+    )
+  end
+end
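
The queued job class itself is not part of this commit. For desired-sharding-key backfills like this one, GitLab's convention (an assumption here, inferred from the `desired_sharding_key` definition above rather than shown in this diff) is a thin subclass of the shared backfill job:

```ruby
# Hypothetical sketch of the job class the migration above enqueues.
module Gitlab
  module BackgroundMigration
    class BackfillPackagesComposerMetadataProjectId < BackfillDesiredShardingKeyJob
      operation_name :backfill_packages_composer_metadata_project_id
      feature_category :package_registry
    end
  end
end
```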

@@ -0,0 +1 @@
+86e827aea80b8cf9849cf6d361c525650f5af0ffb9d17bf3c070cf59ab5a7328

@@ -0,0 +1 @@
+72d7624f33ab95d79148a337a10f3f3029e4df9ee4fcb094570a21fb880a9539

@@ -0,0 +1 @@
+e131206896509320a8508c60ebe8b77951463456a36a616800f9f148c0fed12c

@@ -0,0 +1 @@
+f712aa4313ad05c4b48dfe22f5c48239dfe2cdf213f7600f2ab8941b71d5c333

@@ -0,0 +1 @@
+28ad8697e3aa45f9935f7abbd66dfd184ff85a1f4b5df5867b3ccc0077b3124b

@@ -0,0 +1 @@
+0d0e29413b0fae2c3cf2ceec33f392b08e20f41b52080778df431d688b5f38b3

@@ -2361,6 +2361,22 @@ RETURN NEW;
 END
 $$;

+CREATE FUNCTION trigger_da5fd3d6d75c() RETURNS trigger
+    LANGUAGE plpgsql
+    AS $$
+BEGIN
+IF NEW."project_id" IS NULL THEN
+  SELECT "project_id"
+  INTO NEW."project_id"
+  FROM "packages_packages"
+  WHERE "packages_packages"."id" = NEW."package_id";
+END IF;
+
+RETURN NEW;
+
+END
+$$;
+
 CREATE FUNCTION trigger_dadd660afe2c() RETURNS trigger
     LANGUAGE plpgsql
     AS $$

@@ -15098,7 +15114,8 @@ CREATE TABLE packages_composer_metadata (
     package_id bigint NOT NULL,
     target_sha bytea NOT NULL,
     composer_json jsonb DEFAULT '{}'::jsonb NOT NULL,
-    version_cache_sha bytea
+    version_cache_sha bytea,
+    project_id bigint
 );

 CREATE TABLE packages_conan_file_metadata (

@@ -27536,7 +27553,9 @@ CREATE INDEX idx_user_details_on_provisioned_by_group_id_user_id ON user_details

 CREATE INDEX idx_vreg_pkgs_maven_cached_responses_on_relative_path_trigram ON virtual_registries_packages_maven_cached_responses USING gin (relative_path gin_trgm_ops);

-CREATE UNIQUE INDEX idx_vregs_pkgs_mvn_cached_resp_on_uniq_upstrm_id_and_rel_path ON virtual_registries_packages_maven_cached_responses USING btree (upstream_id, relative_path);
+CREATE UNIQUE INDEX idx_vregs_pkgs_mvn_cached_resp_on_uniq_default_upt_id_relpath ON virtual_registries_packages_maven_cached_responses USING btree (upstream_id, relative_path) WHERE (status = 0);
+
+CREATE INDEX idx_vregs_pkgs_mvn_cached_resp_on_upst_id_status_id ON virtual_registries_packages_maven_cached_responses USING btree (upstream_id, status, id);

 CREATE INDEX idx_vuln_reads_for_filtering ON vulnerability_reads USING btree (project_id, state, dismissal_reason, severity DESC, vulnerability_id DESC NULLS LAST);

@@ -30110,6 +30129,8 @@ CREATE UNIQUE INDEX index_packages_composer_cache_namespace_and_sha ON packages_

 CREATE UNIQUE INDEX index_packages_composer_metadata_on_package_id_and_target_sha ON packages_composer_metadata USING btree (package_id, target_sha);

+CREATE INDEX index_packages_composer_metadata_on_project_id ON packages_composer_metadata USING btree (project_id);
+
 CREATE UNIQUE INDEX index_packages_conan_file_metadata_on_package_file_id ON packages_conan_file_metadata USING btree (package_file_id);

 CREATE UNIQUE INDEX index_packages_conan_metadata_on_package_id_username_channel ON packages_conan_metadata USING btree (package_id, package_username, package_channel);

@@ -33800,6 +33821,8 @@ CREATE TRIGGER trigger_d4487a75bd44 BEFORE INSERT OR UPDATE ON terraform_state_v

 CREATE TRIGGER trigger_d5c895007948 BEFORE INSERT OR UPDATE ON protected_environment_approval_rules FOR EACH ROW EXECUTE FUNCTION trigger_d5c895007948();

+CREATE TRIGGER trigger_da5fd3d6d75c BEFORE INSERT OR UPDATE ON packages_composer_metadata FOR EACH ROW EXECUTE FUNCTION trigger_da5fd3d6d75c();
+
 CREATE TRIGGER trigger_dadd660afe2c BEFORE INSERT OR UPDATE ON packages_debian_group_distribution_keys FOR EACH ROW EXECUTE FUNCTION trigger_dadd660afe2c();

 CREATE TRIGGER trigger_dbdd61a66a91 BEFORE INSERT OR UPDATE ON agent_activity_events FOR EACH ROW EXECUTE FUNCTION trigger_dbdd61a66a91();

@@ -35283,6 +35306,9 @@ ALTER TABLE ONLY user_preferences
 ALTER TABLE ONLY packages_debian_group_components
     ADD CONSTRAINT fk_e63e8ee3b1 FOREIGN KEY (group_id) REFERENCES namespaces(id) ON DELETE CASCADE;

+ALTER TABLE ONLY packages_composer_metadata
+    ADD CONSTRAINT fk_e65180da68 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
+
 ALTER TABLE ONLY approval_project_rules_protected_branches
     ADD CONSTRAINT fk_e6ee913fc2 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

@@ -414,7 +414,10 @@ repositories that have been forked and use GitLab [object pools](../repository_s
 You can restore specific repositories using the `REPOSITORIES_PATHS` and the `SKIP_REPOSITORIES_PATHS` options.
 Both options accept a comma-separated list of project and group paths. If you
 specify a group path, all repositories in all projects in the group and
-descendent groups are included or skipped, depending on which option you used. The project and group repositories must exist within the specified backup.
+descendent groups are included or skipped, depending on which option you used. The project and group repositories must exist in the specified backup.
+
+NOTE:
+The `REPOSITORIES_PATHS` and `SKIP_REPOSITORIES_PATHS` options apply only to Git repositories. All projects and other data are restored regardless of these options.

 For example, to restore all repositories for all projects in **Group A** (`group-a`), the repository for **Project C** in **Group B** (`group-b/project-c`),
 and skip the **Project D** in **Group A** (`group-a/project-d`):

@@ -356,7 +356,7 @@ these steps:
    [requires a header prefix in the URL](https://plantuml.com/text-encoding)
    to distinguish different encoding types.

-## Troubleshooting PlantUML configuration
+## Troubleshooting

 ### Rendered diagram URL remains the same after update

@@ -353,7 +353,7 @@ that cannot be accessed directly from the internet, keep the following in mind:
   [Atlassian IP addresses](https://support.atlassian.com/organization-administration/docs/ip-addresses-and-domains-for-atlassian-cloud-products/#Outgoing-Connections) only.
 - If you use a rewrite or subfilter with your proxy, ensure the proxy
   does not rewrite or replace the `gitlab-jira-connect-${host}` app key.
-  Otherwise, you might get a [`Failed to link group`](jira_cloud_app_troubleshooting.md#failed-to-link-group) error.
+  Otherwise, you might get a [`Failed to link group`](jira_cloud_app_troubleshooting.md#error-failed-to-link-group) error.

 ### External NGINX

@@ -132,7 +132,7 @@ Depending on how you installed the app, you might want to check the following:
   self-managed GitLab instance.
 - [Reinstall the app](jira_cloud_app.md#install-the-gitlab-for-jira-cloud-app-manually). This method might remove all [synced data](../../integration/jira/connect-app.md#gitlab-data-synced-to-jira) from the [Jira development panel](../../integration/jira/development_panel.md).

-## `Failed to update the GitLab instance`
+## Error: `Failed to update the GitLab instance`

 When you set up the GitLab for Jira Cloud app, you might get a `Failed to update the GitLab instance` error after you enter your self-managed instance URL.

@@ -249,7 +249,7 @@ For the second log, you might have one of the following scenarios:
 - `json.exception.class` and `json.exception.message` are present.
 - `json.exception.class` and `json.exception.message` contain whether an issue occurred while contacting the self-managed instance.

-## `Failed to link group`
+## Error: `Failed to link group`

 When you link a group, you might get the following error:

@@ -279,7 +279,7 @@ This error can be returned for multiple reasons.
    JiraConnectInstallation.delete_all
    ```

-## `Failed to load Jira Connect Application ID`
+## Error: `Failed to load Jira Connect Application ID`

 When you sign in to the GitLab for Jira Cloud app after you point the app
 to your self-managed instance, you might get the following error:

@@ -305,7 +305,7 @@ To resolve this issue:
 1. If you [installed the app from the official Atlassian Marketplace listing](jira_cloud_app.md#install-the-gitlab-for-jira-cloud-app-from-the-atlassian-marketplace),
    ensure [**Jira Connect Proxy URL**](jira_cloud_app.md#set-up-your-instance) is set to `https://gitlab.com` with no trailing slash.

-## `Missing required parameter: client_id`
+## Error: `Missing required parameter: client_id`

 When you sign in to the GitLab for Jira Cloud app after you point the app
 to your self-managed instance, you might get the following error:

@@ -535,11 +535,13 @@ Use lists to present information in a format that is easier to scan.

 - Make all items in the list parallel.
   For example, do not start some items with nouns and others with verbs.
-- Do not use a period if the item is not a full sentence.
-- Use a period after every full sentence. Do not use semicolons or commas.
-- Give all items the same punctuation.
 - Start all items with a capital letter.
-- Separate the introductory phrase from explanatory text with a colon (`:`). For example:
+- Give all items the same punctuation.
+- Do not use a period if the item is not a full sentence.
+- Use a period after every full sentence.
+  Do not use semicolons or commas.
+- Add a colon (`:`) after the introductory phrase.
+  For example:

 ```markdown
 You can:

@@ -1288,17 +1290,22 @@ Do not:

 When writing alt text:

-- Write short, descriptive alt text in 155 characters or fewer. Screen readers
-  typically stop reading after this amount.
-- If the image has complex information, like a workflow diagram, use a short alt text to identify the image and
-  include detailed information in the text.
-- Use end punctuation.
-- Use sentence case and avoid using all-caps. Some screen readers read capitals as individual letters.
-- Don't use phrases like **Image of** or **Graphic of**.
-- Don't use a string of keywords. Include keywords in phrases or sentences to enhance context.
-- Introduce the image in the section text, not the alt text.
-- Try to avoid repeating content that you've already used in the section text.
-- Don't use inline styling, like bold, italics, or backticks. Screen readers read `**text**` as `star star text star star`.
+- Write short, descriptive alt text in 155 characters or fewer.
+  Screen readers typically stop reading after this many characters.
+- If the image has complex information like a workflow diagram, use short alt text
+  to identify the image and include detailed information in the text.
+- Use punctuation.
+  - Do not use a period if the text is not a full sentence.
+  - Use a period after every full sentence.
+- Use sentence case and avoid using all caps.
+  Some screen readers read capitals as individual letters.
+- Do not use phrases like **Image of** or **Graphic of**.
+- Do not use a string of keywords.
+  Include keywords in the text to enhance context.
+- Introduce the image in the topic, not the alt text.
+- Try to avoid repeating text you've already used in the topic.
+- Do not use inline styling like bold, italics, or backticks.
+  Screen readers read `**text**` as `star star text star star`.

 #### Automatic screenshot generator

@@ -461,7 +461,7 @@ Ensure you've read about [Elasticsearch Migrations](../advanced_search/elasticse

 If there is a halted migration and your [`elasticsearch.log`](../../administration/logs/index.md#elasticsearchlog) file contain errors, this could potentially be a bug/issue. Escalate to GitLab support if retrying migrations does not succeed.

-## `Can't specify parent if no parent field has been configured` error
+## Error: `Can't specify parent if no parent field has been configured`

 If you enabled Elasticsearch before GitLab 8.12 and have not rebuilt indices, you get
 exceptions in lots of different cases:

@@ -483,12 +483,12 @@ Elasticsearch::Transport::Transport::Errors::BadRequest([400] {
 This is because we changed the index mapping in GitLab 8.12 and the old indices should be removed and built from scratch again,
 see details in the [update guide](../../update/upgrading_from_source.md).

-## `Elasticsearch::Transport::Transport::Errors::BadRequest`
+## Error: `Elasticsearch::Transport::Transport::Errors::BadRequest`

 If you have this exception (just like in the case above but the actual message is different), check that you have the correct Elasticsearch version and you met the other [requirements](elasticsearch.md#system-requirements).
 There is also an easy way to check it automatically with `sudo gitlab-rake gitlab:check` command.

-## `Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge`
+## Error: `Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge`

 ```plaintext
 [413] {"Message":"Request size exceeded 10485760 bytes"}

@@ -498,9 +498,11 @@ This exception is seen when your Elasticsearch cluster is configured to reject r

 AWS has [network limits](https://docs.aws.amazon.com/opensearch-service/latest/developerguide/limits.html#network-limits) on the maximum size of HTTP request payloads based on the size of the underlying instance. Set the maximum bulk request size to a value lower than 10 MiB.

-## `Faraday::TimeoutError (execution expired)` error when using a proxy
+## Error: `Faraday::TimeoutError (execution expired)`

-Set a custom `gitlab_rails['env']` environment variable, called [`no_proxy`](https://docs.gitlab.com/omnibus/settings/environment-variables.html) with the IP address of your Elasticsearch host.
+When you use a proxy, set a custom `gitlab_rails['env']` environment variable
+named [`no_proxy`](https://docs.gitlab.com/omnibus/settings/environment-variables.html)
+with the IP address of your Elasticsearch host.

 ## My single node Elasticsearch cluster status never goes from `yellow` to `green`

@@ -520,7 +522,7 @@ curl --request PUT localhost:9200/gitlab-production/_settings --header 'Content-
 }'
 ```

-## `health check timeout: no Elasticsearch node available` error in Sidekiq
+## Error: `health check timeout: no Elasticsearch node available`

 If you're getting a `health check timeout: no Elasticsearch node available` error in Sidekiq during the indexing process:

@@ -593,7 +595,7 @@ $ jq '.class' sidekiq/current | sort | uniq -c | sort -nr
 In this case, `free -m` on the overloaded GitLab node would also show
 unexpectedly high `buff/cache` usage.

-## `Couldn't load task status` error when reindexing
+## Error: `Couldn't load task status`

 When you reindex, you might get a `Couldn't load task status` error. A `sliceId must be greater than 0 but was [-1]` error might also appear on the Elasticsearch host. As a workaround, consider [reindexing from scratch](../../integration/advanced_search/elasticsearch_troubleshooting.md#last-resort-to-recreate-an-index) or upgrading to GitLab 16.3.

@@ -98,7 +98,7 @@ You can find information in:
 - `json.exception.message`
 - `json.message`

-### `Test Failed. Save Anyway` error
+### Error: `Test Failed. Save Anyway`

 When you configure an integration on an uninitialized repository, the integration might fail with
 a `Test Failed. Save Anyway` error. This error occurs because the integration uses push data

@@ -171,9 +171,9 @@ If you cannot [provide GitLab with your Jenkins server URL and authentication in

 ## Troubleshooting

-### Error during GitLab configuration - "Connection failed. Please check your settings"
+### Error: `Connection failed. Please check your settings`

-While configuring GitLab, you might get an error that states "Connection failed. Please check your settings".
+When you configure GitLab, you might get an error that states `Connection failed. Please check your settings`.

 This issue has multiple possible causes and solutions:

@@ -184,7 +184,7 @@ This issue has multiple possible causes and solutions:
 | The credentials for the Jenkins instance do not have sufficient access or are invalid.| Grant the credentials sufficient access or create valid credentials. |
 |The **Enable authentication for `/project` end-point** checkbox is not selected in your [Jenkins plugin configuration](#configure-the-jenkins-server)| Select the checkbox. |

-### Error in merge requests - "Could not connect to the CI server"
+### Error: `Could not connect to the CI server`

 You might get an error that states `Could not connect to the CI server` in a merge
 request if GitLab did not receive a build status update from Jenkins through the

@@ -192,7 +192,7 @@ When working with the GitLab for Jira Cloud app, you might encounter the followi

 For administrator documentation, see [GitLab for Jira Cloud app administration](../../administration/settings/jira_cloud_app_troubleshooting.md).

-### Error when connecting the app
+### Error: `Failed to link group`

 When you connect the GitLab for Jira Cloud app, you might get this error:

@@ -89,7 +89,7 @@ Potential resolutions:
   [GitLab account configuration](https://confluence.atlassian.com/adminjiraserver/linking-gitlab-accounts-1027142272.html#LinkingGitLabaccounts-InGitLab). Review
   the **Scopes** field and ensure the `api` checkbox is selected.

-## `410 Gone` when connecting to Jira
+## Error: `410 Gone`

 When you connect to Jira and synchronize repositories, you might get a `410 Gone` error.
 This issue occurs when you use the Jira DVCS connector and your integration is configured to use **GitHub Enterprise**.

@@ -111,9 +111,9 @@ resynchronize the information:
 For more information, see the
 [Atlassian documentation](https://support.atlassian.com/jira-cloud-administration/docs/integrate-with-development-tools/).

-## `Sync Failed` when refreshing repository data
+## Error: `Sync Failed`

-If you get a `Sync Failed` error in Jira when [refreshing repository data](index.md#refresh-data-imported-to-jira) for specific projects, check your Jira DVCS connector logs. Look for errors that occur when executing requests to API resources in GitLab. For example:
+If you get a `Sync Failed` error in Jira when you [refresh repository data](index.md#refresh-data-imported-to-jira) for specific projects, check your Jira DVCS connector logs. Look for errors that occur when executing requests to API resources in GitLab. For example:

 ```plaintext
 Failed to execute request [https://gitlab.com/api/v4/projects/:id/merge_requests?page=1&per_page=100 GET https://gitlab.com/api/v4/projects/:id/merge_requests?page=1&per_page=100 returned a response status of 403 Forbidden] errors:

@@ -130,9 +130,9 @@ For more information, see [issue 341571](https://gitlab.com/gitlab-org/gitlab/-/

 To resolve this issue, disable and then re-enable the integration.

-## `certificate verify failed` when testing the integration settings
+## Error: `certificate verify failed`

-When testing the Jira issue integration settings, you might get the following error:
+When you test the Jira issue integration settings, you might get the following error:

 ```plaintext
 Connection failed. Check your integration settings. SSL_connect returned=1 errno=0 peeraddr=<jira.example.com> state=error: certificate verify failed (unable to get local issuer certificate)

@@ -253,9 +253,9 @@ end

 When [viewing Jira issues](configure.md#view-jira-issues) in GitLab, you might encounter the following issues.

-### `500 We're sorry` when accessing a Jira issue in GitLab
+### Error: `500 We're sorry`

-When accessing a Jira issue in GitLab, you might get a `500 We're sorry. Something went wrong on our end` error.
+When you access a Jira issue in GitLab, you might get a `500 We're sorry. Something went wrong on our end` error.
 Check [`production.log`](../../administration/logs/index.md#productionlog) to see if the file contains the following exception:

 ```plaintext

@@ -264,7 +264,7 @@ Check [`production.log`](../../administration/logs/index.md#productionlog) to se

 If that's the case, ensure the [**Due date** field is visible for issues](https://confluence.atlassian.com/jirakb/due-date-field-is-missing-189431917.html) in the integrated Jira project.

-### Error when requesting data from Jira
+### Error: `An error occurred while requesting data from Jira`

 When you try to view the Jira issue list or create a Jira issue in GitLab, you might get one of the following errors:

@@ -289,7 +289,7 @@ Your Jira project key must not have [restricted words and characters](https://co

 When you try to view the Jira issue list in GitLab, you might see one of the following errors.

-#### Error: `The value '<project>' does not exist for the field 'project'.`
+#### Error: `The value '<project>' does not exist for the field 'project'`

 If you use the wrong authentication credentials for your Jira installation, you might see this error:

@@ -310,7 +310,7 @@ For more information, see [Jira issue integration](configure.md).

 To resolve this issue, update the authentication credentials to match your Jira installation.

-#### Error: `The credentials for accessing Jira are not allowed to access the data.`
+#### Error: `The credentials for accessing Jira are not allowed to access the data`

 If your Jira credentials cannot access the Jira project key you specified in the
 [Jira issue integration](configure.md#configure-the-integration), you might see this error:

@@ -103,10 +103,24 @@ Secret push protection does not check a file in a commit when:

 - The file is a binary file.
 - The file is larger than 1 MiB.
+- The diff patch for the file is larger than 1 MiB (when using _[diff scanning](#diff-scanning)_).
 - The file was renamed, deleted, or moved without changes to the content.
 - The content of the file is identical to the content of another file in the source code.
 - The file is contained in the initial push that created the repository.

+## Diff scanning
+
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/469161) in GitLab 17.5 [with a project-level flag](../../../../administration/feature_flags.md) named `spp_scan_diffs`.
+
+Secret Push Protection scans all contents of modified files by default.
+This can cause a [push to be blocked unexpectedly](#push-blocked-unexpectedly) when a file containing a secret is scanned.
+You can enable the `spp_scan_diffs` [feature flag](../../../../administration/feature_flags.md) for your project,
+which modifies Secret Push Protection to only scan newly committed changes (or diffs), and not the rest of the file.
+
+When `spp_scan_diffs` is enabled, Secret Push Protection scans the diffs for CLI-based pushes via HTTP/SSH.
+Changes committed via the WebIDE still result in the entire file being scanned due to a technical limitation.
+[Issue 491282](https://gitlab.com/gitlab-org/gitlab/-/issues/491282) addresses the limitation so only the diffs are scanned for WebIDE changes.
+
 ## Resolve a blocked push

 When secret push protection blocks a push, you can either:
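
As the new documentation above notes, `spp_scan_diffs` is a project-level feature flag. On self-managed instances, flags like this are typically toggled from the Rails console with the standard `Feature` API. A hedged sketch; the project path is a placeholder:

```ruby
# Rails console sketch — enable diff scanning for a single project.
project = Project.find_by_full_path('group/project') # hypothetical path
Feature.enable(:spp_scan_diffs, project)

# To revert:
Feature.disable(:spp_scan_diffs, project)
```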

@@ -196,12 +210,11 @@ When working with secret push protection, you may encounter the following situat

 ### Push blocked unexpectedly

-Secret Push Protection scans all contents of modified files. This can cause a push to be
-unexpectedly blocked if a modified file contains a secret, even if the secret is not part of the diff.
+Secret Push Protection scans all contents of modified files. This can cause a push to be unexpectedly blocked
+if a modified file contains a secret, even if the secret is not part of the diff.

-To push a change to a file that contains a secret, you need to [skip secret push protection](#skip-secret-push-protection).
-
-[Issue 469161](https://gitlab.com/gitlab-org/gitlab/-/issues/469161) proposes to change the scanning logic to scan only diffs.
+[Enable the `spp_scan_diffs` feature flag](#diff-scanning) to ensure that only newly committed changes are scanned.
+To push a WebIDE change to a file that contains a secret, you need to [skip secret push protection](#skip-secret-push-protection).

 ### File was not scanned

@@ -99,7 +99,7 @@ When working with the GitGuardian integration, you might encounter the following

 ### `500` HTTP errors

-You might get a HTTP `500` error.
+You might get an HTTP `500` error.

 This issue occurs for when requests time out for commits with a lot of changed files.

@@ -107,9 +107,9 @@ If this happens with a commit with more than 50 files changed,
 the workaround is to break down your changes into smaller commits and push
 them one by one.

-### `Filename: ensure this value has at most 256 characters`
+### Error: `Filename: ensure this value has at most 256 characters`

-You might get a HTTP `400` error that states `Filename: ensure this value has at most 256 characters`.
+You might get an HTTP `400` error that states `Filename: ensure this value has at most 256 characters`.

 This issue occurs when some of the changed files you are pushing in that commit have the filename (not the path) longer then 256 characters.

@@ -18,7 +18,7 @@ For administrator documentation, see [GitLab for Slack app administration](../..

 The GitLab for Slack app might not appear in the list of integrations. To have the GitLab for Slack app on your self-managed instance, an administrator must [enable the integration](../../../administration/settings/slack_app.md). On GitLab.com, the GitLab for Slack app is available by default.

-## `Project or alias not found`
+## Error: `Project or alias not found`

 Some Slack commands must have a project full path or alias and fail with the following error
 if the project cannot be found:

@@ -50,7 +50,7 @@ If you're not receiving notifications to a Slack channel, ensure:

 If the [App Home](https://api.slack.com/start/overview#app_home) does not display properly, ensure your [app is up to date](gitlab_slack_application.md#reinstall-the-gitlab-for-slack-app).

-## Error: `Validation failed: Alias This alias has already been taken`
+## Error: `This alias has already been taken`

 You might encounter error `422: The change you requested was rejected` when trying to set up on a new project. The returned Rails error might be:

@@ -100,13 +100,13 @@ If your Slack integration is not working, start troubleshooting by
 searching through the [Sidekiq logs](../../../administration/logs/index.md#sidekiqlog)
 for errors relating to your Slack service.

-### Something went wrong on our end
+### Error: `Something went wrong on our end`

 You might get this generic error message in the GitLab UI.
 Review [the logs](../../../administration/logs/index.md#productionlog) to find
 the error message and keep troubleshooting from there.

-### `certificate verify failed`
+### Error: `certificate verify failed`

 You might see an entry like the following in your Sidekiq log:

@@ -204,7 +204,7 @@ USER gitlab-workspaces

 When working with workspaces, you might encounter the following issues.

-### `Failed to renew lease` when creating a workspace
+### Error: `Failed to renew lease`

 You might not be able to create a workspace due to a known issue in the GitLab agent for Kubernetes.
 The following error message might appear in the agent's log:
@@ -1,6 +1,6 @@
# Gitlab::SecretDetection

-The gitlab-secret_detection gem performs keyword and regex matching on git blobs that may include secrets. The gem accepts one or more git blobs, matches them against a defined ruleset of regular expressions, and returns scan results.
+The gitlab-secret_detection gem performs keyword and regex matching on input payloads that may include secrets. The gem accepts one or more payloads, matches them against a defined ruleset of regular expressions, and returns scan results.

##### Scan parameters

@@ -10,10 +10,10 @@ accepts the following parameters:

| Parameter | Type | Required | Default | Description |
|-----------|------|----------|---------|-------------|
-| `blobs` | Array | Yes | NA | Array of blobs with each blob to have `id` and `data` properties. `id` represents the uniqueness of the blob in the given array and `data` is the content of the blob to scan. |
+| `payloads` | Array | Yes | NA | Array of input payloads. Each payload has attributes: `id` and `data`, where `id` uniquely identifies the payload in the given array and `data` is the content to scan. |
| `timeout` | Number | No | [`60s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L22) | The maximum duration allowed for the scan to run on a commit request comprising multiple blobs. If the specified timeout elapses, the scan is automatically terminated. The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. |
-| `blob_timeout` | Number | No | [`5s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L24) | The maximum duration allowed for the scan to run on an individual blob. Upon expiration of the specified timeout, the scan is interrupted for the current blob and advances to the next blob in the request. The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. |
-| `subprocess` | Boolean | No | [`true`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L34) | Runs the scan operation within a subprocess rather than the main process. This design aims to mitigate memory overconsumption issues that may arise from scanning multiple large blobs within a single subprocess. Check [here](https://docs.gitlab.com/ee/architecture/blueprints/secret_detection/decisions/002_run_scan_within_subprocess.html) for more details. |
+| `payload_timeout` | Number | No | [`5s`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L24) | The maximum duration allowed for the scan to run on an individual payload. Upon expiration of the specified timeout, the scan is interrupted for the current payload and advances to the next payload in the request. The timeout duration is specified in seconds but can also accept floating-point values to denote smaller units. For instance, use `0.5` to represent `500ms`. |
+| `subprocess` | Boolean | No | [`true`](https://gitlab.com/gitlab-org/gitlab/-/blob/5dfcf7431bfff25519c05a7e66c0cbb8d7b362be/gems/gitlab-secret_detection/lib/gitlab/secret_detection/scan.rb#L34) | Runs the scan operation within a subprocess rather than the main process. This design aims to mitigate memory overconsumption issues that may arise from scanning multiple large payloads within a single subprocess. Check [here](https://docs.gitlab.com/ee/architecture/blueprints/secret_detection/decisions/002_run_scan_within_subprocess.html) for more details. |
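
For illustration, a call might look like the following minimal sketch. It assumes the scanner is exposed as `Gitlab::SecretDetection::Scan` and that each payload is any object responding to `id` and `data`; the exact entry point in your version of the gem may differ.

```ruby
require 'gitlab/secret_detection'

# Hypothetical payload type; any object with `id` and `data` works.
Payload = Struct.new(:id, :data)

payloads = [
  Payload.new('payload-1', 'content that might contain a secret'),
  Payload.new('payload-2', 'no secrets here')
]

scan = Gitlab::SecretDetection::Scan.new
response = scan.secrets_scan(
  payloads,
  timeout: 30,        # seconds for the whole request
  payload_timeout: 2, # seconds per individual payload
  subprocess: false   # scan in the main process
)

response.status  # one of the Gitlab::SecretDetection::Status values
response.results # array of Gitlab::SecretDetection::Finding
```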

##### Scan Constraints
@@ -4,6 +4,7 @@ require_relative 'secret_detection/status'
require_relative 'secret_detection/finding'
require_relative 'secret_detection/response'
require_relative 'secret_detection/scan'
+require_relative 'secret_detection/scan_diffs'

module Gitlab
  module SecretDetection
@@ -163,7 +163,7 @@ module Gitlab
        rescue Timeout::Error => e
          logger.error "Secret Detection scan timed out on the blob(id:#{blob.id}): #{e}"
          SecretDetection::Finding.new(blob.id,
-           SecretDetection::Status::BLOB_TIMEOUT)
+           SecretDetection::Status::PAYLOAD_TIMEOUT)
        end

        found_secrets.freeze
@@ -187,7 +187,7 @@ module Gitlab
        rescue Timeout::Error => e
          logger.error "Secret Detection scan timed out on the blob(id:#{blob.id}): #{e}"
          SecretDetection::Finding.new(blob.id,
-           SecretDetection::Status::BLOB_TIMEOUT)
+           SecretDetection::Status::PAYLOAD_TIMEOUT)
        end
      end

@@ -238,7 +238,7 @@ module Gitlab
      def overall_scan_status(found_secrets)
        return SecretDetection::Status::NOT_FOUND if found_secrets.empty?

-       timed_out_blobs = found_secrets.count { |el| el.status == SecretDetection::Status::BLOB_TIMEOUT }
+       timed_out_blobs = found_secrets.count { |el| el.status == SecretDetection::Status::PAYLOAD_TIMEOUT }

        case timed_out_blobs
        when 0
@@ -0,0 +1,317 @@
# frozen_string_literal: true

require 'toml-rb'
require 're2'
require 'logger'
require 'timeout'
require 'parallel'

module Gitlab
  module SecretDetection
    # Scan is responsible for running Secret Detection scan operation
    class ScanDiffs
      # RulesetParseError is thrown when the code fails to parse the
      # ruleset file from the given path
      RulesetParseError = Class.new(StandardError)

      # RulesetCompilationError is thrown when the code fails to compile
      # the predefined rulesets
      RulesetCompilationError = Class.new(StandardError)

      # default time limit(in seconds) for running the scan operation per invocation
      DEFAULT_SCAN_TIMEOUT_SECS = 60
      # default time limit(in seconds) for running the scan operation on a single diff
      DEFAULT_PAYLOAD_TIMEOUT_SECS = 5
      # file path where the secrets ruleset file is located
      RULESET_FILE_PATH = File.expand_path('../../gitleaks.toml', __dir__)
      # Max no of child processes to spawn per request
      # ref: https://gitlab.com/gitlab-org/gitlab/-/issues/430160
      MAX_PROCS_PER_REQUEST = 5
      # Minimum cumulative size of the diffs required to spawn and
      # run the scan within a new subprocess.
      MIN_CHUNK_SIZE_PER_PROC_BYTES = 2_097_152 # 2MiB
      # Whether to run scan in subprocesses or not. Default is true.
      RUN_IN_SUBPROCESS = true

      # Initializes the instance with logger along with following operations:
      # 1. Parse ruleset for the given +ruleset_path+(default: +RULESET_FILE_PATH+). Raises +RulesetParseError+
      #    in case the operation fails.
      # 2. Extract keywords from the parsed ruleset to use it for matching keywords before regex operation.
      # 3. Build and Compile rule regex patterns obtained from the ruleset. Raises +RulesetCompilationError+
      #    in case the compilation fails.
      def initialize(logger: Logger.new($stdout), ruleset_path: RULESET_FILE_PATH)
        @logger = logger
        @rules = parse_ruleset(ruleset_path)
        @keywords = create_keywords(rules)
        @pattern_matcher = build_pattern_matcher(rules)
      end

      # Runs Secret Detection scan on the list of given diffs. Both the total scan duration and
      # the duration for each diff is time bound via +timeout+ and +payload_timeout+ respectively.
      #
      # +diffs+:: Array of diffs between diff pairs. Each diff has attributes: left_blob_id, right_blob_id,
      #           patch, status, binary, and over_patch_bytes_limit.
      # +timeout+:: No of seconds(accepts floating point for smaller time values) to limit the total scan duration
      # +payload_timeout+:: No of seconds(accepts floating point for smaller time values) to limit
      #                     the scan duration on each diff
      # +subprocess+:: If passed true, the scan is performed within subprocess instead of main process.
      #   To avoid over-consuming memory by running scan on multiple large diffs within a single subprocess,
      #   it instead groups the diffs into smaller array where each array contains diffs with cumulative size of
      #   +MIN_CHUNK_SIZE_PER_PROC_BYTES+ bytes and each group runs in a separate sub-process. Default value
      #   is true.
      #
      # NOTE:
      # Running the scan in fork mode primarily focuses on reducing the memory consumption of the scan by
      # offloading regex operations on large diffs to sub-processes. However, it does not assure the improvement
      # in the overall latency of the scan, specifically in the case of smaller diff sizes, where the overhead of
      # forking a new process adds to the overall latency of the scan instead. More reference on Subprocess-based
      # execution is found here: https://gitlab.com/gitlab-org/gitlab/-/issues/430160.
      #
      # Returns an instance of SecretDetection::Response by following below structure:
      # {
      #   status: One of the SecretDetection::Status values
      #   results: [SecretDetection::Finding]
      # }
      #
      def secrets_scan(
        diffs,
        timeout: DEFAULT_SCAN_TIMEOUT_SECS,
        payload_timeout: DEFAULT_PAYLOAD_TIMEOUT_SECS,
        subprocess: RUN_IN_SUBPROCESS
      )
        return SecretDetection::Response.new(SecretDetection::Status::INPUT_ERROR) unless validate_scan_input(diffs)

        Timeout.timeout(timeout) do
          matched_diffs = filter_by_keywords(diffs)

          next SecretDetection::Response.new(SecretDetection::Status::NOT_FOUND) if matched_diffs.empty?

          secrets =
            if subprocess
              run_scan_within_subprocess(matched_diffs, payload_timeout)
            else
              run_scan(matched_diffs, payload_timeout)
            end

          scan_status = overall_scan_status(secrets)

          SecretDetection::Response.new(scan_status, secrets)
        end
      rescue Timeout::Error => e
        logger.error "Secret detection operation timed out: #{e}"

        SecretDetection::Response.new(SecretDetection::Status::SCAN_TIMEOUT)
      end

      private

      attr_reader :logger, :rules, :keywords, :pattern_matcher

      # parses given ruleset file and returns the parsed rules
      def parse_ruleset(ruleset_file_path)
        rules_data = TomlRB.load_file(ruleset_file_path)
        rules_data['rules']
      rescue StandardError => e
        logger.error "Failed to parse secret detection ruleset from '#{ruleset_file_path}' path: #{e}"

        raise RulesetParseError
      end

      # builds RE2::Set pattern matcher for the given rules
      def build_pattern_matcher(rules)
        matcher = RE2::Set.new

        rules.each do |rule|
          matcher.add(rule["regex"])
        end

        unless matcher.compile
          logger.error "Failed to compile secret detection rulesets in RE::Set"

          raise RulesetCompilationError
        end

        matcher
      end

      # creates and returns the unique set of rule matching keywords
      def create_keywords(rules)
        secrets_keywords = []

        rules.each do |rule|
          secrets_keywords << rule["keywords"]
        end

        secrets_keywords.flatten.compact.to_set
      end

      # returns only those diffs that contain at least one of the keywords
      # from the keywords list
      def filter_by_keywords(diffs)
        matched_diffs = []

        diffs.each do |diff|
          matched_diffs << diff if keywords.any? { |keyword| diff.patch.include?(keyword) }
        end

        matched_diffs.freeze
      end

      def run_scan(diffs, payload_timeout)
        found_secrets = diffs.flat_map do |diff|
          Timeout.timeout(payload_timeout) do
            find_secrets(diff)
          end
        rescue Timeout::Error => e
          logger.error "Secret Detection scan timed out on the diff(id:#{diff.right_blob_id}): #{e}"
          SecretDetection::Finding.new(diff.right_blob_id,
            SecretDetection::Status::PAYLOAD_TIMEOUT)
        end

        found_secrets.freeze
      end

      def run_scan_within_subprocess(diffs, payload_timeout)
        diff_sizes = diffs.map { |diff| diff.patch.bytesize }
        grouped_diff_indices = group_by_chunk_size(diff_sizes)

        grouped_diffs = grouped_diff_indices.map { |idx_arr| idx_arr.map { |i| diffs[i] } }

        found_secrets = Parallel.flat_map(
          grouped_diffs,
          in_processes: MAX_PROCS_PER_REQUEST,
          isolation: true # do not reuse sub-processes
        ) do |grouped_diff|
          grouped_diff.flat_map do |diff|
            Timeout.timeout(payload_timeout) do
              find_secrets(diff)
            end
          rescue Timeout::Error => e
            logger.error "Secret Detection scan timed out on the diff(id:#{diff.right_blob_id}): #{e}"
            SecretDetection::Finding.new(diff.right_blob_id,
              SecretDetection::Status::PAYLOAD_TIMEOUT)
          end
        end

        found_secrets.freeze
      end

      # finds secrets in the given diff with a timeout circuit breaker
      def find_secrets(diff)
        line_number_offset = 0
        secrets = []

        # The following section parses the diff patch.
        #
        # If the line starts with @@, it is the hunk header, used to calculate the line number.
        # If the line starts with +, it is newly added in this diff, and we
        # scan the line for newly added secrets. Also increment line number.
        # If the line starts with -, it is removed in this diff, do not increment line number.
        # If the line starts with \\, it is the no newline marker, do not increment line number.
        # If the line starts with a space character, it is a context line, just increment the line number.
        #
        # A context line that starts with an important character would still be treated
        # like a context line, as shown below:
        # @@ -1,5 +1,5 @@
        #  context line
        # -removed line
        # +added line
        #  @@this context line has a @@ but starts with a space so isn't a header
        #  +this context line has a + but starts with a space so isn't an addition
        #  -this context line has a - but starts with a space so isn't a removal
        diff.patch.each_line do |line|
          # Parse hunk header for start line
          if line.start_with?("@@")
            hunk_info = line.match(/@@ -\d+(,\d+)? \+(\d+)(,\d+)? @@/)
            start_line = hunk_info[2].to_i
            line_number_offset = start_line - 1
          # Line added in this commit
          elsif line.start_with?('+')
            line_number_offset += 1
            # Remove leading +
            line_content = line[1..]

            patterns = pattern_matcher.match(line_content, exception: false)
            next unless patterns.any?

            patterns.each do |pattern|
              type = rules[pattern]["id"]
              description = rules[pattern]["description"]

              secrets << SecretDetection::Finding.new(
                diff.right_blob_id,
                SecretDetection::Status::FOUND,
                line_number_offset,
                type,
                description
              )
            end
          # Line not added in this commit, just increment line number
          elsif line.start_with?(' ')
            line_number_offset += 1
          # Line removed in this commit or no newline marker, do not increment line number
          elsif line.start_with?('-', '\\')
            # No increment
          end
        end

        secrets
      rescue StandardError => e
        logger.error "Secret Detection scan failed on the diff(id:#{diff.right_blob_id}): #{e}"

        SecretDetection::Finding.new(diff.right_blob_id, SecretDetection::Status::SCAN_ERROR)
      end

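      # Descriptive note (added for clarity): returns false unless +diffs+ is an
      # Array; otherwise freezes each diff's patch and returns a truthy value,
      # so validation passes.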
      def validate_scan_input(diffs)
        return false if diffs.nil? || !diffs.instance_of?(Array)

        diffs.each { |diff| diff.patch.freeze }
      end

      def overall_scan_status(found_secrets)
        return SecretDetection::Status::NOT_FOUND if found_secrets.empty?

        timed_out_diffs = found_secrets.count { |el| el.status == SecretDetection::Status::PAYLOAD_TIMEOUT }

        case timed_out_diffs
        when 0
          SecretDetection::Status::FOUND
        when found_secrets.length
          SecretDetection::Status::SCAN_TIMEOUT
        else
          SecretDetection::Status::FOUND_WITH_ERRORS
        end
      end

      # This method accepts an array of diff sizes(in bytes) and groups them into an array
      # of arrays structure where each element is the group of indices of the input
      # array whose cumulative diff sizes has at least +MIN_CHUNK_SIZE_PER_PROC_BYTES+
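      #
      # Illustrative example (added for clarity), with MIN_CHUNK_SIZE_PER_PROC_BYTES = 2MiB:
      #   group_by_chunk_size([1_048_576, 1_572_864, 3_145_728, 524_288])
      #   #=> [[0, 1], [2], [3]]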
      def group_by_chunk_size(diff_size_arr)
        cumulative_size = 0
        chunk_indexes = []
        chunk_idx_start = 0

        diff_size_arr.each_with_index do |size, index|
          cumulative_size += size
          next unless cumulative_size >= MIN_CHUNK_SIZE_PER_PROC_BYTES

          chunk_indexes << (chunk_idx_start..index).to_a

          chunk_idx_start = index + 1
          cumulative_size = 0
        end

        if cumulative_size.positive? && (chunk_idx_start < diff_size_arr.length)
          chunk_indexes << if chunk_idx_start == diff_size_arr.length - 1
                             [chunk_idx_start]
                           else
                             (chunk_idx_start..diff_size_arr.length - 1).to_a
                           end
        end

        chunk_indexes
      end
    end
  end
end
@@ -8,7 +8,7 @@ module Gitlab
      FOUND = 1 # When scan operation completes with one or more findings
      FOUND_WITH_ERRORS = 2 # When scan operation completes with one or more findings along with some errors
      SCAN_TIMEOUT = 3 # When the scan operation runs beyond given time out
-     BLOB_TIMEOUT = 4 # When the scan operation on a blob runs beyond given time out
+     PAYLOAD_TIMEOUT = 4 # When the scan operation on a diff runs beyond given time out
      SCAN_ERROR = 5 # When the scan operation fails due to regex error
      INPUT_ERROR = 6 # When the scan operation fails due to invalid input
    end
@@ -0,0 +1,381 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::SecretDetection::ScanDiffs, feature_category: :secret_detection do
  subject(:scan) { described_class.new }

  let(:diff_blob) do
    Struct.new(:left_blob_id, :right_blob_id, :patch, :status, :binary, :over_patch_bytes_limit, keyword_init: true)
  end

  let(:sha1_blank_sha) { ('0' * 40).freeze }
  let(:sample_blob_id) { 'fe29d93da4843da433e62711ace82db601eb4f8f' }

  let(:ruleset) do
    {
      "title" => "gitleaks config",
      "rules" => [
        {
          "id" => "gitlab_personal_access_token",
          "description" => "GitLab Personal Access Token",
          "regex" => "\bglpat-[0-9a-zA-Z_-]{20}\b",
          "tags" => %w[gitlab revocation_type],
          "keywords" => ["glpat"]
        },
        {
          "id" => "gitlab_pipeline_trigger_token",
          "description" => "GitLab Pipeline Trigger Token",
          "regex" => "\bglptt-[0-9a-zA-Z_-]{40}\b",
          "tags" => ["gitlab"],
          "keywords" => ["glptt"]
        },
        {
          "id" => "gitlab_runner_registration_token",
          "description" => "GitLab Runner Registration Token",
          "regex" => "\bGR1348941[0-9a-zA-Z_-]{20}\b",
          "tags" => ["gitlab"],
          "keywords" => ["GR1348941"]
        },
        {
          "id" => "gitlab_feed_token_v2",
          "description" => "GitLab Feed token",
          "regex" => "\bglft-[0-9a-zA-Z_-]{20}\b",
          "tags" => ["gitlab"],
          "keywords" => ["glft"]
        }
      ]
    }
  end

  it "does not raise an error parsing the toml file" do
    expect { scan }.not_to raise_error
  end

  context "when it creates RE2 patterns from file data" do
    before do
      allow(scan).to receive(:parse_ruleset).and_return(ruleset)
    end

    it "does not raise an error when building patterns" do
      expect { scan }.not_to raise_error
    end
  end

  context "when matching patterns" do
    before do
      allow(scan).to receive(:parse_ruleset).and_return(ruleset)
    end

    context 'when the diff does not contain a secret' do
      let(:diffs) do
        [
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1 @@\n+BASE_URL=https://foo.bar\n\\ No newline at end of file\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          )
        ]
      end

      it "does not match" do
        expected_response = Gitlab::SecretDetection::Response.new(Gitlab::SecretDetection::Status::NOT_FOUND)

        expect(scan.secrets_scan(diffs)).to eq(expected_response)
      end

      it "attempts to keyword match returning no diffs for further scan" do
        expect(scan).to receive(:filter_by_keywords)
          .with(diffs)
          .and_return([])

        scan.secrets_scan(diffs)
      end

      it "does not attempt to regex match" do
        expect(scan).not_to receive(:match_rules_bulk)

        scan.secrets_scan(diffs)
      end
    end

    context "when multiple diffs contain secrets" do
      let(:diffs) do
        [
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1 @@\n+glpat-12312312312312312312\n", # gitleaks:allow
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,3 @@\n+\n+\n+glptt-1231231231231231231212312312312312312312\n", # gitleaks:allow
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1 @@\n+data with no secret\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,2 @@\n+GR134894112312312312312312312\n+glft-12312312312312312312\n", # gitleaks:allow
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1 @@\n+data with no secret\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1 @@\n+data with no secret\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1 @@\n+glptt-1231231231231231231212312312312312312312\n", # gitleaks:allow
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,2 @@\n+glpat-12312312312312312312\n+GR134894112312312312312312312\n", # gitleaks:allow
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          )
        ]
      end

      let(:expected_response) do
        Gitlab::SecretDetection::Response.new(
          Gitlab::SecretDetection::Status::FOUND,
          [
            Gitlab::SecretDetection::Finding.new(
              diffs[0].right_blob_id,
              Gitlab::SecretDetection::Status::FOUND,
              1,
              ruleset['rules'][0]['id'],
              ruleset['rules'][0]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              diffs[1].right_blob_id,
              Gitlab::SecretDetection::Status::FOUND,
              3,
              ruleset['rules'][1]['id'],
              ruleset['rules'][1]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              diffs[3].right_blob_id,
              Gitlab::SecretDetection::Status::FOUND,
              1,
              ruleset['rules'][2]['id'],
              ruleset['rules'][2]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              diffs[3].right_blob_id,
              Gitlab::SecretDetection::Status::FOUND,
              2,
              ruleset['rules'][3]['id'],
              ruleset['rules'][3]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              diffs[6].right_blob_id,
              Gitlab::SecretDetection::Status::FOUND,
              1,
              ruleset['rules'][1]['id'],
              ruleset['rules'][1]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              diffs[7].right_blob_id,
              Gitlab::SecretDetection::Status::FOUND,
              1,
              ruleset['rules'][0]['id'],
              ruleset['rules'][0]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              diffs[7].right_blob_id,
              Gitlab::SecretDetection::Status::FOUND,
              2,
              ruleset['rules'][2]['id'],
              ruleset['rules'][2]['description']
            )
          ]
        )
      end

      it "attempts to keyword match returning only filtered diffs for further scan" do
        expected = diffs.reject { |d| d.patch.include?("data with no secret") }

        expect(scan).to receive(:filter_by_keywords)
          .with(diffs)
          .and_return(expected)

        scan.secrets_scan(diffs)
      end

      it "matches multiple rules when running in main process" do
        expect(scan.secrets_scan(diffs, subprocess: false)).to eq(expected_response)
      end
    end

    context "when configured with time out" do
      let(:each_payload_timeout_secs) { 0.000_001 } # 1 micro-sec to intentionally time out large diff

      let(:large_data) do
        ("\n+large data with a secret glpat-12312312312312312312" * 10_000_000).freeze # gitleaks:allow
      end

      let(:diffs) do
        [
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,2 @@\n+GR134894112312312312312312312\n", # gitleaks:allow
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,2 @@\n+data with no secret\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          )
        ]
      end

      let(:all_large_diffs) do
        [
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          ),
          diff_blob.new(
            left_blob_id: sha1_blank_sha,
            right_blob_id: sample_blob_id,
            patch: "@@ -0,0 +1,10000001 @@\n#{large_data}\n",
            status: :STATUS_END_OF_PATCH,
            binary: false,
            over_patch_bytes_limit: false
          )
        ]
      end

      it "whole secret detection scan operation times out" do
        scan_timeout_secs = 0.000_001 # 1 micro-sec to intentionally time out large diff

        expected_response = Gitlab::SecretDetection::Response.new(Gitlab::SecretDetection::Status::SCAN_TIMEOUT)

        begin
          response = scan.secrets_scan(diffs, timeout: scan_timeout_secs)
          expect(response).to eq(expected_response)
        rescue ArgumentError
          # When RSpec's main process terminates and attempts to clean up child processes upon completion, it terminates
          # subprocesses where the scans might be still ongoing. This behavior is not recognized by the
          # upstream library (parallel), which manages all forked subprocesses it created for running scans. When the
          # upstream library attempts to close its forked subprocesses which already terminated, it raises an
          # 'ArgumentError' with the message 'bad signal type NilClass,' resulting in flaky failures in the test
          # expectations.
          #
          # Example: https://gitlab.com/gitlab-org/gitlab/-/jobs/6935051992
          #
          puts "skipping the test since the subprocesses forked for SD scanning are terminated by main process"
        end
      end

      it "one of the diffs times out while others continue to get scanned" do
        expected_response = Gitlab::SecretDetection::Response.new(
          Gitlab::SecretDetection::Status::FOUND_WITH_ERRORS,
          [
            Gitlab::SecretDetection::Finding.new(
              diffs[0].right_blob_id,
              Gitlab::SecretDetection::Status::FOUND,
              1,
              ruleset['rules'][2]['id'],
              ruleset['rules'][2]['description']
            ),
            Gitlab::SecretDetection::Finding.new(
              diffs[2].right_blob_id,
              Gitlab::SecretDetection::Status::PAYLOAD_TIMEOUT
            )
          ]
        )

        expect(scan.secrets_scan(diffs, payload_timeout: each_payload_timeout_secs)).to eq(expected_response)
      end

      it "all the diffs time out" do
        # scan status changes to SCAN_TIMEOUT when *all* the diffs time out
        expected_scan_status = Gitlab::SecretDetection::Status::SCAN_TIMEOUT

        expected_response = Gitlab::SecretDetection::Response.new(
          expected_scan_status,
          [
            Gitlab::SecretDetection::Finding.new(
              all_large_diffs[0].right_blob_id,
              Gitlab::SecretDetection::Status::PAYLOAD_TIMEOUT
            ),
            Gitlab::SecretDetection::Finding.new(
              all_large_diffs[1].right_blob_id,
              Gitlab::SecretDetection::Status::PAYLOAD_TIMEOUT
            ),
            Gitlab::SecretDetection::Finding.new(
              all_large_diffs[2].right_blob_id,
              Gitlab::SecretDetection::Status::PAYLOAD_TIMEOUT
            )
          ]
        )

        expect(scan.secrets_scan(all_large_diffs, payload_timeout: each_payload_timeout_secs)).to eq(expected_response)
      end
    end
  end
end
@@ -277,7 +277,7 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio
          ),
          Gitlab::SecretDetection::Finding.new(
            blobs[2].id,
-           Gitlab::SecretDetection::Status::BLOB_TIMEOUT
+           Gitlab::SecretDetection::Status::PAYLOAD_TIMEOUT
          )
        ]
      )
@@ -294,15 +294,15 @@ RSpec.describe Gitlab::SecretDetection::Scan, feature_category: :secret_detectio
        [
          Gitlab::SecretDetection::Finding.new(
            all_large_blobs[0].id,
-           Gitlab::SecretDetection::Status::BLOB_TIMEOUT
+           Gitlab::SecretDetection::Status::PAYLOAD_TIMEOUT
          ),
          Gitlab::SecretDetection::Finding.new(
            all_large_blobs[1].id,
-           Gitlab::SecretDetection::Status::BLOB_TIMEOUT
+           Gitlab::SecretDetection::Status::PAYLOAD_TIMEOUT
          ),
          Gitlab::SecretDetection::Finding.new(
            all_large_blobs[2].id,
-           Gitlab::SecretDetection::Status::BLOB_TIMEOUT
+           Gitlab::SecretDetection::Status::PAYLOAD_TIMEOUT
          )
        ]
      )
@@ -13,11 +13,11 @@ module API

      helpers do
        def cached_responses
-         upstream.cached_responses.search_by_relative_path(params[:search])
+         upstream.cached_responses.default.search_by_relative_path(params[:search])
        end

        def cached_response
-         upstream.cached_responses.find_by_relative_path!(declared_params[:cached_response_id])
+         upstream.cached_responses.default.find_by_relative_path!(declared_params[:cached_response_id])
        end
      end

@@ -0,0 +1,10 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    class BackfillPackagesComposerMetadataProjectId < BackfillDesiredShardingKeyJob
      operation_name :backfill_packages_composer_metadata_project_id
      feature_category :package_registry
    end
  end
end
@@ -3905,6 +3905,9 @@ msgstr ""
msgid "AdminSelfHostedModels|By enabling self-hosted models, you accept the %{link_start}GitLab Testing Agreement%{link_end}."
msgstr ""

+msgid "AdminSelfHostedModels|Configure AI Features"
+msgstr ""

msgid "AdminSelfHostedModels|Create model"
msgstr ""

@@ -3968,6 +3971,9 @@ msgstr ""
msgid "AdminSelfHostedModels|New self-hosted model"
msgstr ""

+msgid "AdminSelfHostedModels|Once the model is no longer in use, you can return here to delete it."
+msgstr ""

msgid "AdminSelfHostedModels|Please add a valid endpoint."
msgstr ""

@@ -4004,6 +4010,15 @@ msgstr ""
msgid "AdminSelfHostedModels|There was an error saving the self-hosted model. Please try again."
msgstr ""

+msgid "AdminSelfHostedModels|This self-hosted model cannot be deleted"
+msgstr ""

+msgid "AdminSelfHostedModels|To remove %{boldStart}%{modelName}%{boldEnd}, you must first remove it from the following AI Feature(s):"
+msgstr ""

+msgid "AdminSelfHostedModels|You are about to delete the %{boldStart}%{modelName}%{boldEnd} self-hosted model. This action cannot be undone."
+msgstr ""

msgid "AdminSelfHostedModels|Your self-hosted model was successfully deleted."
msgstr ""
@@ -174,18 +174,24 @@ module InternalEventsCli

    ADDITIONAL_PROPERTIES_INTRO = <<~TEXT.freeze
      #{format_info('ADDITIONAL PROPERTIES')}
-     If you provide extra context with each triggered event, extra capabilities are enabled:
-     - Service Ping: filter metrics to a specific subset of events
-     - Snowflake: view/sort/group individual events from GitLab.com
+     Describe any related attributes or information which should be tracked when the event occurs. This enables extra capabilities:
+     - Service Ping: define metrics filtered to a specific subset of events (built-in properties only)
+     - Snowflake: view/sort/group individual events from GitLab.com

-     A few specific attributes are available for recording the context of each event. These include 2 strings and 1 numeric value.
+     BUILT-IN PROPERTIES (recommended)
+     For the best performance and flexibility, provide event context using:

-     ex) For an event like 'change_merge_request_status', we might want to include:
+     property (string), label (string), value (numeric)

-     Attribute: String 1 (attribute will be named `label`)
-     Description: Status of merge request after update (one of opened, merged, closed)
+     These attribute names correspond to repurposed fields in Snowflake. They have no special meaning other than data type.

-     This would enable us to create a metric like: Monthly count of unique users who changed an MR status to "closed"
+     ex) To add a metric like "Monthly count of unique users who changed an MR status to closed" using a 'change_merge_request_status' event, define an additional property like:
+     Attribute: label (string)
+     Description: Status of merge request after update (one of opened, merged, closed)

+     CUSTOM PROPERTIES (as-needed)
+     When the built-in properties are insufficient, properties of any name can be provided.
+     This option becomes available after both property and label are defined, or after value is defined.

    TEXT

@@ -286,7 +286,7 @@ RSpec.describe 'Database schema',
    it 'only has existing indexes in the ignored duplicate indexes duplicate_indexes.yml' do
      table_ignored_indexes = (ignored_indexes[table] || {}).to_a.flatten.uniq
      indexes_by_name = indexes.map(&:name)
-     expect(indexes_by_name).to include(*table_ignored_indexes)
+     expect(indexes_by_name).to include(*table_ignored_indexes) unless table_ignored_indexes.empty?
    end

    it 'does not have any duplicated indexes' do
@@ -11,6 +11,7 @@ FactoryBot.define do
    content_type { 'text/plain' }
    downloads_count { 5 }
    file_final_path { '5f/9c/5f9c/@final/c7/4c/240c' }
+   status { :default }

    transient do
      file_fixture { 'spec/fixtures/bfg_object_map.txt' }
@@ -265,6 +265,10 @@ describe('Work Item Note', () => {
    it('should have the project name', () => {
      expect(findNoteActions().props('projectName')).toBe('Project name');
    });

+   it('should pass the noteUrl to the note header and should be a work items url', () => {
+     expect(findNoteHeader().props('noteUrl')).toContain('work_items');
+   });
  });

  describe('comment threads', () => {
@@ -240,6 +240,6 @@ RSpec.describe Mutations::Commits::Create do

  def expect_to_contain_deltas(expected_deltas)
    expect(deltas.count).to eq(expected_deltas.count)
-   expect(deltas).to include(*expected_deltas)
+   expect(deltas).to include(*expected_deltas) unless expected_deltas.empty?
  end
end
@@ -1222,7 +1222,8 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching, feature_cate

    it { is_expected.to include(*described_class::API_SCOPES - [:read_user]) }
    it { is_expected.to include(*described_class::REPOSITORY_SCOPES) }
-   it { is_expected.to include(*described_class.registry_scopes) }
+
+   it { is_expected.to include(*described_class.registry_scopes) } unless described_class.registry_scopes.empty?
    it { is_expected.to include(*described_class::OBSERVABILITY_SCOPES) }
  end

@@ -0,0 +1,16 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::BackfillPackagesComposerMetadataProjectId,
  feature_category: :package_registry,
  schema: 20240927123738 do
  include_examples 'desired sharding key backfill job' do
    let(:batch_table) { :packages_composer_metadata }
    let(:batch_column) { :package_id }
    let(:backfill_column) { :project_id }
    let(:backfill_via_table) { :packages_packages }
    let(:backfill_via_column) { :project_id }
    let(:backfill_via_foreign_key) { :package_id }
  end
end
@@ -311,8 +311,8 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml', feature_category: :auto_devops do
    end

    it 'creates a pipeline with the expected jobs' do
-     expect(build_names).to include(*include_build_names)
-     expect(build_names).not_to include(*not_include_build_names)
+     expect(build_names).to include(*include_build_names) unless include_build_names.empty?
+     expect(build_names).not_to include(*not_include_build_names) unless not_include_build_names.empty?
    end
  end
end
@@ -0,0 +1,33 @@
# frozen_string_literal: true

require 'spec_helper'
require_migration!

RSpec.describe QueueBackfillPackagesComposerMetadataProjectId, feature_category: :package_registry do
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      migration.before -> {
        expect(batched_migration).not_to have_scheduled_batched_migration
      }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :packages_composer_metadata,
          column_name: :package_id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_main_cell,
          job_arguments: [
            :project_id,
            :packages_packages,
            :project_id,
            :package_id
          ]
        )
      }
    end
  end
end
@@ -5710,10 +5710,9 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
    subject { project.predefined_project_variables.to_runner_variables }

    specify do
-     expect(subject).to include
-     [
+     expect(subject).to include(
        { key: 'CI_CONFIG_PATH', value: Ci::Pipeline::DEFAULT_CONFIG_PATH, public: true, masked: false }
-     ]
+     )
    end

    context 'when ci config path is overridden' do

@@ -5722,10 +5721,9 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
    end

    it do
-     expect(subject).to include
-     [
+     expect(subject).to include(
        { key: 'CI_CONFIG_PATH', value: 'random.yml', public: true, masked: false }
-     ]
+     )
    end
  end
end
@@ -23,7 +23,7 @@ RSpec.describe VirtualRegistries::Packages::Maven::CachedResponse, type: :model,
      cached_response.save!
    end

-   it { is_expected.to validate_uniqueness_of(:relative_path).scoped_to(:upstream_id) }
+   it { is_expected.to validate_uniqueness_of(:relative_path).scoped_to(:upstream_id, :status) }

    context 'when upstream_id is nil' do
      let(:new_cached_response) { build(:virtual_registries_packages_maven_cached_response) }

@@ -38,6 +38,33 @@ RSpec.describe VirtualRegistries::Packages::Maven::CachedResponse, type: :model,
        expect(new_cached_response.errors.messages_for(:relative_path)).not_to include 'has already been taken'
      end
    end

+   context 'with a similar cached response in a different status' do
+     let!(:cached_response_in_error) do
+       create(
+         :virtual_registries_packages_maven_cached_response,
+         :error,
+         group_id: cached_response.group_id,
+         upstream_id: cached_response.upstream_id,
+         relative_path: cached_response.relative_path
+       )
+     end
+
+     let(:new_cached_response) do
+       build(
+         :virtual_registries_packages_maven_cached_response,
+         :error,
+         group_id: cached_response.group_id,
+         upstream_id: cached_response.upstream_id,
+         relative_path: cached_response.relative_path
+       )
+     end
+
+     it 'does not validate uniqueness of relative_path' do
+       new_cached_response.validate
+       expect(new_cached_response.errors.messages_for(:relative_path)).not_to include 'has already been taken'
+     end
+   end
  end
end

@@ -3601,10 +3601,13 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do

    where(:feature_flag_enabled, :current_user, :access_level, :allowed) do
      false | ref(:owner)      | Featurable::ENABLED  | false
-     true  | ref(:non_member) | Featurable::ENABLED  | false
+     true  | ref(:anonymous)  | Featurable::ENABLED  | true
+     true  | ref(:anonymous)  | Featurable::PRIVATE  | false
+     true  | ref(:anonymous)  | Featurable::DISABLED | false
+     true  | ref(:non_member) | Featurable::ENABLED  | true
      true  | ref(:non_member) | Featurable::PRIVATE  | false
      true  | ref(:non_member) | Featurable::DISABLED | false
-     true  | ref(:guest)      | Featurable::ENABLED  | false
+     true  | ref(:guest)      | Featurable::ENABLED  | true
      true  | ref(:guest)      | Featurable::PRIVATE  | false
      true  | ref(:guest)      | Featurable::DISABLED | false
      true  | ref(:reporter)   | Featurable::ENABLED  | true

@@ -3639,6 +3642,9 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do

    where(:feature_flag_enabled, :current_user, :access_level, :allowed) do
      false | ref(:owner)      | Featurable::ENABLED  | false
+     true  | ref(:anonymous)  | Featurable::ENABLED  | false
+     true  | ref(:anonymous)  | Featurable::PRIVATE  | false
+     true  | ref(:anonymous)  | Featurable::DISABLED | false
      true  | ref(:non_member) | Featurable::ENABLED  | false
      true  | ref(:non_member) | Featurable::PRIVATE  | false
      true  | ref(:non_member) | Featurable::DISABLED | false

@@ -3677,10 +3683,13 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do

    where(:ff_ml_experiment_tracking, :current_user, :access_level, :allowed) do
      false | ref(:owner)      | Featurable::ENABLED  | false
-     true  | ref(:non_member) | Featurable::ENABLED  | false
+     true  | ref(:anonymous)  | Featurable::ENABLED  | true
+     true  | ref(:anonymous)  | Featurable::PRIVATE  | false
+     true  | ref(:anonymous)  | Featurable::DISABLED | false
+     true  | ref(:non_member) | Featurable::ENABLED  | true
      true  | ref(:non_member) | Featurable::PRIVATE  | false
      true  | ref(:non_member) | Featurable::DISABLED | false
-     true  | ref(:guest)      | Featurable::ENABLED  | false
+     true  | ref(:guest)      | Featurable::ENABLED  | true
      true  | ref(:guest)      | Featurable::PRIVATE  | false
      true  | ref(:guest)      | Featurable::DISABLED | false
      true  | ref(:reporter)   | Featurable::ENABLED  | true

@@ -3715,6 +3724,9 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do

    where(:ff_ml_experiment_tracking, :current_user, :access_level, :allowed) do
      false | ref(:owner)      | Featurable::ENABLED  | false
+     true  | ref(:anonymous)  | Featurable::ENABLED  | false
+     true  | ref(:anonymous)  | Featurable::PRIVATE  | false
+     true  | ref(:anonymous)  | Featurable::DISABLED | false
      true  | ref(:non_member) | Featurable::ENABLED  | false
      true  | ref(:non_member) | Featurable::PRIVATE  | false
      true  | ref(:non_member) | Featurable::DISABLED | false
@@ -434,7 +434,7 @@ RSpec.describe API::Integrations, feature_category: :integrations do
  end

  def assert_secret_fields_filtered(response_keys, integration)
-   expect(response_keys).not_to include(*integration.secret_fields)
+   expect(response_keys).not_to include(*integration.secret_fields) unless integration.secret_fields.empty?
  end
end

@@ -60,24 +60,24 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
      true  | :developer | :private | false | :personal_access_token | :not_found
      true  | :developer | :private | true  | :job_token             | :success
      true  | :developer | :private | true  | :personal_access_token | :success
-     true  | :developer | :public  | false | :job_token             | :not_found
-     true  | :developer | :public  | false | :personal_access_token | :not_found
+     true  | :developer | :public  | false | :job_token             | :forbidden
+     true  | :developer | :public  | false | :personal_access_token | :forbidden
      true  | :developer | :public  | true  | :job_token             | :success
      true  | :developer | :public  | true  | :personal_access_token | :success
      true  | :guest     | :private | false | :job_token             | :forbidden
      true  | :guest     | :private | false | :personal_access_token | :not_found
-     true  | :guest     | :private | true  | :job_token             | :not_found
-     true  | :guest     | :private | true  | :personal_access_token | :not_found
-     true  | :guest     | :public  | false | :job_token             | :not_found
-     true  | :guest     | :public  | false | :personal_access_token | :not_found
-     true  | :guest     | :public  | true  | :job_token             | :not_found
-     true  | :guest     | :public  | true  | :personal_access_token | :not_found
+     true  | :guest     | :private | true  | :job_token             | :forbidden
+     true  | :guest     | :private | true  | :personal_access_token | :forbidden
+     true  | :guest     | :public  | false | :job_token             | :forbidden
+     true  | :guest     | :public  | false | :personal_access_token | :forbidden
+     true  | :guest     | :public  | true  | :job_token             | :forbidden
+     true  | :guest     | :public  | true  | :personal_access_token | :forbidden
      true  | :reporter  | :private | false | :job_token             | :forbidden
      true  | :reporter  | :private | false | :personal_access_token | :not_found
      true  | :reporter  | :private | true  | :job_token             | :forbidden
      true  | :reporter  | :private | true  | :personal_access_token | :forbidden
-     true  | :reporter  | :public  | false | :job_token             | :not_found
-     true  | :reporter  | :public  | false | :personal_access_token | :not_found
+     true  | :reporter  | :public  | false | :job_token             | :forbidden
+     true  | :reporter  | :public  | false | :personal_access_token | :forbidden
      true  | :reporter  | :public  | true  | :job_token             | :forbidden
      true  | :reporter  | :public  | true  | :personal_access_token | :forbidden
    end

@@ -101,29 +101,29 @@ RSpec.describe ::API::MlModelPackages, feature_category: :mlops do
      false | :guest     | :public  | true  | :job_token             | :unauthorized
      false | :guest     | :public  | true  | :personal_access_token | :unauthorized
      true  | :anonymous | :private | false | :personal_access_token | :not_found
-     true  | :anonymous | :public  | false | :personal_access_token | :not_found
+     true  | :anonymous | :public  | false | :personal_access_token | :success
      true  | :developer | :private | false | :job_token             | :forbidden
      true  | :developer | :private | false | :personal_access_token | :not_found
      true  | :developer | :private | true  | :job_token             | :success
      true  | :developer | :private | true  | :personal_access_token | :success
-     true  | :developer | :public  | false | :job_token             | :not_found
-     true  | :developer | :public  | false | :personal_access_token | :not_found
+     true  | :developer | :public  | false | :job_token             | :success
+     true  | :developer | :public  | false | :personal_access_token | :success
      true  | :developer | :public  | true  | :job_token             | :success
      true  | :developer | :public  | true  | :personal_access_token | :success
      true  | :guest     | :private | false | :job_token             | :forbidden
      true  | :guest     | :private | false | :personal_access_token | :not_found
-     true  | :guest     | :private | true  | :job_token             | :not_found
-     true  | :guest     | :private | true  | :personal_access_token | :not_found
-     true  | :guest     | :public  | false | :job_token             | :not_found
-     true  | :guest     | :public  | false | :personal_access_token | :not_found
-     true  | :guest     | :public  | true  | :job_token             | :not_found
-     true  | :guest     | :public  | true  | :personal_access_token | :not_found
+     true  | :guest     | :private | true  | :job_token             | :forbidden
+     true  | :guest     | :private | true  | :personal_access_token | :forbidden
+     true  | :guest     | :public  | false | :job_token             | :success
+     true  | :guest     | :public  | false | :personal_access_token | :success
+     true  | :guest     | :public  | true  | :job_token             | :success
+     true  | :guest     | :public  | true  | :personal_access_token | :success
      true  | :reporter  | :private | false | :job_token             | :forbidden
      true  | :reporter  | :private | false | :personal_access_token | :not_found
      true  | :reporter  | :private | true  | :job_token             | :success
      true  | :reporter  | :private | true  | :personal_access_token | :success
-     true  | :reporter  | :public  | false | :job_token             | :not_found
-     true  | :reporter  | :public  | false | :personal_access_token | :not_found
+     true  | :reporter  | :public  | false | :job_token             | :success
+     true  | :reporter  | :public  | false | :personal_access_token | :success
      true  | :reporter  | :public  | true  | :job_token             | :success
      true  | :reporter  | :public  | true  | :personal_access_token | :success
    end
@@ -214,7 +214,7 @@ RSpec.describe API::PersonalAccessTokens, :aggregate_failures, feature_category:

      expect(response).to have_gitlab_http_status(status)

-     expect(json_response.map { |pat| pat['id'] }).to include(*result) if status == :ok
+     expect(json_response.map { |pat| pat['id'] }).to include(*result) if status == :ok && !result.empty?
    end
  end
end

@@ -237,7 +237,7 @@ RSpec.describe API::PersonalAccessTokens, :aggregate_failures, feature_category:

      expect(response).to have_gitlab_http_status(status)

-     expect(json_response.map { |pat| pat['id'] }).to include(*result) if status == :ok
+     expect(json_response.map { |pat| pat['id'] }).to include(*result) if status == :ok && !result.empty?
    end
  end
end
@@ -997,6 +997,16 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, fea
      "/virtual_registries/packages/maven/registries/#{registry.id}/upstreams/#{upstream_id}/cached_responses"
    end

+   let_it_be(:processing_cached_response) do
+     create(
+       :virtual_registries_packages_maven_cached_response,
+       :processing,
+       upstream: upstream,
+       group: upstream.group,
+       relative_path: cached_response.relative_path
+     )
+   end
+
    subject(:api_request) { get api(url), headers: headers }

    shared_examples 'successful response' do

@@ -1106,6 +1116,16 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, fea
      "cached_responses/#{cached_response_id}"
    end

+   let_it_be(:processing_cached_response) do
+     create(
+       :virtual_registries_packages_maven_cached_response,
+       :processing,
+       upstream: upstream,
+       group: upstream.group,
+       relative_path: cached_response.relative_path
+     )
+   end
+
    subject(:api_request) { delete api(url), headers: headers }

    shared_examples 'successful response' do

@@ -1368,8 +1388,6 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, fea
  describe 'POST /api/v4/virtual_registries/packages/maven/:id/*path/upload' do
    include_context 'workhorse headers'

-   let(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' }
-   let(:url) { "/virtual_registries/packages/maven/#{registry.id}/#{path}/upload" }
    let(:file_upload) { fixture_file_upload('spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom') }
    let(:gid_header) { { described_class::UPSTREAM_GID_HEADER => upstream.to_global_id.to_s } }
    let(:additional_headers) do

@@ -1378,6 +1396,18 @@ RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, fea

    let(:headers) { workhorse_headers.merge(additional_headers) }

+   let_it_be(:path) { 'com/test/package/1.2.3/package-1.2.3.pom' }
+   let_it_be(:url) { "/virtual_registries/packages/maven/#{registry.id}/#{path}/upload" }
+   let_it_be(:processing_cached_response) do
+     create(
+       :virtual_registries_packages_maven_cached_response,
+       :processing,
+       upstream: upstream,
+       group: upstream.group,
+       relative_path: "/#{path}"
+     )
+   end
+
    subject(:request) do
      workhorse_finalize(
        api(url),
@@ -0,0 +1,19 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'HTTP Router Version Route', type: :request, feature_category: :api do
  shared_examples 'a successful empty response' do |http_method|
    it "responds with 204 status code for #{http_method.to_s.upcase}" do
      send(http_method, '/-/http_router/version') # Use send to make the request dynamically

      expect(response).to have_gitlab_http_status(:no_content) # Check for 204 No Content
      expect(response.body).to be_empty # Ensure the response body is empty
    end
  end

  # Specify all the HTTP methods you want to test
  %i[get post put delete patch head options].each do |http_method|
    include_examples 'a successful empty response', http_method
  end
end
@@ -39,6 +39,16 @@ RSpec.describe VirtualRegistries::Packages::Maven::HandleFileRequestService, :ag
    end

    context 'with a User' do
+     let_it_be(:processing_cached_response) do
+       create(
+         :virtual_registries_packages_maven_cached_response,
+         :upstream_checked,
+         :processing,
+         upstream: registry.upstream,
+         relative_path: "/#{path}"
+       )
+     end
+
      context 'with no cached response' do
        it_behaves_like 'returning a service response success response', action: :workhorse_upload_url

@@ -36,7 +36,7 @@ require 'rspec-parameterized'
require 'shoulda/matchers'
require 'test_prof/recipes/rspec/let_it_be'
require 'test_prof/factory_default'
-require 'test_prof/factory_prof/nate_heckler' if ENV.fetch('ENABLE_FACTORY_PROF', 'true') == 'true'
+require 'test_prof/factory_prof/nate_heckler'
require 'parslet/rig/rspec'
require 'axe-rspec'

@@ -79,7 +79,10 @@ RSpec::Matchers.define :match_snowplow_schema do |schema, dir: nil, **options|
  end
end

-RSpec::Matchers.define :match_schema do |schema, dir: nil, **options|
+RSpec::Matchers.define :match_schema do |schema, options = {}|
+  # NOTE: https://github.com/rspec/rspec-support/pull/591 broke kwarg parsing
+  dir = options.fetch(:dir, nil)
+
  match do |data|
    schema = SchemaPath.expand(schema, dir)
    schema = Pathname.new(schema) if schema.is_a?(String)
@@ -17,7 +17,10 @@ RSpec.shared_examples 'set up an integration' do |endpoint:, integration:|
      current_integration = project.integrations.by_name(integration).first
      expect(current_integration).to have_attributes(integration_attrs)
      expect(json_response['properties'].keys).to match_array(current_integration.api_field_names)
-     expect(json_response['properties'].keys).not_to include(*current_integration.secret_fields)
+
+     unless current_integration.secret_fields.empty?
+       expect(json_response['properties'].keys).not_to include(*current_integration.secret_fields)
+     end
    end

    context 'when all booleans are flipped' do

@@ -136,7 +139,10 @@ RSpec.shared_examples 'get an integration settings' do |endpoint:, integration:|
      expect(initialized_integration).not_to be_active
      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['properties'].keys).to match_array(integration_instance.api_field_names)
-     expect(json_response['properties'].keys).not_to include(*integration_instance.secret_fields)
+
+     unless integration_instance.secret_fields.empty?
+       expect(json_response['properties'].keys).not_to include(*integration_instance.secret_fields)
+     end
    end
  end

@@ -151,7 +157,10 @@ RSpec.shared_examples 'get an integration settings' do |endpoint:, integration:|
      expect(initialized_integration).to be_active
      expect(response).to have_gitlab_http_status(:ok)
      expect(json_response['properties'].keys).to match_array(integration_instance.api_field_names)
-     expect(json_response['properties'].keys).not_to include(*integration_instance.secret_fields)
+
+     unless integration_instance.secret_fields.empty?
+       expect(json_response['properties'].keys).not_to include(*integration_instance.secret_fields)
+     end
    end
  end