Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2025-05-01 00:07:20 +00:00
parent 326c35a5b7
commit de122df1e7
88 changed files with 287 additions and 311 deletions

View File

@ -238,9 +238,9 @@ gem 'seed-fu', '~> 2.3.7', feature_category: :shared
gem 'elasticsearch-model', '~> 7.2', feature_category: :global_search
gem 'elasticsearch-rails', '~> 7.2', require: 'elasticsearch/rails/instrumentation', feature_category: :global_search
gem 'elasticsearch-api', '7.17.11', feature_category: :global_search
gem 'aws-sdk-core', '~> 3.215.0', feature_category: :global_search
gem 'aws-sdk-core', '~> 3.222.0', feature_category: :global_search
gem 'aws-sdk-cloudformation', '~> 1', feature_category: :global_search
gem 'aws-sdk-s3', '~> 1.177.0', feature_category: :global_search
gem 'aws-sdk-s3', '~> 1.183.0', feature_category: :global_search
gem 'faraday-typhoeus', '~> 1.1', feature_category: :global_search
gem 'faraday_middleware-aws-sigv4', '~> 1.0.1', feature_category: :global_search
# Used with Elasticsearch to support http keep-alive connections
@ -505,7 +505,7 @@ end
group :development, :test do
gem 'deprecation_toolkit', '~> 1.5.1', require: false, feature_category: :shared
gem 'bullet', '~> 7.2.0', feature_category: :shared
gem 'parser', '= 3.3.7.1', feature_category: :shared
gem 'parser', '= 3.3.7.4', feature_category: :shared
gem 'pry-byebug', feature_category: :shared
gem 'pry-rails', '~> 0.3.9', feature_category: :shared
gem 'pry-shell', '~> 0.6.4', feature_category: :shared
@ -728,7 +728,7 @@ gem 'cvss-suite', '~> 3.3.0', require: 'cvss_suite', feature_category: :software
gem 'arr-pm', '~> 0.0.12', feature_category: :package_registry
# Remote Development
gem 'devfile', '~> 0.4.0', feature_category: :workspaces
gem 'devfile', '~> 0.4.3', feature_category: :workspaces
# Apple plist parsing
gem 'CFPropertyList', '~> 3.0.0', feature_category: :mobile_devops

View File

@ -36,9 +36,9 @@
{"name":"aws-eventstream","version":"1.3.0","platform":"ruby","checksum":"f1434cc03ab2248756eb02cfa45e900e59a061d7fbdc4a9fd82a5dd23d796d3f"},
{"name":"aws-partitions","version":"1.1001.0","platform":"ruby","checksum":"2979f3317d3a757508d35d0f322839f422cbc8459589b7cc4a3889d0085a8307"},
{"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
{"name":"aws-sdk-core","version":"3.215.0","platform":"ruby","checksum":"214168fb749e1209a87ecf3ed1c93277976f9739883263b7478d052ccc6e4e49"},
{"name":"aws-sdk-core","version":"3.222.2","platform":"ruby","checksum":"0639070595c6d123fc371d773a2a86f9fc208466ec88e9763d7af924a757c8d1"},
{"name":"aws-sdk-kms","version":"1.76.0","platform":"ruby","checksum":"e7f75013cba9ba357144f66bbc600631c192e2cda9dd572794be239654e2cf49"},
{"name":"aws-sdk-s3","version":"1.177.0","platform":"ruby","checksum":"bf2fd9210c3e534f2bd4d90cf855429859bdbfc1911f4d61045a3aaa3ff2f183"},
{"name":"aws-sdk-s3","version":"1.183.0","platform":"ruby","checksum":"8c06b0330c76fc57b4a04a94aec25a8474160b05b2436334d9f8e57ea2799f4c"},
{"name":"aws-sigv4","version":"1.9.1","platform":"ruby","checksum":"7753e320c39f80f82f9e0883b30de0e7b99e756adbaedc80c50b6ad59d49c379"},
{"name":"axe-core-api","version":"4.10.3","platform":"ruby","checksum":"6e10f3ed1c031804f16e8154d9d5dc658564d10850cee860e125fe665c3f0148"},
{"name":"axe-core-rspec","version":"4.10.3","platform":"ruby","checksum":"ca21d0111e2d0fcd0f1da922c9071337336732aa6a3a8dc21bed94c9a701527e"},
@ -113,10 +113,10 @@
{"name":"deprecation_toolkit","version":"1.5.1","platform":"ruby","checksum":"a8a1ab1a19ae40ea12560b65010e099f3459ebde390b76621ef0c21c516a04ba"},
{"name":"derailed_benchmarks","version":"2.2.1","platform":"ruby","checksum":"654280664fded41c9cd8fc27fc0fcfaf096023afab90eb4ac1185ba70c5d4439"},
{"name":"descendants_tracker","version":"0.0.4","platform":"ruby","checksum":"e9c41dd4cfbb85829a9301ea7e7c48c2a03b26f09319db230e6479ccdc780897"},
{"name":"devfile","version":"0.4.0","platform":"aarch64-linux","checksum":"ca9a030210755023608e8f853794c0006ebd1acff2b0f54a47202a9bf98a8bce"},
{"name":"devfile","version":"0.4.0","platform":"arm64-darwin","checksum":"99588818b3833373236af0cf0559932a4dac4ee6fa017fa7f8885e6acb83a7e3"},
{"name":"devfile","version":"0.4.0","platform":"ruby","checksum":"885b7728dae945582321364346f5bb59c4f92457f6cea2231c30ad1e5a168af9"},
{"name":"devfile","version":"0.4.0","platform":"x86_64-linux","checksum":"942fb20bce2a13a58ec58632ce1c7a1323cc7e95819e39b548529044b4ad89bc"},
{"name":"devfile","version":"0.4.3","platform":"aarch64-linux","checksum":"c603ed2d57eef71b7233b68816c37dffc3ce51a2ac3ce81ea4cb1e00c2fb9cb7"},
{"name":"devfile","version":"0.4.3","platform":"arm64-darwin","checksum":"12f0eec17100b11c3f6afa573b0dd49aa84b4cfd1d25401f6b9aa05fd975123e"},
{"name":"devfile","version":"0.4.3","platform":"ruby","checksum":"5ab6061a9848f6f7f387767326f6f77ef068347656ea63db0917a3f5fbdd74fd"},
{"name":"devfile","version":"0.4.3","platform":"x86_64-linux","checksum":"d3077aaa037dd35b5ff92c5d3b2f167f98a5844fb1db4b76afbe4500ad6fbe03"},
{"name":"device_detector","version":"1.0.0","platform":"ruby","checksum":"b800fb3150b00c23e87b6768011808ac1771fffaae74c3238ebaf2b782947a7d"},
{"name":"devise","version":"4.9.4","platform":"ruby","checksum":"920042fe5e704c548aa4eb65ebdd65980b83ffae67feb32c697206bfd975a7f8"},
{"name":"devise-two-factor","version":"4.1.1","platform":"ruby","checksum":"c95f5b07533e62217aaed3c386874d94e2d472fb5f2b6598afe8600fc17a8b95"},
@ -509,7 +509,7 @@
{"name":"pact-support","version":"1.20.0","platform":"ruby","checksum":"41c343a3124fb379684b9ad9f1a0766c5fa18d3b78d433a52e5552d8b9475871"},
{"name":"paper_trail","version":"15.1.0","platform":"ruby","checksum":"0dbccd97e9d26c54aaea256d2566eaa040a3535601f49a62d79187e77d9ba9f9"},
{"name":"parallel","version":"1.26.3","platform":"ruby","checksum":"d86babb7a2b814be9f4b81587bf0b6ce2da7d45969fab24d8ae4bf2bb4d4c7ef"},
{"name":"parser","version":"3.3.7.1","platform":"ruby","checksum":"7dbe61618025519024ac72402a6677ead02099587a5538e84371b76659e6aca1"},
{"name":"parser","version":"3.3.7.4","platform":"ruby","checksum":"2b26282274280e13f891080dc4ef3f65ce658d62e13255b246b28ec6754e98ab"},
{"name":"parslet","version":"1.8.2","platform":"ruby","checksum":"08d1ab3721cd3f175bfbee8788b2ddff71f92038f2d69bd65454c22bb9fbd98a"},
{"name":"pastel","version":"0.8.0","platform":"ruby","checksum":"481da9fb7d2f6e6b1a08faf11fa10363172dc40fd47848f096ae21209f805a75"},
{"name":"pdf-core","version":"0.10.0","platform":"ruby","checksum":"0a5d101e2063c01e3f941e1ee47cbb97f1adfc1395b58372f4f65f1300f3ce91"},

View File

@ -358,16 +358,18 @@ GEM
aws-sdk-cloudformation (1.41.0)
aws-sdk-core (~> 3, >= 3.99.0)
aws-sigv4 (~> 1.1)
aws-sdk-core (3.215.0)
aws-sdk-core (3.222.2)
aws-eventstream (~> 1, >= 1.3.0)
aws-partitions (~> 1, >= 1.992.0)
aws-sigv4 (~> 1.9)
base64
jmespath (~> 1, >= 1.6.1)
logger
aws-sdk-kms (1.76.0)
aws-sdk-core (~> 3, >= 3.188.0)
aws-sigv4 (~> 1.1)
aws-sdk-s3 (1.177.0)
aws-sdk-core (~> 3, >= 3.210.0)
aws-sdk-s3 (1.183.0)
aws-sdk-core (~> 3, >= 3.216.0)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.5)
aws-sigv4 (1.9.1)
@ -533,7 +535,7 @@ GEM
thor (>= 0.19, < 2)
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
devfile (0.4.0)
devfile (0.4.3)
device_detector (1.0.0)
devise (4.9.4)
bcrypt (~> 3.0)
@ -1458,7 +1460,7 @@ GEM
activerecord (>= 6.1)
request_store (~> 1.4)
parallel (1.26.3)
parser (3.3.7.1)
parser (3.3.7.4)
ast (~> 2.4.1)
racc
parslet (1.8.2)
@ -2078,8 +2080,8 @@ DEPENDENCIES
attr_encrypted (~> 3.2.4)!
awesome_print
aws-sdk-cloudformation (~> 1)
aws-sdk-core (~> 3.215.0)
aws-sdk-s3 (~> 1.177.0)
aws-sdk-core (~> 3.222.0)
aws-sdk-s3 (~> 1.183.0)
axe-core-rspec (~> 4.10.0)
babosa (~> 2.0)
base32 (~> 0.3.0)
@ -2114,7 +2116,7 @@ DEPENDENCIES
declarative_policy (~> 1.1.0)
deprecation_toolkit (~> 1.5.1)
derailed_benchmarks
devfile (~> 0.4.0)
devfile (~> 0.4.3)
device_detector
devise (~> 4.9.3)
devise-pbkdf2-encryptable (~> 0.0.0)!
@ -2316,7 +2318,7 @@ DEPENDENCIES
pact (~> 1.64)
paper_trail (~> 15.0)
parallel (~> 1.19)
parser (= 3.3.7.1)
parser (= 3.3.7.4)
parslet (~> 1.8)
peek (~> 1.1)
pg (~> 1.5.6)

View File

@ -36,9 +36,9 @@
{"name":"aws-eventstream","version":"1.3.0","platform":"ruby","checksum":"f1434cc03ab2248756eb02cfa45e900e59a061d7fbdc4a9fd82a5dd23d796d3f"},
{"name":"aws-partitions","version":"1.1001.0","platform":"ruby","checksum":"2979f3317d3a757508d35d0f322839f422cbc8459589b7cc4a3889d0085a8307"},
{"name":"aws-sdk-cloudformation","version":"1.41.0","platform":"ruby","checksum":"31e47539719734413671edf9b1a31f8673fbf9688549f50c41affabbcb1c6b26"},
{"name":"aws-sdk-core","version":"3.215.0","platform":"ruby","checksum":"214168fb749e1209a87ecf3ed1c93277976f9739883263b7478d052ccc6e4e49"},
{"name":"aws-sdk-core","version":"3.222.2","platform":"ruby","checksum":"0639070595c6d123fc371d773a2a86f9fc208466ec88e9763d7af924a757c8d1"},
{"name":"aws-sdk-kms","version":"1.76.0","platform":"ruby","checksum":"e7f75013cba9ba357144f66bbc600631c192e2cda9dd572794be239654e2cf49"},
{"name":"aws-sdk-s3","version":"1.177.0","platform":"ruby","checksum":"bf2fd9210c3e534f2bd4d90cf855429859bdbfc1911f4d61045a3aaa3ff2f183"},
{"name":"aws-sdk-s3","version":"1.183.0","platform":"ruby","checksum":"8c06b0330c76fc57b4a04a94aec25a8474160b05b2436334d9f8e57ea2799f4c"},
{"name":"aws-sigv4","version":"1.9.1","platform":"ruby","checksum":"7753e320c39f80f82f9e0883b30de0e7b99e756adbaedc80c50b6ad59d49c379"},
{"name":"axe-core-api","version":"4.10.3","platform":"ruby","checksum":"6e10f3ed1c031804f16e8154d9d5dc658564d10850cee860e125fe665c3f0148"},
{"name":"axe-core-rspec","version":"4.10.3","platform":"ruby","checksum":"ca21d0111e2d0fcd0f1da922c9071337336732aa6a3a8dc21bed94c9a701527e"},
@ -113,10 +113,10 @@
{"name":"deprecation_toolkit","version":"1.5.1","platform":"ruby","checksum":"a8a1ab1a19ae40ea12560b65010e099f3459ebde390b76621ef0c21c516a04ba"},
{"name":"derailed_benchmarks","version":"2.2.1","platform":"ruby","checksum":"654280664fded41c9cd8fc27fc0fcfaf096023afab90eb4ac1185ba70c5d4439"},
{"name":"descendants_tracker","version":"0.0.4","platform":"ruby","checksum":"e9c41dd4cfbb85829a9301ea7e7c48c2a03b26f09319db230e6479ccdc780897"},
{"name":"devfile","version":"0.4.0","platform":"aarch64-linux","checksum":"ca9a030210755023608e8f853794c0006ebd1acff2b0f54a47202a9bf98a8bce"},
{"name":"devfile","version":"0.4.0","platform":"arm64-darwin","checksum":"99588818b3833373236af0cf0559932a4dac4ee6fa017fa7f8885e6acb83a7e3"},
{"name":"devfile","version":"0.4.0","platform":"ruby","checksum":"885b7728dae945582321364346f5bb59c4f92457f6cea2231c30ad1e5a168af9"},
{"name":"devfile","version":"0.4.0","platform":"x86_64-linux","checksum":"942fb20bce2a13a58ec58632ce1c7a1323cc7e95819e39b548529044b4ad89bc"},
{"name":"devfile","version":"0.4.3","platform":"aarch64-linux","checksum":"c603ed2d57eef71b7233b68816c37dffc3ce51a2ac3ce81ea4cb1e00c2fb9cb7"},
{"name":"devfile","version":"0.4.3","platform":"arm64-darwin","checksum":"12f0eec17100b11c3f6afa573b0dd49aa84b4cfd1d25401f6b9aa05fd975123e"},
{"name":"devfile","version":"0.4.3","platform":"ruby","checksum":"5ab6061a9848f6f7f387767326f6f77ef068347656ea63db0917a3f5fbdd74fd"},
{"name":"devfile","version":"0.4.3","platform":"x86_64-linux","checksum":"d3077aaa037dd35b5ff92c5d3b2f167f98a5844fb1db4b76afbe4500ad6fbe03"},
{"name":"device_detector","version":"1.0.0","platform":"ruby","checksum":"b800fb3150b00c23e87b6768011808ac1771fffaae74c3238ebaf2b782947a7d"},
{"name":"devise","version":"4.9.4","platform":"ruby","checksum":"920042fe5e704c548aa4eb65ebdd65980b83ffae67feb32c697206bfd975a7f8"},
{"name":"devise-two-factor","version":"4.1.1","platform":"ruby","checksum":"c95f5b07533e62217aaed3c386874d94e2d472fb5f2b6598afe8600fc17a8b95"},
@ -509,7 +509,7 @@
{"name":"pact-support","version":"1.20.0","platform":"ruby","checksum":"41c343a3124fb379684b9ad9f1a0766c5fa18d3b78d433a52e5552d8b9475871"},
{"name":"paper_trail","version":"15.1.0","platform":"ruby","checksum":"0dbccd97e9d26c54aaea256d2566eaa040a3535601f49a62d79187e77d9ba9f9"},
{"name":"parallel","version":"1.26.3","platform":"ruby","checksum":"d86babb7a2b814be9f4b81587bf0b6ce2da7d45969fab24d8ae4bf2bb4d4c7ef"},
{"name":"parser","version":"3.3.7.1","platform":"ruby","checksum":"7dbe61618025519024ac72402a6677ead02099587a5538e84371b76659e6aca1"},
{"name":"parser","version":"3.3.7.4","platform":"ruby","checksum":"2b26282274280e13f891080dc4ef3f65ce658d62e13255b246b28ec6754e98ab"},
{"name":"parslet","version":"1.8.2","platform":"ruby","checksum":"08d1ab3721cd3f175bfbee8788b2ddff71f92038f2d69bd65454c22bb9fbd98a"},
{"name":"pastel","version":"0.8.0","platform":"ruby","checksum":"481da9fb7d2f6e6b1a08faf11fa10363172dc40fd47848f096ae21209f805a75"},
{"name":"pdf-core","version":"0.10.0","platform":"ruby","checksum":"0a5d101e2063c01e3f941e1ee47cbb97f1adfc1395b58372f4f65f1300f3ce91"},

View File

@ -358,16 +358,18 @@ GEM
aws-sdk-cloudformation (1.41.0)
aws-sdk-core (~> 3, >= 3.99.0)
aws-sigv4 (~> 1.1)
aws-sdk-core (3.215.0)
aws-sdk-core (3.222.2)
aws-eventstream (~> 1, >= 1.3.0)
aws-partitions (~> 1, >= 1.992.0)
aws-sigv4 (~> 1.9)
base64
jmespath (~> 1, >= 1.6.1)
logger
aws-sdk-kms (1.76.0)
aws-sdk-core (~> 3, >= 3.188.0)
aws-sigv4 (~> 1.1)
aws-sdk-s3 (1.177.0)
aws-sdk-core (~> 3, >= 3.210.0)
aws-sdk-s3 (1.183.0)
aws-sdk-core (~> 3, >= 3.216.0)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.5)
aws-sigv4 (1.9.1)
@ -533,7 +535,7 @@ GEM
thor (>= 0.19, < 2)
descendants_tracker (0.0.4)
thread_safe (~> 0.3, >= 0.3.1)
devfile (0.4.0)
devfile (0.4.3)
device_detector (1.0.0)
devise (4.9.4)
bcrypt (~> 3.0)
@ -1458,7 +1460,7 @@ GEM
activerecord (>= 6.1)
request_store (~> 1.4)
parallel (1.26.3)
parser (3.3.7.1)
parser (3.3.7.4)
ast (~> 2.4.1)
racc
parslet (1.8.2)
@ -2078,8 +2080,8 @@ DEPENDENCIES
attr_encrypted (~> 3.2.4)!
awesome_print
aws-sdk-cloudformation (~> 1)
aws-sdk-core (~> 3.215.0)
aws-sdk-s3 (~> 1.177.0)
aws-sdk-core (~> 3.222.0)
aws-sdk-s3 (~> 1.183.0)
axe-core-rspec (~> 4.10.0)
babosa (~> 2.0)
base32 (~> 0.3.0)
@ -2114,7 +2116,7 @@ DEPENDENCIES
declarative_policy (~> 1.1.0)
deprecation_toolkit (~> 1.5.1)
derailed_benchmarks
devfile (~> 0.4.0)
devfile (~> 0.4.3)
device_detector
devise (~> 4.9.3)
devise-pbkdf2-encryptable (~> 0.0.0)!
@ -2316,7 +2318,7 @@ DEPENDENCIES
pact (~> 1.64)
paper_trail (~> 15.0)
parallel (~> 1.19)
parser (= 3.3.7.1)
parser (= 3.3.7.4)
parslet (~> 1.8)
peek (~> 1.1)
pg (~> 1.5.6)

View File

@ -137,11 +137,11 @@ export default {
parent_pipeline: s__('JobSource|Parent Pipeline'),
pipeline: s__('JobSource|Pipeline'),
pipeline_execution_policy: s__('JobSource|Pipeline Execution Policy'),
pipeline_execution_policy_schedule: s__('JobSource|Pipeline Execution Policy Schedule'),
pipeline_execution_policy_schedule: s__('JobSource|Scheduled Pipeline Execution Policy'),
push: s__('JobSource|Push'),
scan_execution_policy: s__('JobSource|Scan Execution Policy'),
schedule: s__('JobSource|Schedule'),
security_orchestration_policy: s__('JobSource|Security Orchestration Policy'),
security_orchestration_policy: s__('JobSource|Scheduled Scan Execution Policy'),
trigger: s__('JobSource|Trigger'),
web: s__('JobSource|Web'),
webide: s__('JobSource|Web IDE'),

View File

@ -129,7 +129,7 @@ export default {
<gl-sprintf
:message="
s__(
`BulkImport|Because of settings on the source GitLab instance or group, you can't import projects with this group. To permit importing projects with this group, reconfigure the source GitLab instance or group. %{linkStart}Learn more.%{linkEnd}`,
`BulkImport|You cannot import projects with this group. To import projects, reconfigure the source GitLab instance or group. %{linkStart}Learn more.%{linkEnd}`,
)
"
>

View File

@ -277,10 +277,13 @@ export default {
},
unavailableFeaturesAlertTitle() {
return sprintf(s__('BulkImport| %{host} is running outdated GitLab version (v%{version})'), {
host: this.sourceUrl,
version: this.bulkImportSourceGroups.versionValidation.features.sourceInstanceVersion,
});
return sprintf(
s__('BulkImport|%{host} is running an outdated GitLab version (v%{version})'),
{
host: this.sourceUrl,
version: this.bulkImportSourceGroups.versionValidation.features.sourceInstanceVersion,
},
);
},
pageInfo() {
@ -701,7 +704,7 @@ export default {
<gl-sprintf
:message="
s__(
'BulkImport|Following data will not be migrated: %{bullets} Contact system administrator of %{host} to upgrade GitLab if you need this data in your migration',
'BulkImport|The following items are not migrated: %{bullets} To include these items, ask the administrator of %{host} to upgrade GitLab.',
)
"
>
@ -764,9 +767,7 @@ export default {
<help-popover :options="$options.popoverOptions">
<gl-sprintf
:message="
s__(
'BulkImport|Only groups that you have the %{role} role for are listed as groups you can import.',
)
s__('BulkImport|Only groups you have the %{role} role for are listed for import.')
"
>
<template #role>

View File

@ -5,13 +5,13 @@ export const i18n = {
ERROR_INVALID_FORMAT: s__(
'GroupSettings|Please choose a group URL with no special characters or spaces.',
),
ERROR_NAME_ALREADY_EXISTS: s__('BulkImport|Name already exists.'),
ERROR_NAME_ALREADY_EXISTS: s__('BulkImport|This name already exists.'),
ERROR_REQUIRED: __('This field is required.'),
ERROR_NAME_ALREADY_USED_IN_SUGGESTION: s__(
'BulkImport|Name already used as a target for another group.',
'BulkImport|This name is already used for another group.',
),
ERROR_IMPORT: s__('BulkImport|Importing the group failed.'),
ERROR_IMPORT_COMPLETED: s__('BulkImport|Import is finished. Pick another name for re-import'),
ERROR_IMPORT_COMPLETED: s__('BulkImport|Enter another name to re-import.'),
ERROR_TOO_MANY_REQUESTS: s__(
'Bulkmport|Over six imports in one minute were attempted. Wait at least one minute and try again.',
),

View File

@ -54,13 +54,13 @@ const sourcesItems = [
},
{
value: SOURCE_SECURITY_ORCHESTRATION_POLICY,
text: s__('PipelineSource|Security Orchestration Policy'),
text: s__('PipelineSource|Scheduled Scan Execution Policy'),
},
{ value: SOURCE_CONTAINER_REGISTRY_PUSH, text: s__('PipelineSource|Container Registry Push') },
{ value: SOURCE_DUO_WORKFLOW, text: s__('PipelineSource|Duo Workflow') },
{
value: SOURCE_PIPELINE_EXECUTION_POLICY_SCHEDULE,
text: s__('PipelineSource|Pipeline Execution Policy Schedule'),
text: s__('PipelineSource|Scheduled Pipeline Execution Policy'),
},
{ value: SOURCE_UNKNOWN, text: s__('PipelineSource|Unknown') },
];

View File

@ -242,7 +242,7 @@ class BulkImports::Entity < ApplicationRecord
def validate_only_one_sharding_key_present
return if [group, project, organization].compact.one?
errors.add(:base, s_("BulkImport|Import failed: Must have exactly one of organization, group or project."))
errors.add(:base, s_("BulkImport|Import failed. The bulk import entity must belong to only one organization, group, or project."))
end
def validate_parent_is_a_group
@ -275,7 +275,7 @@ class BulkImports::Entity < ApplicationRecord
if source.self_and_descendants.any? { |namespace| namespace.full_path == destination_namespace }
errors.add(
:base,
s_('BulkImport|Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
s_('BulkImport|Import failed. The destination cannot be a subgroup of the source group. Change the destination and try again.')
)
end
end

View File

@ -13,16 +13,6 @@ module EncryptedUserPassword
BCRYPT_STRATEGY = :bcrypt
PBKDF2_SHA512_STRATEGY = :pbkdf2_sha512
class_methods do
def stretches
prior_stretches = Rails.env.test? ? 1 : 10
return prior_stretches unless Feature.enabled?(:increase_password_storage_stretches) # rubocop:disable Gitlab/FeatureFlagWithoutActor -- required to enable FFing a Class method, which is required to FF the Stretches config
Rails.env.test? ? 5 : 13
end
end
# Use Devise DatabaseAuthenticatable#authenticatable_salt
# unless encrypted password is PBKDF2+SHA512.
def authenticatable_salt
@ -94,7 +84,6 @@ module EncryptedUserPassword
if password_strategy == encryptor
if BCRYPT_STRATEGY == password_strategy
return true if Feature.disabled?(:increase_password_storage_stretches) # rubocop:disable Gitlab/FeatureFlagWithoutActor -- required to enable FFing a Class method, which is required to FF the Stretches config
return true if bcrypt_password_matches_current_stretches?
elsif PBKDF2_SHA512_STRATEGY == password_strategy
return true if pbkdf2_password_matches_salt_length?

View File

@ -23,6 +23,8 @@ module Ci
validate_source!
ref = @create_branch ? create_repository_branch : default_branch
@workload.set_branch(ref)
service = ::Ci::CreatePipelineService.new(@project, @current_user, ref: ref)
service.execute(
@source,

View File

@ -50,7 +50,7 @@
= f.number_field :max_import_remote_file_size, class: 'form-control gl-form-input', title: s_('Import|Maximum remote file size for imports from external object storages. For example, AWS S3.'), data: { toggle: 'tooltip', container: 'body' }
%span.form-text.gl-text-subtle= _('Set to 0 for no size limit.')
.form-group
= f.label :bulk_import_max_download_file_size, s_('BulkImport|Direct transfer maximum download file size (MiB)'), class: 'label-light'
= f.label :bulk_import_max_download_file_size, s_('BulkImport|Maximum download file size (MiB)'), class: 'label-light'
= f.number_field :bulk_import_max_download_file_size, class: 'form-control gl-form-input', title: s_('BulkImport|Maximum download file size when importing from source GitLab instances by direct transfer.'), data: { toggle: 'tooltip', container: 'body' }
.form-group
= f.label :max_decompressed_archive_size, s_('Import|Maximum decompressed file size for archives from imports (MiB)'), class: 'label-light'

View File

@ -45,5 +45,5 @@
%td= s_('Import|Maximum import remote file size (MiB)')
%td= instance_configuration_human_size_cell(size_limits[:max_import_remote_file_size])
%tr
%td= s_('BulkImport|Direct transfer maximum download file size (MiB)')
%td= s_('BulkImport|Maximum download file size (MiB)')
%td= instance_configuration_human_size_cell(size_limits[:bulk_import_max_download_file_size])

View File

@ -11,24 +11,24 @@
- if @has_errors || @skipped_count > 0
%p{ style: text_style }
= safe_format(s_('BulkImport|Items assigned to placeholder users were reassigned to users in %{strong_open}%{group}%{strong_close} according to the uploaded CSV file.'), strong_tag_pair, group: @group.name)
= safe_format(s_('BulkImport|Items assigned to placeholder users have been reassigned to users in %{strong_open}%{group}%{strong_close}.'), strong_tag_pair, group: @group.name)
- else
%p{ style: text_style }
= safe_format(s_('BulkImport|All items assigned to placeholder users were reassigned to users in %{strong_open}%{group}%{strong_close} according to the uploaded CSV file.'), strong_tag_pair, group: @group.name)
= safe_format(s_('BulkImport|All items assigned to placeholder users have been reassigned to users in %{strong_open}%{group}%{strong_close}.'), strong_tag_pair, group: @group.name)
%ul
%li
%span= safe_format(ns_('BulkImport|%{count} placeholder user matched to user.', 'BulkImport|%{count} placeholder users matched to users.', @success_count), count: @success_count)
%span= safe_format(ns_('BulkImport|%{count} placeholder user has been matched to a user.', 'BulkImport|%{count} placeholder users have been matched to users.', @success_count), count: @success_count)
- if @has_errors
%li{ style: error_style }
%strong= safe_format(ns_('BulkImport|%{count} placeholder user not matched to user.', 'BulkImport|%{count} placeholder users not matched to users.', @failed_count), count: @failed_count)
%strong= safe_format(ns_('BulkImport|%{count} placeholder user has not been matched to a user.', 'BulkImport|%{count} placeholder users have not been matched to users.', @failed_count), count: @failed_count)
- if @skipped_count > 0
%li{ style: emphasis_style }
= safe_format(ns_('BulkImport|%{count} placeholder user skipped.', 'BulkImport|%{count} placeholder users skipped.', @skipped_count), count: @skipped_count)
= safe_format(ns_('BulkImport|%{count} placeholder user has been skipped.', 'BulkImport|%{count} placeholder users have been skipped.', @skipped_count), count: @skipped_count)
- if @has_errors
%p{ style: text_style }
= s_('BulkImport|A CSV file containing a list of placeholder reassignment errors has been attached to this email.')
= s_('BulkImport|A CSV file with a list of placeholder reassignment errors is attached to this email.')
%p{ style: text_style }
- extra_params = @has_errors ? { status: 'failed' } : {}

View File

@ -1,21 +1,21 @@
<%= @title %>
<% if @has_errors || @skipped_count > 0%>
<%= safe_format(s_('BulkImport|Items assigned to placeholder users were reassigned to users in %{strong_open}%{group}%{strong_close} according to the uploaded CSV file.'), strong_open: '', strong_close: '', group: @group.name) %>
<%= safe_format(s_('BulkImport|Items assigned to placeholder users have been reassigned to users in %{strong_open}%{group}%{strong_close}.'), strong_open: '', strong_close: '', group: @group.name) %>
<% else %>
<%= safe_format(s_('BulkImport|All items assigned to placeholder users were reassigned to users in %{strong_open}%{group}%{strong_close} according to the uploaded CSV file.'), strong_open: '', strong_close: '', group: @group.name) %>
<%= safe_format(s_('BulkImport|All items assigned to placeholder users have been reassigned to users in %{strong_open}%{group}%{strong_close}.'), strong_open: '', strong_close: '', group: @group.name) %>
<% end %>
- <%= safe_format(ns_('BulkImport|%{count} placeholder user matched to user.', 'BulkImport|%{count} placeholder users matched to users.', @success_count), count: @success_count) %>
- <%= safe_format(ns_('BulkImport|%{count} placeholder user has been matched to a user.', 'BulkImport|%{count} placeholder users have been matched to users.', @success_count), count: @success_count) %>
<% if @has_errors %>
- <%= safe_format(ns_('BulkImport|%{count} placeholder user not matched to user.', 'BulkImport|%{count} placeholder users not matched to users.', @failed_count), count: @failed_count) %>
- <%= safe_format(ns_('BulkImport|%{count} placeholder user has not been matched to a user.', 'BulkImport|%{count} placeholder users have not been matched to users.', @failed_count), count: @failed_count) %>
<% end %>
<% if @skipped_count > 0 %>
- <%= safe_format(ns_('BulkImport|%{count} placeholder user skipped.', 'BulkImport|%{count} placeholder users skipped.', @skipped_count), count: @skipped_count) %>
- <%= safe_format(ns_('BulkImport|%{count} placeholder user has been skipped.', 'BulkImport|%{count} placeholder users have been skipped.', @skipped_count), count: @skipped_count) %>
<% end %>
<% if @has_errors %>
<%= s_('BulkImport|A CSV file containing a list of placeholder reassignment errors has been attached to this email.') %>
<%= s_('BulkImport|A CSV file with a list of placeholder reassignment errors is attached to this email.') %>
<% end %>
<% extra_params = @has_errors ? { status: 'failed' } : {} %>

View File

@ -1,9 +0,0 @@
---
name: increase_password_storage_stretches
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/222481
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/177154
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/511397
milestone: '17.9'
group: group::authentication
type: gitlab_com_derisk
default_enabled: false

View File

@ -77,17 +77,12 @@ Devise.setup do |config|
# For bcrypt, this is the cost for hashing the password and defaults to 10. If
# using other encryptors, it sets how many times you want the password re-encrypted.
#
# Limiting the stretches to just one in testing will increase the performance of
# Limiting the stretches to just 5 in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments.
# a value less than 10 in other environments. (Note that Devise will ignore
# a value of 1 for stretches.)
# The bcrypt gem does not allow stretches to be set less than 4 (it will ignore it).
# To allow password WF upgrade testing (spec/models/concerns/encrypted_user_password_spec.rb),
# changing the test-side configuration to 5 to give the test something to do,
# along with changing the production value to 13 for https://gitlab.com/gitlab-org/gitlab/-/issues/222481.
# config.stretches = Rails.env.test? ? 5 : 13
# NOTE: This is being overridden in the `encrypted_user_password.rb` concern, behind an FF
config.stretches = Rails.env.test? ? 1 : 10
config.stretches = Rails.env.test? ? 5 : 13
# Set up a pepper to generate the encrypted password.
# config.pepper = "2ef62d549c4ff98a5d3e0ba211e72cff592060247e3bbbb9f499af1222f876f53d39b39b823132affb32858168c79c1d7741d26499901b63c6030a42129924ef"

View File

@ -291,7 +291,7 @@ The configuration is applied during the next maintenance window.
### Enable SCIM provisioning for your IP allowlist
You can use SCIM with external identity providers to automatically provision and manage users. To use SCIM, your identity provider must be able to access the [instance SCIM API](../../../development/internal_api/_index.md#instance-scim-api) endpoints. By default, IP allowlisting blocks communication to these endpoints.
You can use SCIM with external identity providers to automatically provision and manage users. To use SCIM, your identity provider must be able to access the instance SCIM API endpoints. By default, IP allowlisting blocks communication to these endpoints.
To enable SCIM while maintaining your IP allowlist:

View File

@ -13,7 +13,7 @@ title: Enable and disable GitLab features deployed behind feature flags
{{< /details >}}
GitLab adopted [feature flags strategies](../development/feature_flags/_index.md)
GitLab adopted feature flags strategies
to deploy features in an early stage of development so that they can be
incrementally rolled out.

View File

@ -175,7 +175,7 @@ Confirm the following are all true:
successfully creates the project but doesn't create the README.
- When [tailing the logs](https://docs.gitlab.com/omnibus/settings/logs.html#tail-logs-in-a-console-on-the-server)
on a Gitaly client and reproducing the error, you get `401` errors
when reaching the [`/api/v4/internal/allowed`](../../development/internal_api/_index.md) endpoint:
when reaching the `/api/v4/internal/allowed` endpoint:
```shell
# api_json.log

View File

@ -368,7 +368,7 @@ If the [Praefect `dataloss`](recovery.md#check-for-data-loss) shows [repositorie
### Alternate directory does not exists
GitLab uses the [Git alternates mechanism for deduplication](../../development/git_object_deduplication.md). `alternates` is a text file that points to the `objects` directory on
GitLab uses the Git alternates mechanism for deduplication. `alternates` is a text file that points to the `objects` directory on
a `@pool` repository to fetch objects. If this file points to an invalid path, replication can fail with one of the following the errors:
- `"error":"no alternates directory exists", "warning","msg":"alternates file does not point to valid git repository"`
@ -384,7 +384,7 @@ To investigate the cause of this error:
project.pool_repository
```
1. Check if the pool repository path exists on disk and that it matches [the `alternates` file](../../development/git_object_deduplication.md) content.
1. Check if the path in the [`alternates` file](../../development/git_object_deduplication.md) is reachable from the `objects` directory in the project.
1. Check if the pool repository path exists on disk and that it matches the `alternates` file content.
1. Check if the path in the `alternates` file is reachable from the `objects` directory in the project.
After performing these checks, reach out to GitLab Support with the information collected.

View File

@ -1086,7 +1086,7 @@ An upper and lower limit applies to each of these:
The lower limits result in additional diffs being collapsed. The higher limits
prevent any more changes from rendering. For more information about these limits,
[read the development documentation](../development/merge_request_concepts/diffs/_index.md#diff-limits).
read the GitLab development documentation about working with diffs.
### Diff version limit

View File

@ -524,7 +524,6 @@ You can see the total storage used for LFS objects for groups and projects in:
- Blog post: [Getting started with Git LFS](https://about.gitlab.com/blog/2017/01/30/getting-started-with-git-lfs-tutorial/)
- User documentation: [Git Large File Storage (LFS)](../../topics/git/lfs/_index.md)
- [Git LFS developer information](../../development/lfs.md)
## Troubleshooting

View File

@ -72,9 +72,9 @@ From left to right, the performance bar displays:
its `X-Request-Id` header.
- **Download**: a link to download the raw JSON used to generate the Performance Bar reports.
- **Memory Report**: a link that generates a
[memory profiling](../../../development/performance.md#using-memory-profiler)
memory profiling
report of the current URL.
- **Flamegraph** with mode: a link to generate a [flamegraph](../../../development/profiling.md#speedscope-flamegraphs)
- **Flamegraph** with mode: a link to generate a flamegraph
of the current URL with the selected [Stackprof mode](https://github.com/tmm1/stackprof#sampling):
- The **Wall** mode samples every *interval* of the time on a clock on a wall. The interval is set to `10100` microseconds.
- The **CPU** mode samples every *interval* of CPU activity. The interval is set to `10100` microseconds.

View File

@ -69,7 +69,7 @@ Configuring the object storage using the consolidated form has a number of advan
- It [uploads files to S3 with proper `Content-MD5` headers](https://gitlab.com/gitlab-org/gitlab-workhorse/-/issues/222).
When the consolidated form is used,
[direct upload](../development/uploads/_index.md#direct-upload) is enabled
direct upload is enabled
automatically. Thus, only the following providers can be used:
- [Amazon S3-compatible providers](#amazon-s3)

View File

@ -177,7 +177,7 @@ file for the environment, as it isn't generated dynamically.
### Additional documentation
Additional technical documentation for `gitlab-sshd` may be found in the
[GitLab Shell documentation](../../development/gitlab_shell/_index.md).
GitLab Shell documentation.
## Troubleshooting

View File

@ -420,7 +420,7 @@ separate Rails process to debug the issue:
### GitLab: API is not accessible
This often occurs when GitLab Shell attempts to request authorization via the
[internal API](../../development/internal_api/_index.md) (for example, `http://localhost:8080/api/v4/internal/allowed`), and
internal API (for example, `http://localhost:8080/api/v4/internal/allowed`), and
something in the check fails. There are many reasons why this may happen:
1. Timeout connecting to a database (for example, PostgreSQL or Redis)
@ -437,7 +437,7 @@ strace -ttTfyyy -s 1024 -p <PID of puma worker> -o /tmp/puma.txt
If you cannot isolate which Puma worker is the issue, try to run `strace`
on all the Puma workers to see where the
[`/internal/allowed`](../../development/internal_api/_index.md) endpoint gets stuck:
`/internal/allowed` endpoint gets stuck:
```shell
ps auwx | grep puma | awk '{ print " -p " $2}' | xargs strace -ttTfyyy -s 1024 -o /tmp/puma.txt

View File

@ -289,7 +289,7 @@ Benchmark.bm do |x|
end
```
For more information, review [our developer documentation about benchmarks](../../development/performance.md#benchmarks).
For more information, review our developer documentation about benchmarks.
## Active Record objects

View File

@ -81,11 +81,13 @@ See the table below for some examples:
When deprecation is announced and removal target set, the milestone for the issue
should be changed to match the removal target version.
The final comment in the issue **has to have**:
The final comment in the issue must have:
1. Text snippet for the release blog post section
1. Documentation MR ( or snippet ) for introducing the change
1. Draft MR removing the configuration or details on what must be done. See [Adding deprecation messages](https://docs.gitlab.com/omnibus/development/adding-deprecation-messages.html) for more on this
- A text snippet for the release blog post section.
- A link to a documentation merge request (or documentation snippet) that introduces the change.
- Either:
- A link to a draft merge request that removes the configuration.
- Details on what must be done.
## Example

View File

@ -59,7 +59,7 @@ Linux package.
and thus should be checked for how it is used as a part of the Linux package
to ensure compliance.
This list is sourced from the [GitLab development documentation on licensing](https://gitlab.com/gitlab-org/gitlab-foss/blob/master/doc/development/licensing.md).
This list is sourced from the GitLab development documentation on licensing.
However, due to the nature of the Linux package, the licenses may not apply
in the same way, such as with `git` and `rsync`. See the [GNU License FAQ](https://www.gnu.org/licenses/gpl-faq.en.html#MereAggregation)

View File

@ -32,7 +32,7 @@ application is more complex and has multiple components. If these components are
not present or are incorrectly configured, GitLab does not work or it works
unpredictably.
The [GitLab Architecture Overview](../../development/architecture.md) shows some of these components and how they
The GitLab Architecture Overview in the GitLab development documentation shows some of these components and how they
interact. Each of these components needs to be configured and kept up to date.
Most of the components also have external dependencies. For example, the Rails

View File

@ -29,7 +29,7 @@ The following Rake tasks are available for use with GitLab:
| [Access token expiration tasks](tokens/_index.md) | Bulk extend or remove expiration dates for access tokens. |
| [Back up and restore](../backup_restore/_index.md) | Back up, restore, and migrate GitLab instances between servers. |
| [Clean up](cleanup.md) | Clean up unneeded items from GitLab instances. |
| [Development](../../development/rake_tasks.md) | Tasks for GitLab contributors. |
| Development | Tasks for GitLab contributors. For more information, see the development documentation. |
| [Elasticsearch](../../integration/advanced_search/elasticsearch.md#gitlab-advanced-search-rake-tasks) | Maintain Elasticsearch in a GitLab instance. |
| [General maintenance](maintenance.md) | General maintenance and self-check tasks. |
| [GitHub import](github_import.md) | Retrieve and import repositories from GitHub. |
@ -47,7 +47,7 @@ The following Rake tasks are available for use with GitLab:
| [Reset user passwords](../../security/reset_user_password.md#use-a-rake-task) | Reset user passwords using Rake. |
| [Uploads migrate](uploads/migrate.md) | Migrate uploads between local storage and object storage. |
| [Uploads sanitize](uploads/sanitize.md) | Remove EXIF data from images uploaded to earlier versions of GitLab. |
| [Service Data](../../development/internal_analytics/service_ping/troubleshooting.md#generate-service-ping) | Generate and troubleshoot [Service Ping](../../development/internal_analytics/service_ping/_index.md). |
| Service Data | Generate and troubleshoot Service Ping. For more information, see Service Ping development documentation. |
| [User management](user_management.md) | Perform user management tasks. |
| [Webhook administration](web_hooks.md) | Maintain project webhooks. |
| [X.509 signatures](x509_signatures.md) | Update X.509 commit signatures, which can be useful if the certificate store changed. |

View File

@ -96,4 +96,4 @@ If you're attempting to import a large project into a development environment, G
Error importing repository into qa-perf-testing/gitlabhq - GitalyClient#call called 31 times from single request. Potential n+1?
```
This error is due to a [n+1 calls limit for development setups](../../development/gitaly.md#toomanyinvocationserror-errors). To resolve this error, set `GITALY_DISABLE_REQUEST_LIMITS=1` as an environment variable. Then restart your development environment and import again.
This error is due to an n+1 calls limit for development setups. To resolve this error, set `GITALY_DISABLE_REQUEST_LIMITS=1` as an environment variable. Then restart your development environment and import again.

View File

@ -360,7 +360,7 @@ database: gitlabhq_production
```
Starting with GitLab 17.1, migrations are executed in an
[order](../../development/database/migration_ordering.md#171-logic) that conforms to the GitLab release cadence.
order that conforms to the GitLab release cadence.
## Run incomplete database migrations
@ -383,7 +383,7 @@ status in the output of the `sudo gitlab-rake db:migrate:status` command.
```
Starting with GitLab 17.1, migrations are executed in an
[order](../../development/database/migration_ordering.md#171-logic) that conforms to the GitLab release cadence.
order that conforms to the GitLab release cadence.
## Rebuild database indexes

View File

@ -676,7 +676,7 @@ persistence classes.
| `actioncable` | Pub/Sub queue backend for ActionCable. |
| `trace_chunks` | Store [CI trace chunks](../cicd/job_logs.md#incremental-logging) data. |
| `rate_limiting` | Store [rate limiting](../settings/user_and_ip_rate_limits.md) state. |
| `sessions` | Store [sessions](../../development/session.md#gitlabsession). |
| `sessions` | Store sessions. |
| `repository_cache` | Store cache data specific to repositories. |
To make this work with Sentinel:

View File

@ -129,7 +129,7 @@ The output includes the project ID and the project name. For example:
Object pools are repositories used to deduplicate [forks of public and internal projects](../user/project/repository/forking_workflow.md) and
contain the objects from the source project. Using `objects/info/alternates`, the source project and
forks use the object pool for shared objects. For more information, see
[How Git object deduplication works in GitLab](../development/git_object_deduplication.md).
Git object deduplication information in the GitLab development documentation.
Objects are moved from the source project to the object pool when housekeeping is run on the source
project. Object pool repositories are stored similarly to regular repositories in a directory called `@pools` instead of `@hashed`

View File

@ -77,7 +77,7 @@ For GitLab.com push size limits, see [accounts and limit settings](../../user/gi
When you [add files to a repository](../../user/project/repository/web_editor.md#create-a-file)
through the web UI, the maximum **attachment** size is the limiting factor. This happens
because the [web server](../../development/architecture.md#components)
because the web server
must receive the file before GitLab can generate the commit.
Use [Git LFS](../../topics/git/lfs/_index.md) to add large files to a repository.
This setting does not apply when pushing Git LFS objects.

View File

@ -188,7 +188,7 @@ To modify this setting:
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **Settings > General**.
1. Expand **Import and export settings**.
1. Increase or decrease by changing the value in **Maximum import remote file size (MiB)**. Set to `0` to set no file size limit.
1. In **Maximum import remote file size (MiB)**, enter a value. Set to `0` for no file size limit.
## Maximum download file size for imports by direct transfer
@ -205,7 +205,7 @@ To modify this setting:
1. On the left sidebar, at the bottom, select **Admin**.
1. Select **Settings > General**.
1. Expand **Import and export settings**.
1. Increase or decrease by changing the value in **Direct transfer maximum download file size (MiB)**. Set to `0` to set no download file size limit.
1. In **Maximum download file size (MiB)**, enter a value. Set to `0` for no file size limit.
## Maximum decompressed file size for imported archives

View File

@ -159,7 +159,7 @@ Instead of a queue, a queue namespace can also be provided, to have the process
automatically listen on all queues in that namespace without needing to
explicitly list all the queue names. For more information about queue namespaces,
see the relevant section in the
[Sidekiq development documentation](../../development/sidekiq/_index.md#queue-namespaces).
Sidekiq development part of the GitLab development documentation.
### Monitor the `sidekiq-cluster` command

View File

@ -117,7 +117,7 @@ employed by routing rules. A query includes two components:
### Available attributes
Queue matching query works upon the worker attributes, described in
[Sidekiq style guide](../../development/sidekiq/_index.md). We support querying
the Sidekiq style guide in the GitLab development documentation. We support querying
based on a subset of worker attributes:
- `feature_category` - the

View File

@ -103,4 +103,4 @@ Moved to [Geo replication troubleshooting](../geo/replication/troubleshooting/sy
## Generate Service Ping
This content has been moved to [Service Ping Troubleshooting](../../development/internal_analytics/service_ping/troubleshooting.md).
This content has been moved to Troubleshooting Service Ping in the GitLab development documentation.

View File

@ -13,7 +13,7 @@ title: Sidekiq queues administration API
{{< /details >}}
Delete jobs from a Sidekiq queue that match the given
[metadata](../development/logging.md#logging-context-metadata-through-rails-or-grape-requests).
metadata.
The response has three fields:

View File

@ -1546,17 +1546,11 @@ Parameters:
{{< history >}}
- `unique_project_download_limit`, `unique_project_download_limit_interval_in_seconds`, and `unique_project_download_limit_allowlist` [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/92970) in GitLab 15.3 [with a flag](../administration/feature_flags.md) named `limit_unique_project_downloads_per_namespace_user`. Disabled by default.
- [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/365724) in GitLab 15.6.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/183101) in GitLab 18.0. Feature flag `limit_unique_project_downloads_per_namespace_user` removed.
{{< /history >}}
{{< alert type="flag" >}}
On GitLab Self-Managed, by default `unique_project_download_limit`, `unique_project_download_limit_interval_in_seconds`, `unique_project_download_limit_allowlist` and `auto_ban_user_on_excessive_projects_download` are not available.
To make them available, an administrator can [enable the feature flag](../administration/feature_flags.md)
named `limit_unique_project_downloads_per_namespace_user`.
{{< /alert >}}
Updates the project group. Only available to group owners and administrators.
```plaintext

View File

@ -1783,7 +1783,7 @@ returned by the API or viewed through the UI. When these limits impact the resul
field contains a value of `true`. Retrieve the diff data without these limits by
adding the `access_raw_diffs` parameter, which accesses diffs not from the database, but from Gitaly directly.
This approach is generally slower and more resource-intensive, but isn't subject to size limits
placed on database-backed diffs. [Limits inherent to Gitaly](../development/merge_request_concepts/diffs/_index.md#diff-limits)
placed on database-backed diffs. Limits inherent to Gitaly
still apply.
Example response:

View File

@ -188,7 +188,7 @@ curl --header "PRIVATE-TOKEN: <your_access_token>" \
This endpoint can be accessed without authentication if the repository is
publicly accessible. Diffs can have an empty diff string if
[diff limits](../development/merge_request_concepts/diffs/_index.md#diff-limits) are reached.
diff limits are reached.
```plaintext
GET /projects/:id/repository/compare
@ -579,4 +579,3 @@ Example response:
## Related topics
- User documentation for [changelogs](../user/project/changelogs.md)
- Developer documentation for [changelog entries](../development/changelog.md) in GitLab

View File

@ -275,7 +275,7 @@ The source attribute can have the following values:
- `push`: Job initiated by a code push.
- `scan_execution_policy`: Job initiated by a scan execution policy.
- `schedule`: Job initiated by a scheduled pipeline.
- `security_orchestration_policy`: Job initiated by a security orchestration policy.
- `security_orchestration_policy`: Job initiated by a scheduled scan execution policy.
- `trigger`: Job initiated by another job or pipeline.
- `unknown`: Job initiated by an unknown source.
- `web`: Job initiated by a user from the GitLab UI.

View File

@ -227,7 +227,7 @@ Use the `CI_PIPELINE_SOURCE` variable to control when to add jobs for these pipe
| `pipeline` | For [multi-project pipelines](../pipelines/downstream_pipelines.md#multi-project-pipelines) created by [using the API with `CI_JOB_TOKEN`](../pipelines/downstream_pipelines.md#trigger-a-multi-project-pipeline-by-using-the-api), or the [`trigger`](../yaml/_index.md#trigger) keyword. |
| `push` | For pipelines triggered by a Git push event, including for branches and tags. |
| `schedule` | For [scheduled pipelines](../pipelines/schedules.md). |
| `security_orchestration_policy` | For [security orchestration policy](../../user/application_security/policies/_index.md) pipelines. |
| `security_orchestration_policy` | For [scheduled scan execution policies](../../user/application_security/policies/scan_execution_policies.md) pipelines. |
| `trigger` | For pipelines created by using a [trigger token](../triggers/_index.md#configure-cicd-jobs-to-run-in-triggered-pipelines). |
| `web` | For pipelines created by selecting **New pipeline** in the GitLab UI, from the project's **Build > Pipelines** section. |
| `webide` | For pipelines created by using the [Web IDE](../../user/project/web_ide/_index.md). |

View File

@ -86,7 +86,7 @@ of GitLab and `config.yml` of GitLab Shell.
It is not necessary to create these directories manually now, and doing so can cause errors later in the installation.
For a more in-depth overview, see the [GitLab architecture doc](../development/architecture.md).
For a more in-depth overview, see the GitLab architecture in the development documentation.
## Overview

View File

@ -113,9 +113,9 @@ The decision on whether backporting a change is performed is done at the discret
[current release managers](https://about.gitlab.com/community/release-managers/),
based on *all* of the following:
1. Estimated [severity](../development/labels/_index.md#severity-labels) of the bug:
1. Estimated severity of the bug:
Highest possible impact to users based on the current definition of severity.
1. Estimated [priority](../development/labels/_index.md#priority-labels) of the bug:
1. Estimated priority of the bug:
Immediate impact on all impacted users based on the above estimated severity.
1. Potentially incurring data loss and/or security breach.
1. Potentially affecting one or more strategic accounts due to a proven inability by the user to upgrade to the current stable version.
@ -125,7 +125,7 @@ the current stable release, and two previous monthly releases. In rare cases a r
For instance, if we release `13.2.1` with a fix for a severe bug introduced in
`13.0.0`, we could backport the fix to a new `13.0.x`, and `13.1.x` patch release.
Note that [severity](../development/labels/_index.md#severity-labels) 3 and lower
Note that severity 3 and lower
requests are automatically turned down.
To request backporting to more than one stable release for consideration, raise an issue in the
@ -136,5 +136,5 @@ To request backporting to more than one stable release for consideration, raise
You may also want to read our:
- [Release documentation](https://gitlab.com/gitlab-org/release/docs) describing release procedures
- [Deprecation guidelines](../development/deprecation_guidelines/_index.md)
- Deprecation guidelines in the development documentation.
- [Responsible Disclosure Policy](https://about.gitlab.com/security/disclosure/)

View File

@ -640,9 +640,9 @@ scanners [support CycloneDX formatted reports](../ci/yaml/artifacts_reports.md#a
Compliance programs based on NIST 800-53, such as FedRAMP, require FIPS
compliance for all applicable cryptographic modules. GitLab has released
FIPS versions of its container images and provides guidance on
[how to configure GitLab to meet FIPS compliance standards](../development/fips_gitlab.md).
how to configure GitLab to meet FIPS compliance standards.
It is important to note that
[certain features are not available or supported in FIPS mode](../development/fips_gitlab.md#unsupported-features-in-fips-mode).
certain features are not available or supported in FIPS mode.
While GitLab provides FIPS-compliant images, it is the responsibility of
the customer to configure underlying infrastructure and evaluate the

View File

@ -34,7 +34,7 @@ For more information, see the [GitLab release and maintenance policy](../../poli
### Zero-downtime upgrades
Deployments follow the process for [zero-downtime upgrades](../../update/zero_downtime.md) to ensure [backward compatibility](../../development/multi_version_compatibility.md) during an upgrade. When no infrastructure changes or maintenance tasks require downtime, using the instance during an upgrade is possible and safe.
Deployments follow the process for [zero-downtime upgrades](../../update/zero_downtime.md) to ensure backward compatibility during an upgrade. When no infrastructure changes or maintenance tasks require downtime, using the instance during an upgrade is possible and safe.
During a GitLab version update, static assets may change and are only available in one of the two versions. To mitigate this situation, three techniques are adopted:

View File

@ -162,7 +162,6 @@ the total size of your repository, see
- Use Git LFS to set up [exclusive file locks](../file_management.md#configure-file-locks).
- Blog post: [Getting started with Git LFS](https://about.gitlab.com/blog/2017/01/30/getting-started-with-git-lfs-tutorial/)
- [Git LFS with Git](../file_management.md#git-lfs)
- [Git LFS developer information](../../../development/lfs.md)
- [GitLab Git Large File Storage (LFS) Administration](../../../administration/lfs/_index.md) for GitLab Self-Managed
- [Troubleshooting Git LFS](troubleshooting.md)
- [The `.gitattributes` file](../../../user/project/repository/files/git_attributes.md)

View File

@ -127,7 +127,7 @@ a copy of your repository, and download it.
```
For more information about references, see
[Git references used by Gitaly](../../development/gitaly.md#git-references-used-by-gitaly).
Git references used by Gitaly.
{{< alert type="note" >}}

View File

@ -385,5 +385,4 @@ After you provide your feedback, tidy up.
- [Conventional comments](https://conventionalcomments.org/) provide helpful structure for comments.
- [Code review guidelines](https://handbook.gitlab.com/handbook/engineering/workflow/code-review/) in the GitLab handbook
- [Merge request coaches](https://handbook.gitlab.com/job-families/expert/merge-request-coach/) in the GitLab handbook
- [Merge requests workflow](../../development/contributing/merge_request_workflow.md) for GitLab team members
- [Efficient code review tips](https://about.gitlab.com/blog/2020/09/08/efficient-code-review-tips/)

View File

@ -15,7 +15,7 @@ title: 'Tutorial: Update Git commit messages'
Occasionally, after you've made a few commits to your branch, you realize you need
to update one or more commit messages. Perhaps you found a typo, or some automation warned you
that your commit message didn't completely align with a project's
[commit message guidelines](../../development/contributing/merge_request_workflow.md#commit-messages-guidelines).
commit message guidelines.
Updating the message can be tricky if you don't have much practice using Git
from the command-line interface (CLI). But don't worry, even if you have only ever worked in

View File

@ -68,7 +68,7 @@ Six repositories are maintained:
[Enterprise Edition](https://about.gitlab.com/pricing/) ones.
- [`gitlab/gitlab-ce`](https://packages.gitlab.com/gitlab/gitlab-ce): A stripped
down package that contains only the Community Edition features.
- [`gitlab/gitlab-fips`](https://packages.gitlab.com/gitlab/gitlab-fips): [FIPS-compliant](../../development/fips_gitlab.md) builds.
- [`gitlab/gitlab-fips`](https://packages.gitlab.com/gitlab/gitlab-fips): FIPS-compliant builds.
- [`gitlab/unstable`](https://packages.gitlab.com/gitlab/unstable): Release candidates and other unstable versions.
- [`gitlab/nightly-builds`](https://packages.gitlab.com/gitlab/nightly-builds): Nightly builds.
- [`gitlab/raspberry-pi2`](https://packages.gitlab.com/gitlab/raspberry-pi2): Official Community Edition releases built for [Raspberry Pi](https://www.raspberrypi.org) packages.

View File

@ -872,7 +872,7 @@ Specific information applies to installations using Geo:
- [Container registry](../../administration/packages/container_registry.md)
- [Gitaly](../../administration/gitaly/tls_support.md)
- [GitLab Pages](../../user/project/pages/custom_domains_ssl_tls_certification/_index.md#manual-addition-of-ssltls-certificates)
- [Workhorse](../../development/workhorse/configuration.md#tls-support)
- Workhorse
You should check the size of your RSA keys (`openssl rsa -in <your-key-file> -text -noout | grep "Key:"`)
for any of the applications above before

View File

@ -398,7 +398,7 @@ ensure that your proxy server does not alter or remove signed HTTP headers.
- Git 2.47.0 and later is required by Gitaly. For self-compiled installations, you should use the [Git version provided by Gitaly](../../install/installation.md#git).
- FIPS Linux packages now use the system Libgcrypt, except FIPS Linux packages for AmazonLinux 2. Previous versions of the FIPS Linux packages used the
same Libgcrypt used by the regular Linux packages, which was a bug. For more information, see
[the FIPS documentation](../../development/fips_gitlab.md#system-libgcrypt).
the GitLab development documentation about FIPS.
- Linux `gitlab-runner` packages have broken out `gitlab-runner-helper-images` as a new required dependency. If you manually install `gitlab-runner` packages for upgrades,
be sure to also [download the helper images manually](https://docs.gitlab.com/runner/install/linux-manually/#download).

View File

@ -45,7 +45,7 @@ The zero-downtime upgrade process has the following requirements:
- Any of these components that are not deployed in a HA fashion need to be upgraded separately with downtime.
- For databases, the [Linux package only supports HA for the main GitLab database](https://gitlab.com/groups/gitlab-org/-/epics/7814). For any other databases, such as the [Praefect database](#praefect-gitaly-cluster), a third party database solution is required to achieve HA and subsequently to avoid downtime.
- **You can only upgrade one minor release at a time**. So from `16.1` to `16.2`, not to `16.3`. If you skip releases, database modifications may be run in the wrong sequence [and leave the database schema in a broken state](https://gitlab.com/gitlab-org/gitlab/-/issues/321542).
- You have to use [post-deployment migrations](../development/database/post_deployment_migrations.md).
- You have to use post-deployment migrations.
- [Zero-downtime upgrades are not available with the GitLab Charts](https://docs.gitlab.com/charts/installation/upgrade.html). Support is available with the [GitLab Operator](https://docs.gitlab.com/operator/gitlab_upgrades.html) but there are [known limitations](https://docs.gitlab.com/operator/#known-issues) with this deployment method and as such it's not covered in this guide at this time.
In addition to the above, please be aware of the following considerations:
@ -229,7 +229,7 @@ nodes to be a deploy node. This target node will be configured to run migrations
### Rails
Rails as a webserver consists primarily of [Puma](../administration/operations/puma.md), [Workhorse](../development/workhorse/_index.md), and [NGINX](../development/architecture.md#nginx).
Rails as a webserver consists primarily of [Puma](../administration/operations/puma.md), Workhorse, and NGINX.
Each of these components have different behaviours when it comes to doing a live upgrade. While Puma can allow
for a graceful reload, Workhorse doesn't. The best approach is to drain the node gracefully through other means,

View File

@ -212,6 +212,8 @@ For Gradle projects use either of the following methods to create a dependency g
##### Dependency Lock Plugin
This method gives information about dependencies which are direct.
To enable the CI/CD component on a Gradle project:
1. Edit the `build.gradle` or `build.gradle.kts` to use the
@ -256,10 +258,11 @@ build:
##### HtmlDependencyReportTask
This method gives information about dependencies which are both transitive and direct.
The [HtmlDependencyReportTask](https://docs.gradle.org/current/dsl/org.gradle.api.reporting.dependencies.HtmlDependencyReportTask.html)
is an alternative way to get the list of dependencies for a Gradle project (tested with `gradle`
versions 4 through 8). This method gives information about dependencies which are both transitive
and direct. To enable use of this method with dependency scanning the artifact from running the
versions 4 through 8). To enable use of this method with dependency scanning the artifact from running the
`gradle htmlDependencyReport` task needs to be available.
```yaml

View File

@ -86,7 +86,7 @@ the instructions for
By default, the application security jobs are configured to run for branch pipelines only.
To use them with [merge request pipelines](../../../ci/pipelines/merge_request_pipelines.md),
you must set the CI/CD variable `AST_ENABLE_MR_PIPELINES` to `"true"` (introduced in 17.11).
you must set the CI/CD variable `AST_ENABLE_MR_PIPELINES` to `"true"` ([introduced in 18.0](https://gitlab.com/gitlab-org/gitlab/-/issues/410880)).
Alternatively, you can use the [`latest` edition template](#template-editions) which enables merge request pipelines by default.

View File

@ -25,6 +25,8 @@ This topic is specifically related to user moderation in groups. For information
{{< history >}}
- [Introduced](https://gitlab.com/gitlab-org/modelops/anti-abuse/team-tasks/-/issues/155) in GitLab 15.8 [with a flag](../../administration/feature_flags.md) named `limit_unique_project_downloads_per_namespace_user`. Disabled by default.
- [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/365724) in GitLab 15.6.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/183101) in GitLab 18.0. Feature flag `limit_unique_project_downloads_per_namespace_user` removed.
{{< /history >}}

View File

@ -15,16 +15,11 @@ title: Git abuse rate limit
{{< history >}}
- [Introduced](https://gitlab.com/groups/gitlab-org/-/epics/8066) in GitLab 15.2 [with a flag](../../../administration/feature_flags.md) named `limit_unique_project_downloads_per_namespace_user`. Disabled by default.
- [Enabled on GitLab.com](https://gitlab.com/gitlab-org/gitlab/-/issues/365724) in GitLab 15.6.
- [Generally available](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/183101) in GitLab 18.0. Feature flag `limit_unique_project_downloads_per_namespace_user` removed.
{{< /history >}}
{{< alert type="flag" >}}
The availability of this feature is controlled by a feature flag.
For more information, see the history.
{{< /alert >}}
This is the group-level documentation. For GitLab Self-Managed instances, see the [administration documentation](../../../administration/reporting/git_abuse_rate_limit.md).
Git abuse rate limiting is a feature to automatically ban users who download, clone, pull, fetch, or fork more than a specified number of repositories of a group in a given time frame. Banned users cannot access the top-level group or any of its non-public subgroups through HTTP or SSH. The rate limit also applies to users who authenticate with [personal](../../profile/personal_access_tokens.md) or [group access tokens](../settings/group_access_tokens.md), as well as [CI/CD job tokens](../../../ci/jobs/ci_job_token.md). Access to unrelated groups is unaffected.
@ -37,7 +32,7 @@ GitLab team members can view more information in this confidential epic:
## Automatic ban notifications
If the `limit_unique_project_downloads_per_namespace_user` feature flag is enabled, selected users receive an email when a user is about to be banned.
Selected users receive an email notification when a user is banned.
If automatic banning is disabled, a user is not banned automatically when they exceed the limit. However, notifications are still sent. You can use this setup to determine the correct values of the rate limit settings before enabling automatic banning.

View File

@ -139,6 +139,5 @@ All changes from users without the **Allowed to push** permission must be routed
- [`CODEOWNERS` syntax](reference.md)
- [Advanced `CODEOWNERS` configuration](advanced.md)
- [Development guidelines](../../../development/code_owners/_index.md)
- [Protected branches](../repository/branches/protected.md)
- [Troubleshooting Code Owners](troubleshooting.md)

View File

@ -405,8 +405,3 @@ This comment can also be a thread.
1. Select the location where you want to comment.
GitLab shows an icon and a comment field on the image.
## Resources
- For technical details on how GitLab calculates the diff between the two revisions,
see [Working with diffs](../../../development/merge_request_concepts/diffs/_index.md).

View File

@ -66,5 +66,4 @@ For more information, see how to [show or filter system notes on a merge request
## Related topics
- [Merge request diffs for developers](../../../development/merge_request_concepts/diffs/_index.md)
- [Merge request diff storage for administrators](../../../administration/merge_request_diffs.md)

View File

@ -315,7 +315,7 @@ However, Code Suggestions might generate suggestions that are:
- Potentially insecure.
- Offensive or insensitive.
When using Code Suggestions, [code review best practice](../../../../development/code_review.md) still applies.
When using Code Suggestions, code review best practices still apply.
## How the prompt is built
@ -336,7 +336,7 @@ Code Suggestions is powered by a generative AI model.
- For code generation, algorithms or large code blocks might take more than five seconds to generate.
Your personal access token enables a secure API connection to GitLab.com or to your GitLab instance.
This API connection securely transmits a context window from your IDE/editor to the [GitLab AI gateway](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist), a GitLab hosted service. The [gateway](../../../../development/ai_architecture.md) calls the large language model APIs, and then the generated suggestion is transmitted back to your IDE/editor.
This API connection securely transmits a context window from your IDE/editor to the [GitLab AI gateway](https://gitlab.com/gitlab-org/modelops/applied-ml/code-suggestions/ai-assist), a GitLab hosted service. The gateway calls the large language model APIs, and then the generated suggestion is transmitted back to your IDE/editor.
### Streaming

View File

@ -189,7 +189,7 @@ to share objects with another repository:
## Check a fork's storage usage
Your fork uses a [deduplication strategy](../../../development/git_object_deduplication.md)
Your fork uses a deduplication strategy
to reduce the storage space it needs. Your fork can access the object pool connected to the source repository.
For more information and to check the storage use, see [View project fork storage usage](../../storage_usage_quotas.md#view-project-fork-storage-usage).

View File

@ -387,13 +387,14 @@ see [access token expiration](../../../integration/oauth_provider.md#access-toke
{{< /details >}}
On GitLab Self-Managed, [Workhorse](../../../development/workhorse/_index.md) must be installed
and running in front of the GitLab Rails server.
Otherwise, you might encounter issues when you open the Web IDE or
use certain features like Markdown preview.
On GitLab Self-Managed, Workhorse must be installed and running in front of the GitLab Rails
server. If it is not, you might encounter issues when you open the Web IDE or use certain
features like Markdown preview.
For more information about this dependency,
see [features that rely on Workhorse](../../../development/workhorse/gitlab_features.md#5-web-ide).
For security, some parts of the Web IDE must run in a separate origin. To support this
approach, the Web IDE uses Workhorse to route requests appropriately to and from Web IDE
assets. The Web IDE assets are static frontend assets, so it's unnecessary overhead to rely
on Rails for this effort.
### Report a problem

View File

@ -148,7 +148,7 @@ components:
{{< alert type="note" >}}
This container `image` is updated regularly. `[VERSION_TAG]` is a placeholder only. For the latest version, see the
[default devfile](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/remote_development/settings/default_devfile.yaml) file.
[default `default_devfile.yaml`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/remote_development/settings/default_devfile.yaml).
{{< /alert >}}

View File

@ -24,7 +24,7 @@ module Ai
end
def artifacts_path
['workflow.diff']
[]
end
def variables_without_expand
@ -37,6 +37,7 @@ module Ai
def variables
{
DUO_WORKFLOW_BASE_PATH: './',
DUO_WORKFLOW_DEFINITION: @params[:workflow_definition],
DUO_WORKFLOW_GOAL: @params[:goal],
DUO_WORKFLOW_WORKFLOW_ID: String(@params[:workflow_id]),
GITLAB_OAUTH_TOKEN: @params[:workflow_oauth_token],
@ -56,13 +57,13 @@ module Ai
def commands
[
%(echo $DUO_WORKFLOW_DEFINITION),
%(echo $DUO_WORKFLOW_GOAL),
%(git checkout #{@branch}),
%(wget #{Gitlab::DuoWorkflow::Executor.executor_binary_url} -O /tmp/duo-workflow-executor.tar.gz),
%(tar xf /tmp/duo-workflow-executor.tar.gz --directory /tmp),
%(chmod +x /tmp/duo-workflow-executor),
%(/tmp/duo-workflow-executor),
%(git add .),
%(git diff --staged),
%(git diff --staged > workflow.diff)
%(/tmp/duo-workflow-executor)
]
end
end

View File

@ -17,12 +17,12 @@ module BulkImports
end
def self.destination_namespace_validation_failure(destination_namespace)
self.new(format(s_("BulkImport|Import failed. Destination '%{destination}' is invalid, " \
"or you don't have permission."), destination: destination_namespace))
self.new(format(s_("BulkImport|Import failed. '%{destination}' is invalid, " \
"or you do not have permission."), destination: destination_namespace))
end
def self.destination_slug_validation_failure
self.new(format(s_("BulkImport|Import failed. Destination URL %{url}"),
self.new(format(s_("BulkImport|Import failed. The destination URL %{url}"),
url: Gitlab::Regex.oci_repository_path_regex_message))
end
@ -37,13 +37,13 @@ module BulkImports
end
def self.not_authorized(full_path)
self.new(format(s_("BulkImport|Import failed. You don't have permission to export '%{path}'."),
self.new(format(s_("BulkImport|Import failed. You do not have permission to export '%{path}'."),
path: full_path))
end
def self.setting_not_enabled
self.new(s_("BulkImport|Migration by direct transfer disabled on source or destination instance. " \
"Ask an administrator to enable it on both instances and try again."))
self.new(s_("BulkImport|Migration by direct transfer is disabled on the source or destination instance. " \
"Ask an administrator to enable this feature on both instances and try again."))
end
end
end

View File

@ -6,6 +6,10 @@ module Ci
def job
raise "not implemented"
end
def set_branch(branch)
@branch = branch
end
end
end
end

View File

@ -9,7 +9,7 @@ module Gitlab
extend ::Gitlab::Utils::Override
SNOWPLOW_NAMESPACE = 'gl'
PRODUCT_USAGE_EVENT_COLLECT_ENDPOINT = 'events.gitlab.net'
PRODUCT_USAGE_EVENT_COLLECT_ENDPOINT = 'events-stg.gitlab.net'
def initialize
@event_eligibility_checker = Gitlab::Tracking::EventEligibilityChecker.new

View File

@ -11135,39 +11135,36 @@ msgstr ""
msgid "Bulk update"
msgstr ""
msgid "BulkImport| %{host} is running outdated GitLab version (v%{version})"
msgstr ""
msgid "BulkImport|%{count} placeholder user matched to user."
msgid_plural "BulkImport|%{count} placeholder users matched to users."
msgid "BulkImport|%{count} placeholder user has been matched to a user."
msgid_plural "BulkImport|%{count} placeholder users have been matched to users."
msgstr[0] ""
msgstr[1] ""
msgid "BulkImport|%{count} placeholder user not matched to user."
msgid_plural "BulkImport|%{count} placeholder users not matched to users."
msgid "BulkImport|%{count} placeholder user has been skipped."
msgid_plural "BulkImport|%{count} placeholder users have been skipped."
msgstr[0] ""
msgstr[1] ""
msgid "BulkImport|%{count} placeholder user skipped."
msgid_plural "BulkImport|%{count} placeholder users skipped."
msgid "BulkImport|%{count} placeholder user has not been matched to a user."
msgid_plural "BulkImport|%{count} placeholder users have not been matched to users."
msgstr[0] ""
msgstr[1] ""
msgid "BulkImport|%{feature} (require v%{version})"
msgstr ""
msgid "BulkImport|A CSV file containing a list of placeholder reassignment errors has been attached to this email."
msgid "BulkImport|%{host} is running an outdated GitLab version (v%{version})"
msgstr ""
msgid "BulkImport|All items assigned to placeholder users were reassigned to users in %{strong_open}%{group}%{strong_close} according to the uploaded CSV file."
msgid "BulkImport|A CSV file with a list of placeholder reassignment errors is attached to this email."
msgstr ""
msgid "BulkImport|All items assigned to placeholder users have been reassigned to users in %{strong_open}%{group}%{strong_close}."
msgstr ""
msgid "BulkImport|Be aware of %{visibilityLinkStart}visibility rules%{visibilityLinkEnd} and %{placeholdersLinkStart}placeholder user limits%{placeholdersLinkEnd} when importing groups."
msgstr ""
msgid "BulkImport|Because of settings on the source GitLab instance or group, you can't import projects with this group. To permit importing projects with this group, reconfigure the source GitLab instance or group. %{linkStart}Learn more.%{linkEnd}"
msgstr ""
msgid "BulkImport|Bulk reassignment failed"
msgstr ""
@ -11180,16 +11177,16 @@ msgstr ""
msgid "BulkImport|Direct transfer"
msgstr ""
msgid "BulkImport|Direct transfer maximum download file size (MiB)"
msgid "BulkImport|Enter another name to re-import."
msgstr ""
msgid "BulkImport|Filter by source group"
msgstr ""
msgid "BulkImport|Following data will not be migrated: %{bullets} Contact system administrator of %{host} to upgrade GitLab if you need this data in your migration"
msgid "BulkImport|Import completed"
msgstr ""
msgid "BulkImport|Import completed"
msgid "BulkImport|Import failed. '%{destination}' is invalid, or you do not have permission."
msgstr ""
msgid "BulkImport|Import failed. '%{path}' already exists. Change the destination and try again."
@ -11198,27 +11195,21 @@ msgstr ""
msgid "BulkImport|Import failed. '%{path}' not found."
msgstr ""
msgid "BulkImport|Import failed. Destination '%{destination}' is invalid, or you don't have permission."
msgid "BulkImport|Import failed. The bulk import entity must belong to only one organization, group, or project."
msgstr ""
msgid "BulkImport|Import failed. Destination URL %{url}"
msgid "BulkImport|Import failed. The destination URL %{url}"
msgstr ""
msgid "BulkImport|Import failed. You don't have permission to export '%{path}'."
msgid "BulkImport|Import failed. The destination cannot be a subgroup of the source group. Change the destination and try again."
msgstr ""
msgid "BulkImport|Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again."
msgstr ""
msgid "BulkImport|Import failed: Must have exactly one of organization, group or project."
msgid "BulkImport|Import failed. You do not have permission to export '%{path}'."
msgstr ""
msgid "BulkImport|Import groups by direct transfer"
msgstr ""
msgid "BulkImport|Import is finished. Pick another name for re-import"
msgstr ""
msgid "BulkImport|Import user memberships"
msgstr ""
@ -11237,7 +11228,7 @@ msgstr ""
msgid "BulkImport|Invalid source URL. Enter only the base URL of the source GitLab instance."
msgstr ""
msgid "BulkImport|Items assigned to placeholder users were reassigned to users in %{strong_open}%{group}%{strong_close} according to the uploaded CSV file."
msgid "BulkImport|Items assigned to placeholder users have been reassigned to users in %{strong_open}%{group}%{strong_close}."
msgstr ""
msgid "BulkImport|Items that failed to be imported for %{id}"
@ -11246,10 +11237,13 @@ msgstr ""
msgid "BulkImport|Last imported to %{link}"
msgstr ""
msgid "BulkImport|Maximum download file size (MiB)"
msgstr ""
msgid "BulkImport|Maximum download file size when importing from source GitLab instances by direct transfer."
msgstr ""
msgid "BulkImport|Migration by direct transfer disabled on source or destination instance. Ask an administrator to enable it on both instances and try again."
msgid "BulkImport|Migration by direct transfer is disabled on the source or destination instance. Ask an administrator to enable this feature on both instances and try again."
msgstr ""
msgid "BulkImport|Migration details"
@ -11258,12 +11252,6 @@ msgstr ""
msgid "BulkImport|Migration history"
msgstr ""
msgid "BulkImport|Name already exists."
msgstr ""
msgid "BulkImport|Name already used as a target for another group."
msgstr ""
msgid "BulkImport|New group"
msgstr ""
@ -11279,7 +11267,7 @@ msgstr ""
msgid "BulkImport|No parent"
msgstr ""
msgid "BulkImport|Only groups that you have the %{role} role for are listed as groups you can import."
msgid "BulkImport|Only groups you have the %{role} role for are listed for import."
msgstr ""
msgid "BulkImport|Path of the new group."
@ -11333,12 +11321,21 @@ msgstr ""
msgid "BulkImport|Template / File-based import / Direct transfer"
msgstr ""
msgid "BulkImport|The following items are not migrated: %{bullets} To include these items, ask the administrator of %{host} to upgrade GitLab."
msgstr ""
msgid "BulkImport|The import you started on %{start_date} from %{strong_open}%{hostname}%{strong_close} has completed. You can now review your import results."
msgstr ""
msgid "BulkImport|This %{importable} was imported from another instance."
msgstr ""
msgid "BulkImport|This name already exists."
msgstr ""
msgid "BulkImport|This name is already used for another group."
msgstr ""
msgid "BulkImport|Unable to process the CSV file for %{strong_open}%{group}%{strong_close} to reassign placeholders. Try to upload the file again."
msgstr ""
@ -11357,6 +11354,9 @@ msgstr ""
msgid "BulkImport|View placeholders"
msgstr ""
msgid "BulkImport|You cannot import projects with this group. To import projects, reconfigure the source GitLab instance or group. %{linkStart}Learn more.%{linkEnd}"
msgstr ""
msgid "BulkImport|Your imported groups and projects will appear here."
msgstr ""
@ -34350,9 +34350,6 @@ msgstr ""
msgid "JobSource|Pipeline Execution Policy"
msgstr ""
msgid "JobSource|Pipeline Execution Policy Schedule"
msgstr ""
msgid "JobSource|Push"
msgstr ""
@ -34362,7 +34359,10 @@ msgstr ""
msgid "JobSource|Schedule"
msgstr ""
msgid "JobSource|Security Orchestration Policy"
msgid "JobSource|Scheduled Pipeline Execution Policy"
msgstr ""
msgid "JobSource|Scheduled Scan Execution Policy"
msgstr ""
msgid "JobSource|Trigger"
@ -44361,16 +44361,16 @@ msgstr ""
msgid "PipelineSource|Pipeline"
msgstr ""
msgid "PipelineSource|Pipeline Execution Policy Schedule"
msgstr ""
msgid "PipelineSource|Push"
msgstr ""
msgid "PipelineSource|Schedule"
msgstr ""
msgid "PipelineSource|Security Orchestration Policy"
msgid "PipelineSource|Scheduled Pipeline Execution Policy"
msgstr ""
msgid "PipelineSource|Scheduled Scan Execution Policy"
msgstr ""
msgid "PipelineSource|Trigger"
@ -55147,6 +55147,9 @@ msgstr ""
msgid "SecurityOrchestration|Variable option"
msgstr ""
msgid "SecurityOrchestration|Variables override configuration has invalid structure."
msgstr ""
msgid "SecurityOrchestration|Variables that can be overridden:"
msgstr ""
@ -66875,6 +66878,9 @@ msgstr ""
msgid "Vulnerability|A solution is available for this vulnerability"
msgstr ""
msgid "Vulnerability|Active secret"
msgstr ""
msgid "Vulnerability|Activity"
msgstr ""
@ -66992,6 +66998,9 @@ msgstr ""
msgid "Vulnerability|Image: %{linkStart}%{image}%{linkEnd}"
msgstr ""
msgid "Vulnerability|Inactive secret"
msgstr ""
msgid "Vulnerability|Information related to how the vulnerability was discovered and its impact on the system."
msgstr ""
@ -67004,6 +67013,9 @@ msgstr ""
msgid "Vulnerability|Namespace:"
msgstr ""
msgid "Vulnerability|Possibly active secret"
msgstr ""
msgid "Vulnerability|Project"
msgstr ""

View File

@ -94,7 +94,6 @@ spec/frontend/design_management/components/design_notes/design_reply_form_spec.j
spec/frontend/design_management/components/design_overlay_spec.js
spec/frontend/design_management/pages/design/index_spec.js
spec/frontend/design_management/pages/index_spec.js
spec/frontend/diffs/components/diff_line_note_form_spec.js
spec/frontend/editor/components/source_editor_toolbar_spec.js
spec/frontend/editor/extensions/source_editor_toolbar_ext_spec.js
spec/frontend/error_tracking/components/error_details_spec.js

View File

@ -92,11 +92,11 @@ describe('Job Sidebar Details Container', () => {
['parent_pipeline', 'Source: Parent Pipeline'],
['pipeline', 'Source: Pipeline'],
['pipeline_execution_policy', 'Source: Pipeline Execution Policy'],
['pipeline_execution_policy_schedule', 'Source: Pipeline Execution Policy Schedule'],
['pipeline_execution_policy_schedule', 'Source: Scheduled Pipeline Execution Policy'],
['push', 'Source: Push'],
['scan_execution_policy', 'Source: Scan Execution Policy'],
['schedule', 'Source: Schedule'],
['security_orchestration_policy', 'Source: Security Orchestration Policy'],
['security_orchestration_policy', 'Source: Scheduled Scan Execution Policy'],
['trigger', 'Source: Trigger'],
['web', 'Source: Web'],
['webide', 'Source: Web IDE'],

View File

@ -114,7 +114,7 @@ describe('DiffLineNoteForm', () => {
findNoteForm().vm.$emit('cancelForm', true, true);
await nextTick();
expect(confirmAction).toHaveBeenCalled();
await nextTick();
await waitForPromises();
expect(useLegacyDiffs().cancelCommentForm).toHaveBeenCalledWith({
lineCode: diffLines[1].line_code,
@ -126,7 +126,7 @@ describe('DiffLineNoteForm', () => {
findNoteForm().vm.$emit('cancelForm', true, true);
await nextTick();
expect(confirmAction).toHaveBeenCalled();
await nextTick();
await waitForPromises();
expect(clearDraft).toHaveBeenCalledWith(
`Note/Issue/${noteableDataMock.id}//DiffNote//${diffLines[1].line_code}`,

View File

@ -81,10 +81,10 @@ describe('PipelinesDashboardClickhouseFilters', () => {
'Parent Pipeline',
'On-Demand DAST Scan',
'On-Demand DAST Validation',
'Security Orchestration Policy',
'Scheduled Scan Execution Policy',
'Container Registry Push',
'Duo Workflow',
'Pipeline Execution Policy Schedule',
'Scheduled Pipeline Execution Policy',
'Unknown',
]);
});

View File

@ -280,7 +280,7 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow, :do_not_stub_snowplow_b
end
it 'returns product usage event collection hostname' do
expect(subject.hostname).to eq('events.gitlab.net')
expect(subject.hostname).to eq('events-stg.gitlab.net')
end
end
end

View File

@ -79,10 +79,11 @@ RSpec.describe Emails::Imports, feature_category: :importers do
it 'sends success email with skipped rows info' do
is_expected.to have_subject("#{group.name} | Placeholder reassignments completed successfully")
is_expected.to have_content("Items assigned to placeholder users were reassigned to users in #{group.name}")
is_expected.to have_content('1 placeholder user matched to user.')
is_expected.to have_content('1 placeholder user skipped.')
is_expected.not_to have_content('placeholder users not matched to users.')
is_expected.to have_content(
"Items assigned to placeholder users have been reassigned to users in #{group.name}")
is_expected.to have_content('1 placeholder user has been matched to a user.')
is_expected.to have_content('1 placeholder user has been skipped.')
is_expected.not_to have_content('placeholder users have not been matched to users.')
is_expected.to have_body_text(group_group_members_url(group, tab: 'placeholders'))
end
end
@ -98,11 +99,10 @@ RSpec.describe Emails::Imports, feature_category: :importers do
is_expected.to have_subject("#{group.name} | Placeholder reassignments completed with errors")
is_expected.to have_content('Placeholder reassignments completed with errors')
is_expected.to have_content(
"Items assigned to placeholder users were reassigned to users in #{group.name}"
)
is_expected.to have_content('689 placeholder users matched to users.')
is_expected.to have_content('1 placeholder user not matched to user.')
is_expected.to have_content('25 placeholder users skipped.')
"Items assigned to placeholder users have been reassigned to users in #{group.name}")
is_expected.to have_content('689 placeholder users have been matched to users.')
is_expected.to have_content('1 placeholder user has not been matched to a user.')
is_expected.to have_content('25 placeholder users have been skipped.')
is_expected.to have_body_text(group_group_members_url(group, tab: 'placeholders', status: 'failed'))
end
end
@ -115,11 +115,10 @@ RSpec.describe Emails::Imports, feature_category: :importers do
is_expected.to have_subject("#{group.name} | Placeholder reassignments completed with errors")
is_expected.to have_content('Placeholder reassignments completed with errors')
is_expected.to have_content(
"Items assigned to placeholder users were reassigned to users in #{group.name}"
)
is_expected.to have_content('689 placeholder users matched to users.')
is_expected.to have_content('362 placeholder users not matched to users.')
is_expected.not_to have_content('placeholder users skipped.')
"Items assigned to placeholder users have been reassigned to users in #{group.name}")
is_expected.to have_content('689 placeholder users have been matched to users.')
is_expected.to have_content('362 placeholder users have not been matched to users.')
is_expected.not_to have_content('placeholder users have been skipped.')
is_expected.to have_body_text(group_group_members_url(group, tab: 'placeholders', status: 'failed'))
end
end

View File

@ -45,7 +45,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(entity).not_to be_valid
expect(entity.errors[:base])
.to include('Import failed: Must have exactly one of organization, group or project.')
.to include('Import failed. The bulk import entity must belong to only one organization, group, or project.')
end
end
@ -56,7 +56,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(entity).not_to be_valid
expect(entity.errors[:base])
.to include('Import failed: Must have exactly one of organization, group or project.')
.to include('Import failed. The bulk import entity must belong to only one organization, group, or project.')
end
end
@ -67,7 +67,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(entity).not_to be_valid
expect(entity.errors[:base])
.to include('Import failed: Must have exactly one of organization, group or project.')
.to include('Import failed. The bulk import entity must belong to only one organization, group, or project.')
end
end
@ -78,7 +78,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(entity).not_to be_valid
expect(entity.errors[:base])
.to include('Import failed: Must have exactly one of organization, group or project.')
.to include('Import failed. The bulk import entity must belong to only one organization, group, or project.')
end
end
@ -201,7 +201,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(entity).not_to be_valid
expect(entity.errors).to include(:base)
expect(entity.errors[:base])
.to include('Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
.to include('Import failed. The destination cannot be a subgroup of the source group. Change the destination and try again.')
end
it 'is invalid if destination namespace is a descendant of the source' do
@ -218,7 +218,7 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
expect(entity).not_to be_valid
expect(entity.errors[:base])
.to include('Import failed: Destination cannot be a subgroup of the source group. Change the destination and try again.')
.to include('Import failed. The destination cannot be a subgroup of the source group. Change the destination and try again.')
end
end

View File

@ -55,30 +55,11 @@ RSpec.describe User, feature_category: :system_access do
context 'when the default encryption method is BCrypt' do
context 'when the user password is hashed with work factor 4' do
let(:encrypted_password) { "$2a$04$ThzqXSFnlW3uH86uQ79puOU7vARSFuuNzb1nUGfsBeYtCLkdymAQW" }
let(:increase_password_storage_stretches) { nil }
before do
stub_feature_flags(increase_password_storage_stretches: increase_password_storage_stretches)
end
context 'when feature flag is set to true' do
let(:increase_password_storage_stretches) { true }
it 'upgrades stretches' do
expect(user.encrypted_password).to start_with('$2a$04$')
user.valid_password?('security')
expect(user.encrypted_password).to start_with('$2a$05$')
end
end
context 'when feature flag is set to false' do
let(:increase_password_storage_stretches) { false }
it 'does not upgrade stretches' do
expect(user.encrypted_password).to start_with('$2a$04$')
user.valid_password?('security')
expect(user.encrypted_password).to start_with('$2a$04$')
end
it 'upgrades stretches' do
expect(user.encrypted_password).to start_with('$2a$04$')
user.valid_password?('security')
expect(user.encrypted_password).to start_with('$2a$05$')
end
end

View File

@ -348,7 +348,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to eq("Import failed. Destination 'invalid-destination-namespace' is invalid, or you don't have permission.")
expect(json_response['message']).to eq("Import failed. 'invalid-destination-namespace' is invalid, or you do not have permission.")
end
end
@ -411,7 +411,7 @@ RSpec.describe API::BulkImports, feature_category: :importers do
request
expect(json_response['message']).to eq("Import failed. You don't have permission to export 'full_path'.")
expect(json_response['message']).to eq("Import failed. You do not have permission to export 'full_path'.")
end
end
@ -466,8 +466,8 @@ RSpec.describe API::BulkImports, feature_category: :importers do
request
expect(json_response['message']).to include("Migration by direct transfer disabled on source or destination instance. " \
"Ask an administrator to enable it on both instances and try again.")
expect(json_response['message']).to include("Migration by direct transfer is disabled on the source or destination instance. " \
"Ask an administrator to enable this feature on both instances and try again.")
end
end

View File

@ -122,7 +122,7 @@ RSpec.describe BulkImports::CreateService, :clean_gitlab_redis_shared_state, fea
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message).to eq("Import failed. You don't have permission to export 'full/path/to/group1'.")
expect(result.message).to eq("Import failed. You do not have permission to export 'full/path/to/group1'.")
end
end
@ -150,8 +150,8 @@ RSpec.describe BulkImports::CreateService, :clean_gitlab_redis_shared_state, fea
expect(result).to be_error
expect(result.message)
.to eq(
"Migration by direct transfer disabled on source or destination instance. " \
"Ask an administrator to enable it on both instances and try again."
"Migration by direct transfer is disabled on the source or destination instance. " \
"Ask an administrator to enable this feature on both instances and try again."
)
end
end
@ -799,8 +799,8 @@ RSpec.describe BulkImports::CreateService, :clean_gitlab_redis_shared_state, fea
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq("Import failed. Destination 'destination-namespace' is invalid, " \
"or you don't have permission.")
.to eq("Import failed. 'destination-namespace' is invalid, " \
"or you do not have permission.")
end
end
@ -826,8 +826,8 @@ RSpec.describe BulkImports::CreateService, :clean_gitlab_redis_shared_state, fea
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq("Import failed. Destination '#{parent_group.path}' is invalid, " \
"or you don't have permission.")
.to eq("Import failed. '#{parent_group.path}' is invalid, " \
"or you do not have permission.")
end
end
@ -853,8 +853,8 @@ RSpec.describe BulkImports::CreateService, :clean_gitlab_redis_shared_state, fea
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq("Import failed. Destination '#{parent_group.path}' is invalid, " \
"or you don't have permission.")
.to eq("Import failed. '#{parent_group.path}' is invalid, " \
"or you do not have permission.")
end
end
end
@ -881,7 +881,7 @@ RSpec.describe BulkImports::CreateService, :clean_gitlab_redis_shared_state, fea
expect(result).to be_error
expect(result.message)
.to eq(
"Import failed. Destination URL " \
"Import failed. The destination URL " \
"can only include non-accented letters, digits, '_', '-' and '.'. " \
"It must not start with '-', '_', or '.', nor end with '-', '_', '.', '.git', or '.atom'."
)

View File

@ -59,6 +59,13 @@ RSpec.describe Ci::Workloads::RunWorkloadService, feature_category: :continuous_
expect(build.variables.map(&:key)).not_to include('A_INSTANCE_VARIABLE')
end
it 'sets the branch on the workload to the project default_branch' do
pipeline = execute.payload
expect(pipeline.ref).to eq("master")
expect(pipeline.ref).to eq(workload.instance_variable_get(:@branch))
end
context 'when create_branch: true' do
let(:create_branch) { true }
@ -72,6 +79,11 @@ RSpec.describe Ci::Workloads::RunWorkloadService, feature_category: :continuous_
pipeline = execute.payload
expect(pipeline.ref).to match(%r{workloads/\w+})
end
it 'sets the branch on the workload to the created branch' do
pipeline = execute.payload
expect(pipeline.ref).to eq(workload.instance_variable_get(:@branch))
end
end
end