Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-04-25 00:08:36 +00:00
parent 3d911c6c1f
commit 15e5a05bcd
75 changed files with 1214 additions and 545 deletions

View File

@@ -239,9 +239,6 @@ gem 'rainbow', '~> 3.0'
# Progress bar
gem 'ruby-progressbar', '~> 1.10'
# GitLab settings
gem 'settingslogic', '~> 2.0.9'
# Linear-time regex library for untrusted regular expressions
gem 're2', '~> 1.6.0'

View File

@@ -1418,7 +1418,6 @@ GEM
sentry-ruby (~> 5.8.0)
sidekiq (>= 3.0)
set (1.0.1)
settingslogic (2.0.9)
sexp_processor (4.16.1)
shellany (0.0.1)
shoulda-matchers (5.1.0)
@@ -1920,7 +1919,6 @@ DEPENDENCIES
sentry-raven (~> 3.1)
sentry-ruby (~> 5.8.0)
sentry-sidekiq (~> 5.8.0)
settingslogic (~> 2.0.9)
shoulda-matchers (~> 5.1.0)
sidekiq (~> 6.5.7)
sidekiq-cron (~> 1.8.0)
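
This commit drops the settingslogic gem from the Gemfile, the lockfile, and DEPENDENCIES; later hunks replace its objects with plain hashes and the in-tree GitlabSettings::Options. For context, a minimal sketch of the settingslogic pattern being removed, based on that gem's documented API (the class name and YAML path here are illustrative, not GitLab's code):

# Illustrative only: the settingslogic usage pattern this commit removes.
require 'settingslogic'

class AppSettings < Settingslogic
  source 'config/app.yml' # hypothetical YAML file
  namespace 'production'  # pick one environment section of the YAML
end

# Nested YAML keys become chained method calls:
AppSettings.ldap.enabled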

View File

@@ -1,5 +1,5 @@
<script>
import { GlAlert } from '@gitlab/ui';
import { GlAlert, GlIntersectionObserver, GlLoadingIcon } from '@gitlab/ui';
import { __ } from '~/locale';
import { queryToObject } from '~/lib/utils/url_utility';
import { validateQueryString } from '~/jobs/components/filtered_search/utils';
@@ -14,6 +14,7 @@ import CancelableJobs from './graphql/queries/get_cancelable_jobs_count.query.gr
export default {
i18n: {
jobsFetchErrorMsg: __('There was an error fetching the jobs.'),
loadingAriaLabel: __('Loading'),
},
components: {
JobsSkeletonLoader,
@@ -21,6 +22,8 @@ export default {
GlAlert,
JobsTable,
JobsTableTabs,
GlIntersectionObserver,
GlLoadingIcon,
},
inject: {
jobStatuses: {
@@ -87,6 +90,9 @@ export default {
this.jobs.list.length === 0 && !this.scope && !this.loading && !this.filterSearchTriggered
);
},
hasNextPage() {
return this.jobs?.pageInfo?.hasNextPage;
},
variables() {
return { ...this.validatedQueryString };
},
@@ -122,6 +128,18 @@ export default {
this.$apollo.queries.jobs.refetch({ statuses: scope });
},
fetchMoreJobs() {
if (!this.loading) {
this.infiniteScrollingTriggered = true;
const parameters = this.variables;
parameters.after = this.jobs?.pageInfo?.endCursor;
this.$apollo.queries.jobs.fetchMore({
variables: parameters,
});
}
},
},
};
</script>
@@ -149,5 +167,13 @@ export default {
:table-fields="DEFAULT_FIELDS_ADMIN"
class="gl-table-no-top-border"
/>
<gl-intersection-observer v-if="hasNextPage" @appear="fetchMoreJobs">
<gl-loading-icon
v-if="showLoadingSpinner"
size="lg"
:aria-label="$options.i18n.loadingAriaLabel"
/>
</gl-intersection-observer>
</div>
</template>
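
The component above implements Relay-style cursor pagination: hasNextPage reads pageInfo.hasNextPage to decide whether to render the gl-intersection-observer sentinel, and when the sentinel scrolls into view, fetchMoreJobs re-runs the query with after set to the current endCursor. The same control flow as a hedged Ruby sketch, where fetch_jobs(after:) is a hypothetical helper standing in for the GraphQL jobs query:

# Hypothetical fetch_jobs(after:) stands in for the GraphQL jobs query.
def all_jobs
  jobs = []
  cursor = nil

  loop do
    page = fetch_jobs(after: cursor) # { 'nodes' => [...], 'pageInfo' => { ... } }
    jobs.concat(page['nodes'])

    info = page['pageInfo']
    break unless info['hasNextPage'] # mirrors the hasNextPage computed property

    cursor = info['endCursor'] # mirrors parameters.after = endCursor
  end

  jobs
end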

View File

@@ -5,16 +5,16 @@ require_relative '../object_store_settings'
require_relative '../smime_signature_settings'
# Default settings
Settings['shared'] ||= Settingslogic.new({})
Settings['shared'] ||= {}
Settings.shared['path'] = Settings.absolute(Settings.shared['path'] || "shared")
Settings['encrypted_settings'] ||= Settingslogic.new({})
Settings['encrypted_settings'] ||= {}
Settings.encrypted_settings['path'] ||= File.join(Settings.shared['path'], "encrypted_settings")
Settings.encrypted_settings['path'] = Settings.absolute(Settings.encrypted_settings['path'])
Settings['ldap'] ||= Settingslogic.new({})
Settings['ldap'] ||= {}
Settings.ldap['enabled'] = false if Settings.ldap['enabled'].nil?
Settings.ldap['servers'] ||= Settingslogic.new({})
Settings.ldap['servers'] ||= {}
Settings.ldap['prevent_ldap_sign_in'] = false if Settings.ldap['prevent_ldap_sign_in'].blank?
Settings.ldap['secret_file'] = Settings.absolute(Settings.ldap['secret_file'] || File.join(Settings.encrypted_settings['path'], "ldap.yaml.enc"))
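
Two defaulting idioms alternate throughout this initializer: ||= for values where nil is the only "unset" state, and an explicit nil check (= false if ...nil?) for booleans, because ||= would silently overwrite an operator's explicit false. A plain-Ruby illustration, using the omniauth default from a later hunk:

# Why booleans use an explicit nil check instead of '||=':
omniauth = { 'enabled' => false } # operator explicitly disabled the feature

omniauth['enabled'] ||= true                           # => true: '||=' clobbers false
omniauth = { 'enabled' => false }
omniauth['enabled'] = true if omniauth['enabled'].nil? # => false: explicit setting preserved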
@@ -37,8 +37,6 @@ if Settings.ldap['enabled'] || Rails.env.test?
end
Settings.ldap['servers'].each do |key, server|
server = Settingslogic.new(server)
server['label'] ||= 'LDAP'
server['timeout'] ||= 10.seconds
server['block_auto_created_users'] = false if server['block_auto_created_users'].nil?
@@ -87,7 +85,7 @@ if Settings.ldap['enabled'] || Rails.env.test?
end
end
Settings['omniauth'] ||= Settingslogic.new({})
Settings['omniauth'] ||= {}
Settings.omniauth['enabled'] = true if Settings.omniauth['enabled'].nil?
Settings.omniauth['auto_sign_in_with_provider'] = false if Settings.omniauth['auto_sign_in_with_provider'].nil?
Settings.omniauth['allow_single_sign_on'] = false if Settings.omniauth['allow_single_sign_on'].nil?
@@ -114,9 +112,9 @@ if Settings.omniauth['sync_email_from_provider']
end
Settings.omniauth['providers'] ||= []
Settings.omniauth['cas3'] ||= Settingslogic.new({})
Settings.omniauth['cas3'] ||= {}
Settings.omniauth.cas3['session_duration'] ||= 8.hours
Settings.omniauth['session_tickets'] ||= Settingslogic.new({})
Settings.omniauth['session_tickets'] ||= {}
Settings.omniauth.session_tickets['cas3'] = 'ticket'
# Handle backward compatibility with the renamed kerberos_spnego provider
@@ -152,7 +150,7 @@ if github_settings
github_settings['url'] = github_default_url
end
github_settings["args"] ||= Settingslogic.new({})
github_settings["args"] ||= {}
github_settings["args"]["client_options"] =
if github_settings["url"].include?(github_default_url)
@@ -172,7 +170,7 @@ saml_provider_enabled = Settings.omniauth.providers.any? do |provider|
end
if Gitlab.ee? && Rails.env.test? && !saml_provider_enabled
Settings.omniauth.providers << Settingslogic.new({ 'name' => 'group_saml' })
Settings.omniauth.providers << GitlabSettings::Options.build({ 'name' => 'group_saml' })
end
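
GitlabSettings::Options.build is the in-tree replacement for inline Settingslogic.new calls: it wraps a plain hash while keeping the method-style reads this file depends on (for example Settings.omniauth.cas3 above). A small sketch of the expected round trip; hedged, since GitlabSettings::Options is GitLab-internal:

# Sketch of the expected behavior of the internal GitlabSettings::Options class.
provider = GitlabSettings::Options.build({ 'name' => 'group_saml' })

provider['name'] # => "group_saml" (hash-style access)
provider.name    # => "group_saml" (method-style access, as Settingslogic offered)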
Settings['issues_tracker'] ||= {}
@@ -180,7 +178,7 @@ Settings['issues_tracker'] ||= {}
#
# GitLab
#
Settings['gitlab'] ||= Settingslogic.new({})
Settings['gitlab'] ||= {}
Settings.gitlab['default_project_creation'] ||= ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS
Settings.gitlab['default_project_deletion_protection'] ||= false
Settings.gitlab['default_projects_limit'] ||= 100000
@@ -254,7 +252,7 @@ end
# Elasticsearch
#
Gitlab.ee do
Settings['elasticsearch'] ||= Settingslogic.new({})
Settings['elasticsearch'] ||= {}
Settings.elasticsearch['enabled'] = false if Settings.elasticsearch['enabled'].nil?
Settings.elasticsearch['url'] = ENV['ELASTIC_URL'] || "http://localhost:9200"
Settings.elasticsearch['indexer_path'] ||= Gitlab::Utils.which('gitlab-elasticsearch-indexer')
@@ -263,7 +261,7 @@ end
#
# CI
#
Settings['gitlab_ci'] ||= Settingslogic.new({})
Settings['gitlab_ci'] ||= {}
Settings.gitlab_ci['shared_runners_enabled'] = true if Settings.gitlab_ci['shared_runners_enabled'].nil?
Settings.gitlab_ci['builds_path'] = Settings.absolute(Settings.gitlab_ci['builds_path'] || "builds/")
Settings.gitlab_ci['url'] ||= Settings.__send__(:build_gitlab_ci_url)
@@ -272,7 +270,7 @@ Settings.gitlab_ci['component_fqdn'] ||= Settings.__send__(:build_ci_compon
#
# CI Secure Files
#
Settings['ci_secure_files'] ||= Settingslogic.new({})
Settings['ci_secure_files'] ||= {}
Settings.ci_secure_files['enabled'] = true if Settings.ci_secure_files['enabled'].nil?
Settings.ci_secure_files['storage_path'] = Settings.absolute(Settings.ci_secure_files['storage_path'] || File.join(Settings.shared['path'], "ci_secure_files"))
Settings.ci_secure_files['object_store'] = ObjectStoreSettings.legacy_parse(Settings.ci_secure_files['object_store'], 'secure_files')
@@ -280,7 +278,7 @@ Settings.ci_secure_files['object_store'] = ObjectStoreSettings.legacy_parse(Sett
#
# Reply by email
#
Settings['incoming_email'] ||= Settingslogic.new({})
Settings['incoming_email'] ||= {}
Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'].nil?
Settings.incoming_email['inbox_method'] ||= 'imap'
Settings.incoming_email['encrypted_secret_file'] = Settings.absolute(Settings.incoming_email['encrypted_secret_file'] || File.join(Settings.encrypted_settings['path'], "incoming_email.yaml.enc"))
@@ -288,14 +286,14 @@ Settings.incoming_email['encrypted_secret_file'] = Settings.absolute(Settings.in
#
# Service desk email
#
Settings['service_desk_email'] ||= Settingslogic.new({})
Settings['service_desk_email'] ||= {}
Settings.service_desk_email['enabled'] = false if Settings.service_desk_email['enabled'].nil?
Settings.service_desk_email['encrypted_secret_file'] = Settings.absolute(Settings.service_desk_email['encrypted_secret_file'] || File.join(Settings.encrypted_settings['path'], "service_desk_email.yaml.enc"))
#
# Build Artifacts
#
Settings['artifacts'] ||= Settingslogic.new({})
Settings['artifacts'] ||= {}
Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
# Settings.artifacts['path'] is deprecated, use `storage_path` instead
@@ -306,7 +304,7 @@ Settings.artifacts['object_store'] = ObjectStoreSettings.legacy_parse(Settings.a
#
# Registry
#
Settings['registry'] ||= Settingslogic.new({})
Settings['registry'] ||= {}
Settings.registry['enabled'] ||= false
Settings.registry['host'] ||= "example.com"
Settings.registry['port'] ||= nil
@@ -320,7 +318,7 @@ Settings.registry['notifications'] ||= []
#
# Error Reporting and Logging with Sentry
#
Settings['sentry'] ||= Settingslogic.new({})
Settings['sentry'] ||= {}
Settings.sentry['enabled'] ||= false
Settings.sentry['dsn'] ||= nil
Settings.sentry['environment'] ||= nil
@@ -329,7 +327,7 @@ Settings.sentry['clientside_dsn'] ||= nil
#
# Pages
#
Settings['pages'] ||= Settingslogic.new({})
Settings['pages'] ||= {}
Settings['pages'] = ::Gitlab::Pages::Settings.new(Settings.pages) # For path access detection https://gitlab.com/gitlab-org/gitlab/-/issues/230702
Settings.pages['enabled'] = false if Settings.pages['enabled'].nil?
Settings.pages['access_control'] = false if Settings.pages['access_control'].nil?
@@ -347,14 +345,14 @@ Settings.pages['secret_file'] ||= Rails.root.join('.gitlab_pages_secret')
# this will allow us to migrate existing instances with NFS more easily
Settings.pages['storage_path'] = Settings.pages['path']
Settings.pages['object_store'] = ObjectStoreSettings.legacy_parse(Settings.pages['object_store'], 'pages')
Settings.pages['local_store'] ||= Settingslogic.new({})
Settings.pages['local_store'] ||= {}
Settings.pages['local_store']['path'] = Settings.absolute(Settings.pages['local_store']['path'] || File.join(Settings.shared['path'], "pages"))
Settings.pages['local_store']['enabled'] = true if Settings.pages['local_store']['enabled'].nil?
#
# GitLab documentation
#
Settings['gitlab_docs'] ||= Settingslogic.new({})
Settings['gitlab_docs'] ||= {}
Settings.gitlab_docs['enabled'] ||= false
Settings.gitlab_docs['host'] = nil unless Settings.gitlab_docs.enabled
@@ -362,28 +360,28 @@ Settings.gitlab_docs['host'] = nil unless Settings.gitlab_docs.enabled
# Geo
#
Gitlab.ee do
Settings['geo'] ||= Settingslogic.new({})
Settings['geo'] ||= {}
# For backwards compatibility, default to gitlab_url, ensuring it ends with "/"
Settings.geo['node_name'] = Settings.geo['node_name'].presence || Settings.gitlab['url'].chomp('/').concat('/')
#
# Registry replication
#
Settings.geo['registry_replication'] ||= Settingslogic.new({})
Settings.geo['registry_replication'] ||= {}
Settings.geo.registry_replication['enabled'] ||= false
end
#
# Unleash
#
Settings['feature_flags'] ||= Settingslogic.new({})
Settings.feature_flags['unleash'] ||= Settingslogic.new({})
Settings['feature_flags'] ||= {}
Settings.feature_flags['unleash'] ||= {}
Settings.feature_flags.unleash['enabled'] = false if Settings.feature_flags.unleash['enabled'].nil?
#
# External merge request diffs
#
Settings['external_diffs'] ||= Settingslogic.new({})
Settings['external_diffs'] ||= {}
Settings.external_diffs['enabled'] = false if Settings.external_diffs['enabled'].nil?
Settings.external_diffs['when'] = 'always' if Settings.external_diffs['when'].nil?
Settings.external_diffs['storage_path'] = Settings.absolute(Settings.external_diffs['storage_path'] || File.join(Settings.shared['path'], 'external-diffs'))
@@ -392,7 +390,7 @@ Settings.external_diffs['object_store'] = ObjectStoreSettings.legacy_parse(Setti
#
# Git LFS
#
Settings['lfs'] ||= Settingslogic.new({})
Settings['lfs'] ||= {}
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
Settings.lfs['object_store'] = ObjectStoreSettings.legacy_parse(Settings.lfs['object_store'], 'lfs')
@@ -400,7 +398,7 @@ Settings.lfs['object_store'] = ObjectStoreSettings.legacy_parse(Settings.lfs['ob
#
# Uploads
#
Settings['uploads'] ||= Settingslogic.new({})
Settings['uploads'] ||= {}
Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
Settings.uploads['object_store'] = ObjectStoreSettings.legacy_parse(Settings.uploads['object_store'], 'uploads')
@@ -409,7 +407,7 @@ Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
#
# Packages
#
Settings['packages'] ||= Settingslogic.new({})
Settings['packages'] ||= {}
Settings.packages['enabled'] = true if Settings.packages['enabled'].nil?
Settings.packages['dpkg_deb_path'] = '/usr/bin/dpkg-deb' if Settings.packages['dpkg_deb_path'].nil?
Settings.packages['storage_path'] = Settings.absolute(Settings.packages['storage_path'] || File.join(Settings.shared['path'], "packages"))
@@ -418,7 +416,7 @@ Settings.packages['object_store'] = ObjectStoreSettings.legacy_parse(Settings.p
#
# Dependency Proxy
#
Settings['dependency_proxy'] ||= Settingslogic.new({})
Settings['dependency_proxy'] ||= {}
Settings.dependency_proxy['enabled'] = true if Settings.dependency_proxy['enabled'].nil?
Settings.dependency_proxy['storage_path'] = Settings.absolute(Settings.dependency_proxy['storage_path'] || File.join(Settings.shared['path'], "dependency_proxy"))
Settings.dependency_proxy['object_store'] = ObjectStoreSettings.legacy_parse(Settings.dependency_proxy['object_store'], 'dependency_proxy')
@@ -432,7 +430,7 @@ Settings.dependency_proxy['enabled'] = false unless Gitlab::Runtime.puma?
#
# Terraform state
#
Settings['terraform_state'] ||= Settingslogic.new({})
Settings['terraform_state'] ||= {}
Settings.terraform_state['enabled'] = true if Settings.terraform_state['enabled'].nil?
Settings.terraform_state['storage_path'] = Settings.absolute(Settings.terraform_state['storage_path'] || File.join(Settings.shared['path'], "terraform_state"))
Settings.terraform_state['object_store'] = ObjectStoreSettings.legacy_parse(Settings.terraform_state['object_store'], 'terraform_state')
@@ -440,14 +438,14 @@ Settings.terraform_state['object_store'] = ObjectStoreSettings.legacy_parse(Sett
#
# Mattermost
#
Settings['mattermost'] ||= Settingslogic.new({})
Settings['mattermost'] ||= {}
Settings.mattermost['enabled'] = false if Settings.mattermost['enabled'].nil?
Settings.mattermost['host'] = nil unless Settings.mattermost.enabled
#
# Jira Connect (GitLab for Jira Cloud App)
#
Settings['jira_connect'] ||= Settingslogic.new({})
Settings['jira_connect'] ||= {}
Settings.jira_connect['atlassian_js_url'] ||= 'https://connect-cdn.atl-paas.net/all.js'
Settings.jira_connect['enforce_jira_base_url_https'] = true if Settings.jira_connect['enforce_jira_base_url_https'].nil?
@@ -456,7 +454,7 @@ Settings.jira_connect['additional_iframe_ancestors'] ||= []
#
# Gravatar
#
Settings['gravatar'] ||= Settingslogic.new({})
Settings['gravatar'] ||= {}
Settings.gravatar['enabled'] = true if Settings.gravatar['enabled'].nil?
Settings.gravatar['plain_url'] ||= 'https://www.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon'
Settings.gravatar['ssl_url'] ||= 'https://secure.gravatar.com/avatar/%{hash}?s=%{size}&d=identicon'
@@ -465,395 +463,395 @@ Settings.gravatar['host'] = Settings.host_without_www(Settings.gravatar[
#
# Cron Jobs
#
Settings['cron_jobs'] ||= Settingslogic.new({})
Settings['cron_jobs'] ||= {}
if Gitlab.ee? && Settings['ee_cron_jobs']
Settings.cron_jobs.merge!(Settings.ee_cron_jobs)
end
Settings.cron_jobs['poll_interval'] ||= nil
Settings.cron_jobs['stuck_ci_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_ci_jobs_worker'] ||= {}
Settings.cron_jobs['stuck_ci_jobs_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['stuck_ci_jobs_worker']['job_class'] = 'StuckCiJobsWorker'
Settings.cron_jobs['pipeline_schedule_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['pipeline_schedule_worker'] ||= {}
Settings.cron_jobs['pipeline_schedule_worker']['cron'] ||= '3-59/10 * * * *'
Settings.cron_jobs['pipeline_schedule_worker']['job_class'] = 'PipelineScheduleWorker'
Settings.cron_jobs['expire_build_artifacts_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['expire_build_artifacts_worker'] ||= {}
Settings.cron_jobs['expire_build_artifacts_worker']['cron'] ||= '*/7 * * * *'
Settings.cron_jobs['expire_build_artifacts_worker']['job_class'] = 'ExpireBuildArtifactsWorker'
Settings.cron_jobs['update_locked_unknown_artifacts_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['update_locked_unknown_artifacts_worker'] ||= {}
Settings.cron_jobs['update_locked_unknown_artifacts_worker']['cron'] ||= '*/7 * * * *'
Settings.cron_jobs['update_locked_unknown_artifacts_worker']['job_class'] = 'Ci::UpdateLockedUnknownArtifactsWorker'
Settings.cron_jobs['ci_pipelines_expire_artifacts_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_pipelines_expire_artifacts_worker'] ||= {}
Settings.cron_jobs['ci_pipelines_expire_artifacts_worker']['cron'] ||= '*/23 * * * *'
Settings.cron_jobs['ci_pipelines_expire_artifacts_worker']['job_class'] = 'Ci::PipelineArtifacts::ExpireArtifactsWorker'
Settings.cron_jobs['ci_schedule_delete_objects_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_schedule_delete_objects_worker'] ||= {}
Settings.cron_jobs['ci_schedule_delete_objects_worker']['cron'] ||= '*/16 * * * *'
Settings.cron_jobs['ci_schedule_delete_objects_worker']['job_class'] = 'Ci::ScheduleDeleteObjectsCronWorker'
Settings.cron_jobs['environments_auto_stop_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['environments_auto_stop_cron_worker'] ||= {}
Settings.cron_jobs['environments_auto_stop_cron_worker']['cron'] ||= '24 * * * *'
Settings.cron_jobs['environments_auto_stop_cron_worker']['job_class'] = 'Environments::AutoStopCronWorker'
Settings.cron_jobs['environments_auto_delete_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['environments_auto_delete_cron_worker'] ||= {}
Settings.cron_jobs['environments_auto_delete_cron_worker']['cron'] ||= '34 * * * *'
Settings.cron_jobs['environments_auto_delete_cron_worker']['job_class'] = 'Environments::AutoDeleteCronWorker'
Settings.cron_jobs['repository_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['repository_check_worker'] ||= {}
Settings.cron_jobs['repository_check_worker']['cron'] ||= '20 * * * *'
Settings.cron_jobs['repository_check_worker']['job_class'] = 'RepositoryCheck::DispatchWorker'
Settings.cron_jobs['admin_email_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['admin_email_worker'] ||= {}
Settings.cron_jobs['admin_email_worker']['cron'] ||= '0 0 * * 0'
Settings.cron_jobs['admin_email_worker']['job_class'] = 'AdminEmailWorker'
Settings.cron_jobs['personal_access_tokens_expiring_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['personal_access_tokens_expiring_worker'] ||= {}
Settings.cron_jobs['personal_access_tokens_expiring_worker']['cron'] ||= '0 1 * * *'
Settings.cron_jobs['personal_access_tokens_expiring_worker']['job_class'] = 'PersonalAccessTokens::ExpiringWorker'
Settings.cron_jobs['personal_access_tokens_expired_notification_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['personal_access_tokens_expired_notification_worker'] ||= {}
Settings.cron_jobs['personal_access_tokens_expired_notification_worker']['cron'] ||= '0 2 * * *'
Settings.cron_jobs['personal_access_tokens_expired_notification_worker']['job_class'] = 'PersonalAccessTokens::ExpiredNotificationWorker'
Settings.cron_jobs['repository_archive_cache_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['repository_archive_cache_worker'] ||= {}
Settings.cron_jobs['repository_archive_cache_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['repository_archive_cache_worker']['job_class'] = 'RepositoryArchiveCacheWorker'
Settings.cron_jobs['import_export_project_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['import_export_project_cleanup_worker'] ||= {}
Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker'
Settings.cron_jobs['ci_archive_traces_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_archive_traces_cron_worker'] ||= {}
Settings.cron_jobs['ci_archive_traces_cron_worker']['cron'] ||= '17 * * * *'
Settings.cron_jobs['ci_archive_traces_cron_worker']['job_class'] = 'Ci::ArchiveTracesCronWorker'
Settings.cron_jobs['remove_expired_members_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['remove_expired_members_worker'] ||= {}
Settings.cron_jobs['remove_expired_members_worker']['cron'] ||= '10 0 * * *'
Settings.cron_jobs['remove_expired_members_worker']['job_class'] = 'RemoveExpiredMembersWorker'
Settings.cron_jobs['remove_expired_group_links_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['remove_expired_group_links_worker'] ||= {}
Settings.cron_jobs['remove_expired_group_links_worker']['cron'] ||= '10 0 * * *'
Settings.cron_jobs['remove_expired_group_links_worker']['job_class'] = 'RemoveExpiredGroupLinksWorker'
Settings.cron_jobs['remove_unaccepted_member_invites_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['remove_unaccepted_member_invites_worker'] ||= {}
Settings.cron_jobs['remove_unaccepted_member_invites_worker']['cron'] ||= '10 15 * * *'
Settings.cron_jobs['remove_unaccepted_member_invites_worker']['job_class'] = 'RemoveUnacceptedMemberInvitesWorker'
Settings.cron_jobs['prune_old_events_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['prune_old_events_worker'] ||= {}
Settings.cron_jobs['prune_old_events_worker']['cron'] ||= '0 */6 * * *'
Settings.cron_jobs['prune_old_events_worker']['job_class'] = 'PruneOldEventsWorker'
Settings.cron_jobs['gitlab_export_prune_project_export_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['gitlab_export_prune_project_export_jobs_worker'] ||= {}
Settings.cron_jobs['gitlab_export_prune_project_export_jobs_worker']['cron'] ||= '30 3 * * */7'
Settings.cron_jobs['gitlab_export_prune_project_export_jobs_worker']['job_class'] = 'Gitlab::Export::PruneProjectExportJobsWorker'
Settings.cron_jobs['trending_projects_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['trending_projects_worker'] ||= {}
Settings.cron_jobs['trending_projects_worker']['cron'] = '0 1 * * *'
Settings.cron_jobs['trending_projects_worker']['job_class'] = 'TrendingProjectsWorker'
Settings.cron_jobs['remove_unreferenced_lfs_objects_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['remove_unreferenced_lfs_objects_worker'] ||= {}
Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['cron'] ||= '20 0 * * *'
Settings.cron_jobs['remove_unreferenced_lfs_objects_worker']['job_class'] = 'RemoveUnreferencedLfsObjectsWorker'
Settings.cron_jobs['bulk_imports_stuck_import_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['bulk_imports_stuck_import_worker'] ||= {}
Settings.cron_jobs['bulk_imports_stuck_import_worker']['cron'] ||= '0 */4 * * *'
Settings.cron_jobs['bulk_imports_stuck_import_worker']['job_class'] = 'BulkImports::StuckImportWorker'
Settings.cron_jobs['import_stuck_project_import_jobs'] ||= Settingslogic.new({})
Settings.cron_jobs['import_stuck_project_import_jobs'] ||= {}
Settings.cron_jobs['import_stuck_project_import_jobs']['cron'] ||= '15 * * * *'
Settings.cron_jobs['import_stuck_project_import_jobs']['job_class'] = 'Gitlab::Import::StuckProjectImportJobsWorker'
Settings.cron_jobs['jira_import_stuck_jira_import_jobs'] ||= Settingslogic.new({})
Settings.cron_jobs['jira_import_stuck_jira_import_jobs'] ||= {}
Settings.cron_jobs['jira_import_stuck_jira_import_jobs']['cron'] ||= '* 0/15 * * *'
Settings.cron_jobs['jira_import_stuck_jira_import_jobs']['job_class'] = 'Gitlab::JiraImport::StuckJiraImportJobsWorker'
Settings.cron_jobs['stuck_export_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_export_jobs_worker'] ||= {}
Settings.cron_jobs['stuck_export_jobs_worker']['cron'] ||= '30 * * * *'
Settings.cron_jobs['stuck_export_jobs_worker']['job_class'] = 'StuckExportJobsWorker'
Settings.cron_jobs['gitlab_service_ping_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['gitlab_service_ping_worker'] ||= {}
Settings.cron_jobs['gitlab_service_ping_worker']['cron'] ||= nil # This is dynamically loaded in the sidekiq initializer
Settings.cron_jobs['gitlab_service_ping_worker']['job_class'] = 'GitlabServicePingWorker'
Settings.cron_jobs['stuck_merge_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['stuck_merge_jobs_worker'] ||= {}
Settings.cron_jobs['stuck_merge_jobs_worker']['cron'] ||= '0 */2 * * *'
Settings.cron_jobs['stuck_merge_jobs_worker']['job_class'] = 'StuckMergeJobsWorker'
Settings.cron_jobs['pages_domain_verification_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['pages_domain_verification_cron_worker'] ||= {}
Settings.cron_jobs['pages_domain_verification_cron_worker']['cron'] ||= '*/15 * * * *'
Settings.cron_jobs['pages_domain_verification_cron_worker']['job_class'] = 'PagesDomainVerificationCronWorker'
Settings.cron_jobs['pages_domain_removal_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['pages_domain_removal_cron_worker'] ||= {}
Settings.cron_jobs['pages_domain_removal_cron_worker']['cron'] ||= '47 0 * * *'
Settings.cron_jobs['pages_domain_removal_cron_worker']['job_class'] = 'PagesDomainRemovalCronWorker'
Settings.cron_jobs['pages_domain_ssl_renewal_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['pages_domain_ssl_renewal_cron_worker'] ||= {}
Settings.cron_jobs['pages_domain_ssl_renewal_cron_worker']['cron'] ||= '*/10 * * * *'
Settings.cron_jobs['pages_domain_ssl_renewal_cron_worker']['job_class'] = 'PagesDomainSslRenewalCronWorker'
Settings.cron_jobs['issue_due_scheduler_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['issue_due_scheduler_worker'] ||= {}
Settings.cron_jobs['issue_due_scheduler_worker']['cron'] ||= '50 00 * * *'
Settings.cron_jobs['issue_due_scheduler_worker']['job_class'] = 'IssueDueSchedulerWorker'
Settings.cron_jobs['metrics_dashboard_schedule_annotations_prune_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['metrics_dashboard_schedule_annotations_prune_worker'] ||= {}
Settings.cron_jobs['metrics_dashboard_schedule_annotations_prune_worker']['cron'] ||= '0 1 * * *'
Settings.cron_jobs['metrics_dashboard_schedule_annotations_prune_worker']['job_class'] = 'Metrics::Dashboard::ScheduleAnnotationsPruneWorker'
Settings.cron_jobs['schedule_migrate_external_diffs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['schedule_migrate_external_diffs_worker'] ||= {}
Settings.cron_jobs['schedule_migrate_external_diffs_worker']['cron'] ||= '15 * * * *'
Settings.cron_jobs['schedule_migrate_external_diffs_worker']['job_class'] = 'ScheduleMigrateExternalDiffsWorker'
Settings.cron_jobs['namespaces_prune_aggregation_schedules_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['namespaces_prune_aggregation_schedules_worker'] ||= {}
Settings.cron_jobs['namespaces_prune_aggregation_schedules_worker']['cron'] ||= '5 1 * * *'
Settings.cron_jobs['namespaces_prune_aggregation_schedules_worker']['job_class'] = 'Namespaces::PruneAggregationSchedulesWorker'
Settings.cron_jobs['container_expiration_policy_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['container_expiration_policy_worker'] ||= {}
Settings.cron_jobs['container_expiration_policy_worker']['cron'] ||= '50 * * * *'
Settings.cron_jobs['container_expiration_policy_worker']['job_class'] = 'ContainerExpirationPolicyWorker'
Settings.cron_jobs['container_registry_migration_guard_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['container_registry_migration_guard_worker'] ||= {}
Settings.cron_jobs['container_registry_migration_guard_worker']['cron'] ||= '*/10 * * * *'
Settings.cron_jobs['container_registry_migration_guard_worker']['job_class'] = 'ContainerRegistry::Migration::GuardWorker'
Settings.cron_jobs['container_registry_migration_observer_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['container_registry_migration_observer_worker'] ||= {}
Settings.cron_jobs['container_registry_migration_observer_worker']['cron'] ||= '*/30 * * * *'
Settings.cron_jobs['container_registry_migration_observer_worker']['job_class'] = 'ContainerRegistry::Migration::ObserverWorker'
Settings.cron_jobs['container_registry_migration_enqueuer_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['container_registry_migration_enqueuer_worker'] ||= {}
Settings.cron_jobs['container_registry_migration_enqueuer_worker']['cron'] ||= '15,45 */1 * * *'
Settings.cron_jobs['container_registry_migration_enqueuer_worker']['job_class'] = 'ContainerRegistry::Migration::EnqueuerWorker'
Settings.cron_jobs['cleanup_container_registry_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['cleanup_container_registry_worker'] ||= {}
Settings.cron_jobs['cleanup_container_registry_worker']['cron'] ||= '*/5 * * * *'
Settings.cron_jobs['cleanup_container_registry_worker']['job_class'] = 'ContainerRegistry::CleanupWorker'
Settings.cron_jobs['image_ttl_group_policy_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['image_ttl_group_policy_worker'] ||= {}
Settings.cron_jobs['image_ttl_group_policy_worker']['cron'] ||= '40 0 * * *'
Settings.cron_jobs['image_ttl_group_policy_worker']['job_class'] = 'DependencyProxy::ImageTtlGroupPolicyWorker'
Settings.cron_jobs['cleanup_dependency_proxy_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['cleanup_dependency_proxy_worker'] ||= {}
Settings.cron_jobs['cleanup_dependency_proxy_worker']['cron'] ||= '20 3,15 * * *'
Settings.cron_jobs['cleanup_dependency_proxy_worker']['job_class'] = 'DependencyProxy::CleanupDependencyProxyWorker'
Settings.cron_jobs['cleanup_package_registry_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['cleanup_package_registry_worker'] ||= {}
Settings.cron_jobs['cleanup_package_registry_worker']['cron'] ||= '20 * * * *'
Settings.cron_jobs['cleanup_package_registry_worker']['job_class'] = 'Packages::CleanupPackageRegistryWorker'
Settings.cron_jobs['x509_issuer_crl_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['x509_issuer_crl_check_worker'] ||= {}
Settings.cron_jobs['x509_issuer_crl_check_worker']['cron'] ||= '30 1 * * *'
Settings.cron_jobs['x509_issuer_crl_check_worker']['job_class'] = 'X509IssuerCrlCheckWorker'
Settings.cron_jobs['users_create_statistics_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['users_create_statistics_worker'] ||= {}
Settings.cron_jobs['users_create_statistics_worker']['cron'] ||= '2 15 * * *'
Settings.cron_jobs['users_create_statistics_worker']['job_class'] = 'Users::CreateStatisticsWorker'
Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker'] ||= {}
Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker']['cron'] ||= '45 1 1,15 * *'
Settings.cron_jobs['authorized_project_update_periodic_recalculate_worker']['job_class'] = 'AuthorizedProjectUpdate::PeriodicRecalculateWorker'
Settings.cron_jobs['update_container_registry_info_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['update_container_registry_info_worker'] ||= {}
Settings.cron_jobs['update_container_registry_info_worker']['cron'] ||= '0 0 * * *'
Settings.cron_jobs['update_container_registry_info_worker']['job_class'] = 'UpdateContainerRegistryInfoWorker'
Settings.cron_jobs['postgres_dynamic_partitions_manager'] ||= Settingslogic.new({})
Settings.cron_jobs['postgres_dynamic_partitions_manager'] ||= {}
Settings.cron_jobs['postgres_dynamic_partitions_manager']['cron'] ||= '21 */6 * * *'
Settings.cron_jobs['postgres_dynamic_partitions_manager']['job_class'] ||= 'Database::PartitionManagementWorker'
Settings.cron_jobs['postgres_dynamic_partitions_dropper'] ||= Settingslogic.new({})
Settings.cron_jobs['postgres_dynamic_partitions_dropper'] ||= {}
Settings.cron_jobs['postgres_dynamic_partitions_dropper']['cron'] ||= '45 12 * * *'
Settings.cron_jobs['postgres_dynamic_partitions_dropper']['job_class'] ||= 'Database::DropDetachedPartitionsWorker'
Settings.cron_jobs['ci_platform_metrics_update_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_platform_metrics_update_cron_worker'] ||= {}
Settings.cron_jobs['ci_platform_metrics_update_cron_worker']['cron'] ||= '47 9 * * *'
Settings.cron_jobs['ci_platform_metrics_update_cron_worker']['job_class'] = 'CiPlatformMetricsUpdateCronWorker'
Settings.cron_jobs['analytics_usage_trends_count_job_trigger_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['analytics_usage_trends_count_job_trigger_worker'] ||= {}
Settings.cron_jobs['analytics_usage_trends_count_job_trigger_worker']['cron'] ||= '50 23 */1 * *'
Settings.cron_jobs['analytics_usage_trends_count_job_trigger_worker']['job_class'] ||= 'Analytics::UsageTrends::CountJobTriggerWorker'
Settings.cron_jobs['member_invitation_reminder_emails_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['member_invitation_reminder_emails_worker'] ||= {}
Settings.cron_jobs['member_invitation_reminder_emails_worker']['cron'] ||= '0 0 * * *'
Settings.cron_jobs['member_invitation_reminder_emails_worker']['job_class'] = 'MemberInvitationReminderEmailsWorker'
Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker'] ||= {}
Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['schedule_merge_request_cleanup_refs_worker']['job_class'] = 'ScheduleMergeRequestCleanupRefsWorker'
Settings.cron_jobs['manage_evidence_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['manage_evidence_worker'] ||= {}
Settings.cron_jobs['manage_evidence_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['manage_evidence_worker']['job_class'] = 'Releases::ManageEvidenceWorker'
Settings.cron_jobs['user_status_cleanup_batch_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['user_status_cleanup_batch_worker'] ||= {}
Settings.cron_jobs['user_status_cleanup_batch_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['user_status_cleanup_batch_worker']['job_class'] = 'UserStatusCleanup::BatchWorker'
Settings.cron_jobs['ssh_keys_expired_notification_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ssh_keys_expired_notification_worker'] ||= {}
Settings.cron_jobs['ssh_keys_expired_notification_worker']['cron'] ||= '0 2,14 * * *'
Settings.cron_jobs['ssh_keys_expired_notification_worker']['job_class'] = 'SshKeys::ExpiredNotificationWorker'
Settings.cron_jobs['namespaces_in_product_marketing_emails_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['namespaces_in_product_marketing_emails_worker'] ||= {}
Settings.cron_jobs['namespaces_in_product_marketing_emails_worker']['cron'] ||= '0 16 * * *'
Settings.cron_jobs['namespaces_in_product_marketing_emails_worker']['job_class'] = 'Namespaces::InProductMarketingEmailsWorker'
Settings.cron_jobs['ssh_keys_expiring_soon_notification_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ssh_keys_expiring_soon_notification_worker'] ||= {}
Settings.cron_jobs['ssh_keys_expiring_soon_notification_worker']['cron'] ||= '0 1 * * *'
Settings.cron_jobs['ssh_keys_expiring_soon_notification_worker']['job_class'] = 'SshKeys::ExpiringSoonNotificationWorker'
Settings.cron_jobs['users_deactivate_dormant_users_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['users_deactivate_dormant_users_worker'] ||= {}
Settings.cron_jobs['users_deactivate_dormant_users_worker']['cron'] ||= '21,42 0-4 * * *'
Settings.cron_jobs['users_deactivate_dormant_users_worker']['job_class'] = 'Users::DeactivateDormantUsersWorker'
Settings.cron_jobs['ci_delete_unit_tests_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_delete_unit_tests_worker'] ||= {}
Settings.cron_jobs['ci_delete_unit_tests_worker']['cron'] ||= '0 0 * * *'
Settings.cron_jobs['ci_delete_unit_tests_worker']['job_class'] = 'Ci::DeleteUnitTestsWorker'
Settings.cron_jobs['batched_background_migrations_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['batched_background_migrations_worker'] ||= {}
Settings.cron_jobs['batched_background_migrations_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['batched_background_migrations_worker']['job_class'] = 'Database::BatchedBackgroundMigrationWorker'
Settings.cron_jobs['batched_background_migration_worker_ci_database'] ||= Settingslogic.new({})
Settings.cron_jobs['batched_background_migration_worker_ci_database'] ||= {}
Settings.cron_jobs['batched_background_migration_worker_ci_database']['cron'] ||= '* * * * *'
Settings.cron_jobs['batched_background_migration_worker_ci_database']['job_class'] = 'Database::BatchedBackgroundMigration::CiDatabaseWorker'
Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances'] ||= Settingslogic.new({})
Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances'] ||= {}
Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances']['cron'] ||= '*/15 * * * *'
Settings.cron_jobs['issues_reschedule_stuck_issue_rebalances']['job_class'] = 'Issues::RescheduleStuckIssueRebalancesWorker'
Settings.cron_jobs['clusters_integrations_check_prometheus_health_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['clusters_integrations_check_prometheus_health_worker'] ||= {}
Settings.cron_jobs['clusters_integrations_check_prometheus_health_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['clusters_integrations_check_prometheus_health_worker']['job_class'] = 'Clusters::Integrations::CheckPrometheusHealthWorker'
Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker'] ||= {}
Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker']['cron'] ||= '2/17 * * * *'
Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker']['job_class'] = 'Projects::ScheduleRefreshBuildArtifactsSizeStatisticsWorker'
Settings.cron_jobs['inactive_projects_deletion_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['inactive_projects_deletion_cron_worker'] ||= {}
Settings.cron_jobs['inactive_projects_deletion_cron_worker']['cron'] ||= '*/10 * * * *'
Settings.cron_jobs['inactive_projects_deletion_cron_worker']['job_class'] = 'Projects::InactiveProjectsDeletionCronWorker'
Settings.cron_jobs['loose_foreign_keys_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['loose_foreign_keys_cleanup_worker'] ||= {}
Settings.cron_jobs['loose_foreign_keys_cleanup_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['loose_foreign_keys_cleanup_worker']['job_class'] = 'LooseForeignKeys::CleanupWorker'
Settings.cron_jobs['ci_runner_versions_reconciliation_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_runner_versions_reconciliation_worker'] ||= {}
Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['cron'] ||= '@daily'
Settings.cron_jobs['ci_runner_versions_reconciliation_worker']['job_class'] = 'Ci::Runners::ReconcileExistingRunnerVersionsCronWorker'
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker'] ||= {}
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['cron'] ||= '*/2 * * * *'
Settings.cron_jobs['users_migrate_records_to_ghost_user_in_batches_worker']['job_class'] = 'Users::MigrateRecordsToGhostUserInBatchesWorker'
Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker'] ||= {}
Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker']['cron'] ||= '36 4 * * *'
Settings.cron_jobs['ci_runners_stale_machines_cleanup_worker']['job_class'] = 'Ci::Runners::StaleMachinesCleanupCronWorker'
Settings.cron_jobs['cleanup_dangling_debian_package_files_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['cleanup_dangling_debian_package_files_worker'] ||= {}
Settings.cron_jobs['cleanup_dangling_debian_package_files_worker']['cron'] ||= '20 21 * * *'
Settings.cron_jobs['cleanup_dangling_debian_package_files_worker']['job_class'] = 'Packages::Debian::CleanupDanglingPackageFilesWorker'
Settings.cron_jobs['global_metrics_update_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['global_metrics_update_worker'] ||= {}
Settings.cron_jobs['global_metrics_update_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['global_metrics_update_worker']['job_class'] ||= 'Metrics::GlobalMetricsUpdateWorker'
Gitlab.ee do
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= {}
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker']['cron'] ||= '0 1 * * *'
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker']['job_class'] = 'Analytics::DevopsAdoption::CreateAllSnapshotsWorker'
Settings.cron_jobs['analytics_cycle_analytics_incremental_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['analytics_cycle_analytics_incremental_worker'] ||= {}
Settings.cron_jobs['analytics_cycle_analytics_incremental_worker']['cron'] ||= '*/10 * * * *'
Settings.cron_jobs['analytics_cycle_analytics_incremental_worker']['job_class'] = 'Analytics::CycleAnalytics::IncrementalWorker'
Settings.cron_jobs['analytics_cycle_analytics_consistency_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['analytics_cycle_analytics_consistency_worker'] ||= {}
Settings.cron_jobs['analytics_cycle_analytics_consistency_worker']['cron'] ||= '*/30 * * * *'
Settings.cron_jobs['analytics_cycle_analytics_consistency_worker']['job_class'] = 'Analytics::CycleAnalytics::ConsistencyWorker'
Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker'] ||= {}
Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker']['cron'] ||= '44 * * * *'
Settings.cron_jobs['analytics_cycle_analytics_reaggregation_worker']['job_class'] = 'Analytics::CycleAnalytics::ReaggregationWorker'
Settings.cron_jobs['active_user_count_threshold_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['active_user_count_threshold_worker'] ||= {}
Settings.cron_jobs['active_user_count_threshold_worker']['cron'] ||= '0 12 * * *'
Settings.cron_jobs['active_user_count_threshold_worker']['job_class'] = 'ActiveUserCountThresholdWorker'
Settings.cron_jobs['adjourned_group_deletion_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['adjourned_group_deletion_worker'] ||= {}
Settings.cron_jobs['adjourned_group_deletion_worker']['cron'] ||= '0 2 * * *'
Settings.cron_jobs['adjourned_group_deletion_worker']['job_class'] = 'AdjournedGroupDeletionWorker'
Settings.cron_jobs['clear_shared_runners_minutes_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['clear_shared_runners_minutes_worker'] ||= {}
Settings.cron_jobs['clear_shared_runners_minutes_worker']['cron'] ||= '0 0 1 * *'
Settings.cron_jobs['clear_shared_runners_minutes_worker']['job_class'] = 'ClearSharedRunnersMinutesWorker'
Settings.cron_jobs['adjourned_projects_deletion_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['adjourned_projects_deletion_cron_worker'] ||= {}
Settings.cron_jobs['adjourned_projects_deletion_cron_worker']['cron'] ||= '0 7 * * *'
Settings.cron_jobs['adjourned_projects_deletion_cron_worker']['job_class'] = 'AdjournedProjectsDeletionCronWorker'
Settings.cron_jobs['geo_verification_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_verification_cron_worker'] ||= {}
Settings.cron_jobs['geo_verification_cron_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['geo_verification_cron_worker']['job_class'] ||= 'Geo::VerificationCronWorker'
Settings.cron_jobs['geo_sync_timeout_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_sync_timeout_cron_worker'] ||= {}
Settings.cron_jobs['geo_sync_timeout_cron_worker']['cron'] ||= '*/10 * * * *'
Settings.cron_jobs['geo_sync_timeout_cron_worker']['job_class'] ||= 'Geo::SyncTimeoutCronWorker'
Settings.cron_jobs['geo_secondary_usage_data_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_secondary_usage_data_cron_worker'] ||= {}
Settings.cron_jobs['geo_secondary_usage_data_cron_worker']['cron'] ||= '0 0 * * 0'
Settings.cron_jobs['geo_secondary_usage_data_cron_worker']['job_class'] ||= 'Geo::SecondaryUsageDataCronWorker'
Settings.cron_jobs['geo_registry_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_registry_sync_worker'] ||= {}
Settings.cron_jobs['geo_registry_sync_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_registry_sync_worker']['job_class'] ||= 'Geo::RegistrySyncWorker'
Settings.cron_jobs['geo_repository_registry_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_repository_registry_sync_worker'] ||= {}
Settings.cron_jobs['geo_repository_registry_sync_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_repository_registry_sync_worker']['job_class'] ||= 'Geo::RepositoryRegistrySyncWorker'
Settings.cron_jobs['geo_metrics_update_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_metrics_update_worker'] ||= {}
Settings.cron_jobs['geo_metrics_update_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_metrics_update_worker']['job_class'] ||= 'Geo::MetricsUpdateWorker'
Settings.cron_jobs['geo_prune_event_log_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_prune_event_log_worker'] ||= {}
Settings.cron_jobs['geo_prune_event_log_worker']['cron'] ||= '*/5 * * * *'
Settings.cron_jobs['geo_prune_event_log_worker']['job_class'] ||= 'Geo::PruneEventLogWorker'
Settings.cron_jobs['geo_repository_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_repository_sync_worker'] ||= {}
Settings.cron_jobs['geo_repository_sync_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_repository_sync_worker']['job_class'] ||= 'Geo::RepositorySyncWorker'
Settings.cron_jobs['geo_secondary_registry_consistency_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_secondary_registry_consistency_worker'] ||= {}
Settings.cron_jobs['geo_secondary_registry_consistency_worker']['cron'] ||= '* * * * *'
Settings.cron_jobs['geo_secondary_registry_consistency_worker']['job_class'] ||= 'Geo::Secondary::RegistryConsistencyWorker'
Settings.cron_jobs['geo_repository_verification_primary_batch_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_repository_verification_primary_batch_worker'] ||= {}
Settings.cron_jobs['geo_repository_verification_primary_batch_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_repository_verification_primary_batch_worker']['job_class'] ||= 'Geo::RepositoryVerification::Primary::BatchWorker'
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker'] ||= {}
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['job_class'] ||= 'Geo::RepositoryVerification::Secondary::SchedulerWorker'
Settings.cron_jobs['historical_data_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['historical_data_worker'] ||= {}
Settings.cron_jobs['historical_data_worker']['cron'] ||= '0 12 * * *'
Settings.cron_jobs['historical_data_worker']['job_class'] = 'HistoricalDataWorker'
Settings.cron_jobs['incident_sla_exceeded_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['incident_sla_exceeded_check_worker'] ||= {}
Settings.cron_jobs['incident_sla_exceeded_check_worker']['cron'] ||= '*/2 * * * *'
Settings.cron_jobs['incident_sla_exceeded_check_worker']['job_class'] = 'IncidentManagement::IncidentSlaExceededCheckWorker'
Settings.cron_jobs['incident_management_persist_oncall_rotation_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['incident_management_persist_oncall_rotation_worker'] ||= {}
Settings.cron_jobs['incident_management_persist_oncall_rotation_worker']['cron'] ||= '*/5 * * * *'
Settings.cron_jobs['incident_management_persist_oncall_rotation_worker']['job_class'] = 'IncidentManagement::OncallRotations::PersistAllRotationsShiftsJob'
Settings.cron_jobs['incident_management_schedule_escalation_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['incident_management_schedule_escalation_check_worker'] ||= {}
Settings.cron_jobs['incident_management_schedule_escalation_check_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['incident_management_schedule_escalation_check_worker']['job_class'] = 'IncidentManagement::PendingEscalations::ScheduleCheckCronWorker'
Settings.cron_jobs['import_software_licenses_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['import_software_licenses_worker'] ||= {}
Settings.cron_jobs['import_software_licenses_worker']['cron'] ||= '0 3 * * 0'
Settings.cron_jobs['import_software_licenses_worker']['job_class'] = 'ImportSoftwareLicensesWorker'
Settings.cron_jobs['ldap_group_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ldap_group_sync_worker'] ||= {}
Settings.cron_jobs['ldap_group_sync_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['ldap_group_sync_worker']['job_class'] = 'LdapAllGroupsSyncWorker'
Settings.cron_jobs['ldap_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ldap_sync_worker'] ||= {}
Settings.cron_jobs['ldap_sync_worker']['cron'] ||= '30 1 * * *'
Settings.cron_jobs['ldap_sync_worker']['job_class'] = 'LdapSyncWorker'
Settings.cron_jobs['elastic_index_bulk_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['elastic_index_bulk_cron_worker'] ||= {}
Settings.cron_jobs['elastic_index_bulk_cron_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['elastic_index_bulk_cron_worker']['job_class'] ||= 'ElasticIndexBulkCronWorker'
Settings.cron_jobs['elastic_index_initial_bulk_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['elastic_index_initial_bulk_cron_worker'] ||= {}
Settings.cron_jobs['elastic_index_initial_bulk_cron_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['elastic_index_initial_bulk_cron_worker']['job_class'] ||= 'ElasticIndexInitialBulkCronWorker'
Settings.cron_jobs['elastic_cluster_reindexing_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['elastic_cluster_reindexing_cron_worker'] ||= {}
Settings.cron_jobs['elastic_cluster_reindexing_cron_worker']['cron'] ||= '*/10 * * * *'
Settings.cron_jobs['elastic_cluster_reindexing_cron_worker']['job_class'] ||= 'ElasticClusterReindexingCronWorker'
Settings.cron_jobs['elastic_remove_expired_namespace_subscriptions_from_index_cron_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['elastic_remove_expired_namespace_subscriptions_from_index_cron_worker'] ||= {}
Settings.cron_jobs['elastic_remove_expired_namespace_subscriptions_from_index_cron_worker']['cron'] ||= '10 3 * * *'
Settings.cron_jobs['elastic_remove_expired_namespace_subscriptions_from_index_cron_worker']['job_class'] ||= 'ElasticRemoveExpiredNamespaceSubscriptionsFromIndexCronWorker'
Settings.cron_jobs['elastic_migration_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['elastic_migration_worker'] ||= {}
Settings.cron_jobs['elastic_migration_worker']['cron'] ||= '*/5 * * * *'
Settings.cron_jobs['elastic_migration_worker']['job_class'] ||= 'Elastic::MigrationWorker'
Settings.cron_jobs['search_index_curation_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['search_index_curation_worker'] ||= {}
Settings.cron_jobs['search_index_curation_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['search_index_curation_worker']['job_class'] ||= 'Search::IndexCurationWorker'
Settings.cron_jobs['sync_seat_link_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['sync_seat_link_worker'] ||= {}
Settings.cron_jobs['sync_seat_link_worker']['cron'] ||= "#{rand(60)} #{rand(3..4)} * * * UTC"
Settings.cron_jobs['sync_seat_link_worker']['job_class'] = 'SyncSeatLinkWorker'
Settings.cron_jobs['users_create_statistics_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['users_create_statistics_worker'] ||= {}
Settings.cron_jobs['users_create_statistics_worker']['cron'] ||= '2 15 * * *'
Settings.cron_jobs['users_create_statistics_worker']['job_class'] = 'Users::CreateStatisticsWorker'
Settings.cron_jobs['iterations_update_status_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['iterations_update_status_worker'] ||= {}
Settings.cron_jobs['iterations_update_status_worker']['cron'] ||= '5 0 * * *'
Settings.cron_jobs['iterations_update_status_worker']['job_class'] = 'IterationsUpdateStatusWorker'
Settings.cron_jobs['iterations_generator_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['iterations_generator_worker'] ||= {}
Settings.cron_jobs['iterations_generator_worker']['cron'] ||= '5 0 * * *'
Settings.cron_jobs['iterations_generator_worker']['job_class'] = 'Iterations::Cadences::ScheduleCreateIterationsWorker'
Settings.cron_jobs['vulnerability_statistics_schedule_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['vulnerability_statistics_schedule_worker'] ||= {}
Settings.cron_jobs['vulnerability_statistics_schedule_worker']['cron'] ||= '15 1,20 * * *'
Settings.cron_jobs['vulnerability_statistics_schedule_worker']['job_class'] = 'Vulnerabilities::Statistics::ScheduleWorker'
Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker'] ||= {}
Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker']['cron'] ||= '15 3 * * *'
Settings.cron_jobs['vulnerability_historical_statistics_deletion_worker']['job_class'] = 'Vulnerabilities::HistoricalStatistics::DeletionWorker'
Settings.cron_jobs['security_create_orchestration_policy_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['security_create_orchestration_policy_worker'] ||= {}
Settings.cron_jobs['security_create_orchestration_policy_worker']['cron'] ||= '*/10 * * * *'
Settings.cron_jobs['security_create_orchestration_policy_worker']['job_class'] = 'Security::CreateOrchestrationPolicyWorker'
Settings.cron_jobs['security_orchestration_policy_rule_schedule_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['security_orchestration_policy_rule_schedule_worker'] ||= {}
Settings.cron_jobs['security_orchestration_policy_rule_schedule_worker']['cron'] ||= '*/15 * * * *'
Settings.cron_jobs['security_orchestration_policy_rule_schedule_worker']['job_class'] = 'Security::OrchestrationPolicyRuleScheduleWorker'
Settings.cron_jobs['security_scans_purge_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['security_scans_purge_worker'] ||= {}
Settings.cron_jobs['security_scans_purge_worker']['cron'] ||= '0 */4 * * 6,0'
Settings.cron_jobs['security_scans_purge_worker']['job_class'] = 'Security::Scans::PurgeWorker'
Settings.cron_jobs['app_sec_dast_profile_schedule_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['app_sec_dast_profile_schedule_worker'] ||= {}
Settings.cron_jobs['app_sec_dast_profile_schedule_worker']['cron'] ||= '7-59/15 * * * *'
Settings.cron_jobs['app_sec_dast_profile_schedule_worker']['job_class'] = 'AppSec::Dast::ProfileScheduleWorker'
Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker'] ||= {}
Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker']['cron'] ||= '*/4 * * * *'
Settings.cron_jobs['ci_namespace_mirrors_consistency_check_worker']['job_class'] = 'Database::CiNamespaceMirrorsConsistencyCheckWorker'
Settings.cron_jobs['ci_project_mirrors_consistency_check_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_project_mirrors_consistency_check_worker'] ||= {}
Settings.cron_jobs['ci_project_mirrors_consistency_check_worker']['cron'] ||= '2-58/4 * * * *'
Settings.cron_jobs['ci_project_mirrors_consistency_check_worker']['job_class'] = 'Database::CiProjectMirrorsConsistencyCheckWorker'
Settings.cron_jobs['arkose_blocked_users_report_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['arkose_blocked_users_report_worker'] ||= {}
Settings.cron_jobs['arkose_blocked_users_report_worker']['cron'] ||= '0 6 * * *'
Settings.cron_jobs['arkose_blocked_users_report_worker']['job_class'] = 'Arkose::BlockedUsersReportWorker'
Settings.cron_jobs['ci_runners_stale_group_runners_prune_worker_cron'] ||= Settingslogic.new({})
Settings.cron_jobs['ci_runners_stale_group_runners_prune_worker_cron'] ||= {}
Settings.cron_jobs['ci_runners_stale_group_runners_prune_worker_cron']['cron'] ||= '30 * * * *'
Settings.cron_jobs['ci_runners_stale_group_runners_prune_worker_cron']['job_class'] = 'Ci::Runners::StaleGroupRunnersPruneCronWorker'
Settings.cron_jobs['licenses_reset_submit_license_usage_data_banner'] ||= Settingslogic.new({})
Settings.cron_jobs['licenses_reset_submit_license_usage_data_banner'] ||= {}
Settings.cron_jobs['licenses_reset_submit_license_usage_data_banner']['cron'] ||= "0 0 * * *"
Settings.cron_jobs['licenses_reset_submit_license_usage_data_banner']['job_class'] = 'Licenses::ResetSubmitLicenseUsageDataBannerWorker'
Settings.cron_jobs['abandoned_trial_emails'] ||= Settingslogic.new({})
Settings.cron_jobs['abandoned_trial_emails'] ||= {}
Settings.cron_jobs['abandoned_trial_emails']['cron'] ||= "0 1 * * *"
Settings.cron_jobs['abandoned_trial_emails']['job_class'] = 'Emails::AbandonedTrialEmailsCronWorker'
Settings.cron_jobs['package_metadata_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['package_metadata_sync_worker'] ||= {}
Settings.cron_jobs['package_metadata_sync_worker']['cron'] ||= "*/5 * * * *"
Settings.cron_jobs['package_metadata_sync_worker']['job_class'] = 'PackageMetadata::SyncWorker'
Settings.cron_jobs['compliance_violations_consistency_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['compliance_violations_consistency_worker'] ||= {}
Settings.cron_jobs['compliance_violations_consistency_worker']['cron'] ||= '0 1 * * *'
Settings.cron_jobs['compliance_violations_consistency_worker']['job_class'] = 'ComplianceManagement::MergeRequests::ComplianceViolationsConsistencyWorker'
Gitlab.com do
Settings.cron_jobs['free_user_cap_backfill_notification_jobs_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['free_user_cap_backfill_notification_jobs_worker'] ||= {}
Settings.cron_jobs['free_user_cap_backfill_notification_jobs_worker']['cron'] ||= '*/5 * * * *'
Settings.cron_jobs['free_user_cap_backfill_notification_jobs_worker']['job_class'] = 'Namespaces::FreeUserCap::BackfillNotificationJobsWorker'
Settings.cron_jobs['free_user_cap_backfill_clear_notified_flag'] ||= Settingslogic.new({})
Settings.cron_jobs['free_user_cap_backfill_clear_notified_flag'] ||= {}
Settings.cron_jobs['free_user_cap_backfill_clear_notified_flag']['cron'] ||= '*/5 * * * *'
Settings.cron_jobs['free_user_cap_backfill_clear_notified_flag']['job_class'] ||= 'Namespaces::FreeUserCap::BackfillNotificationClearingJobsWorker'
Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects'] ||= Settingslogic.new({})
Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects'] ||= {}
Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects']['cron'] ||= "30 5 * * 0"
Settings.cron_jobs['disable_legacy_open_source_license_for_inactive_projects']['job_class'] = 'Projects::DisableLegacyOpenSourceLicenseForInactiveProjectsWorker'
Settings.cron_jobs['notify_seats_exceeded_batch_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['notify_seats_exceeded_batch_worker'] ||= {}
Settings.cron_jobs['notify_seats_exceeded_batch_worker']['cron'] ||= '0 3 * * *'
Settings.cron_jobs['notify_seats_exceeded_batch_worker']['job_class'] ||= 'GitlabSubscriptions::NotifySeatsExceededBatchWorker'
Settings.cron_jobs['gitlab_subscriptions_schedule_refresh_seats_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['gitlab_subscriptions_schedule_refresh_seats_worker'] ||= {}
Settings.cron_jobs['gitlab_subscriptions_schedule_refresh_seats_worker']['cron'] ||= "0 */6 * * *"
Settings.cron_jobs['gitlab_subscriptions_schedule_refresh_seats_worker']['job_class'] = 'GitlabSubscriptions::ScheduleRefreshSeatsWorker'
end
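# Illustrative sketch, not part of the diff: the `||=` chain above only fills
# in defaults for keys that gitlab.yml left unset, so operator overrides
# survive, while `job_class` is always forced. For a hypothetical worker:
#
#   Settings.cron_jobs['example_worker'] ||= {}
#   Settings.cron_jobs['example_worker']['cron'] ||= '0 2 * * *' # default only if unset
#   Settings.cron_jobs['example_worker']['job_class'] = 'ExampleWorker'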
@ -862,14 +860,14 @@ end
#
# Sidekiq
#
Settings['sidekiq'] ||= Settingslogic.new({})
Settings['sidekiq'] ||= {}
Settings['sidekiq']['log_format'] ||= 'default'
Settings['sidekiq']['routing_rules'] = Settings.build_sidekiq_routing_rules(Settings['sidekiq']['routing_rules'])
#
# GitLab Shell
#
Settings['gitlab_shell'] ||= Settingslogic.new({})
Settings['gitlab_shell'] ||= {}
Settings.gitlab_shell['path'] = Settings.absolute(Settings.gitlab_shell['path'] || Settings.gitlab['user_home'] + '/gitlab-shell/')
Settings.gitlab_shell['hooks_path'] = :deprecated_use_gitlab_shell_path_instead
Settings.gitlab_shell['authorized_keys_file'] ||= File.join(Dir.home, '.ssh', 'authorized_keys')
@ -889,13 +887,13 @@ ObjectStoreSettings.new(Settings).parse!
#
# Workhorse
#
Settings['workhorse'] ||= Settingslogic.new({})
Settings['workhorse'] ||= {}
Settings.workhorse['secret_file'] ||= Rails.root.join('.gitlab_workhorse_secret')
#
# GitLab KAS
#
Settings['gitlab_kas'] ||= Settingslogic.new({})
Settings['gitlab_kas'] ||= {}
Settings.gitlab_kas['enabled'] ||= false
Settings.gitlab_kas['secret_file'] ||= Rails.root.join('.gitlab_kas_secret')
Settings.gitlab_kas['external_url'] ||= 'wss://kas.example.com'
@ -906,14 +904,14 @@ Settings.gitlab_kas['internal_url'] ||= 'grpc://localhost:8153'
# Suggested Reviewers
#
Gitlab.ee do
Settings['suggested_reviewers'] ||= Settingslogic.new({})
Settings['suggested_reviewers'] ||= {}
Settings.suggested_reviewers['secret_file'] ||= Rails.root.join('.gitlab_suggested_reviewers_secret')
end
#
# Repositories
#
Settings['repositories'] ||= Settingslogic.new({})
Settings['repositories'] ||= {}
Settings.repositories['storages'] ||= {}
Settings.repositories.storages.each do |key, storage|
@ -943,12 +941,12 @@ end
#
# Backup
#
Settings['backup'] ||= Settingslogic.new({})
Settings['backup'] ||= {}
Settings.backup['keep_time'] ||= 0
Settings.backup['pg_schema'] = nil
Settings.backup['path'] = Settings.absolute(Settings.backup['path'] || "tmp/backups/")
Settings.backup['archive_permissions'] ||= 0600
Settings.backup['upload'] ||= Settingslogic.new({ 'remote_directory' => nil, 'connection' => nil })
Settings.backup['upload'] ||= { 'remote_directory' => nil, 'connection' => nil }
Settings.backup['upload']['multipart_chunk_size'] ||= 104857600
Settings.backup['upload']['encryption'] ||= nil
Settings.backup['upload']['encryption_key'] ||= ENV['GITLAB_BACKUP_ENCRYPTION_KEY']
@ -958,19 +956,19 @@ Settings.backup['gitaly_backup_path'] ||= Gitlab::Utils.which('gitaly-backup')
#
# Git
#
Settings['git'] ||= Settingslogic.new({})
Settings['git'] ||= {}
Settings.git['bin_path'] ||= '/usr/bin/git'
# Important: keep the satellites.path setting until GitLab 9.0 at
# least. This setting is fed to 'rm -rf' in
# db/migrate/20151023144219_remove_satellites.rb
Settings['satellites'] ||= Settingslogic.new({})
Settings['satellites'] ||= {}
Settings.satellites['path'] = Settings.absolute(Settings.satellites['path'] || "tmp/repo_satellites/")
#
# Microsoft Graph Mailer
#
Settings['microsoft_graph_mailer'] ||= Settingslogic.new({})
Settings['microsoft_graph_mailer'] ||= {}
Settings.microsoft_graph_mailer['enabled'] = false if Settings.microsoft_graph_mailer['enabled'].nil?
Settings.microsoft_graph_mailer['user_id'] ||= nil
Settings.microsoft_graph_mailer['tenant'] ||= nil
@ -983,7 +981,7 @@ Settings.microsoft_graph_mailer['graph_endpoint'] ||= 'https://graph.microsoft.c
# Kerberos
#
Gitlab.ee do
Settings['kerberos'] ||= Settingslogic.new({})
Settings['kerberos'] ||= {}
Settings.kerberos['enabled'] = false if Settings.kerberos['enabled'].nil?
Settings.kerberos['keytab'] = nil if Settings.kerberos['keytab'].blank? # nil means use default keytab
Settings.kerberos['simple_ldap_linking_allowed_realms'] = [] if Settings.kerberos['simple_ldap_linking_allowed_realms'].blank?
@ -993,7 +991,7 @@ Gitlab.ee do
Settings.kerberos['port'] ||= Settings.kerberos.https ? 8443 : 8088
if Settings.kerberos['enabled'] && !Settings.omniauth.providers.map(&:name).include?('kerberos')
Settings.omniauth.providers << Settingslogic.new({ 'name' => 'kerberos' })
Settings.omniauth.providers << GitlabSettings::Options.build({ 'name' => 'kerberos' })
end
end
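# A minimal sketch of the replacement line above, grounded in the
# GitlabSettings::Options class added later in this commit: built options
# answer both method-style and hash-style access, as Settingslogic did.
#
#   provider = GitlabSettings::Options.build({ 'name' => 'kerberos' })
#   provider.name    # => "kerberos"
#   provider['name'] # => "kerberos"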
@ -1001,7 +999,7 @@ end
# Smartcard
#
Gitlab.ee do
Settings['smartcard'] ||= Settingslogic.new({})
Settings['smartcard'] ||= {}
Settings.smartcard['enabled'] = false if Settings.smartcard['enabled'].nil?
Settings.smartcard['client_certificate_required_host'] = Settings.gitlab.host if Settings.smartcard['client_certificate_required_host'].nil?
Settings.smartcard['client_certificate_required_port'] = 3444 if Settings.smartcard['client_certificate_required_port'].nil?
@ -1012,26 +1010,26 @@ end
#
# FortiAuthenticator
#
Settings['forti_authenticator'] ||= Settingslogic.new({})
Settings['forti_authenticator'] ||= {}
Settings.forti_authenticator['enabled'] = false if Settings.forti_authenticator['enabled'].nil?
Settings.forti_authenticator['port'] = 443 if Settings.forti_authenticator['port'].to_i == 0
#
# FortiToken Cloud
#
Settings['forti_token_cloud'] ||= Settingslogic.new({})
Settings['forti_token_cloud'] ||= {}
Settings.forti_token_cloud['enabled'] = false if Settings.forti_token_cloud['enabled'].nil?
#
# DuoAuth
#
Settings['duo_auth'] ||= Settingslogic.new({})
Settings['duo_auth'] ||= {}
Settings.duo_auth['enabled'] = false if Settings.duo_auth['enabled'].nil?
#
# Extra customization
#
Settings['extra'] ||= Settingslogic.new({})
Settings['extra'] ||= {}
Settings.extra['matomo_site_id'] ||= Settings.extra['piwik_site_id'] if Settings.extra['piwik_site_id'].present?
Settings.extra['matomo_url'] ||= Settings.extra['piwik_url'] if Settings.extra['piwik_url'].present?
Settings.extra['matomo_disable_cookies'] = false if Settings.extra['matomo_disable_cookies'].nil?
@ -1040,8 +1038,8 @@ Settings.extra['maximum_text_highlight_size_kilobytes'] = Settings.extra.fetch('
#
# Rack::Attack settings
#
Settings['rack_attack'] ||= Settingslogic.new({})
Settings.rack_attack['git_basic_auth'] ||= Settingslogic.new({})
Settings['rack_attack'] ||= {}
Settings.rack_attack['git_basic_auth'] ||= {}
Settings.rack_attack.git_basic_auth['enabled'] = false if Settings.rack_attack.git_basic_auth['enabled'].nil?
Settings.rack_attack.git_basic_auth['ip_whitelist'] ||= %w{127.0.0.1}
Settings.rack_attack.git_basic_auth['maxretry'] ||= 10
@ -1051,17 +1049,17 @@ Settings.rack_attack.git_basic_auth['bantime'] ||= 1.hour
#
# Gitaly
#
Settings['gitaly'] ||= Settingslogic.new({})
Settings['gitaly'] ||= {}
#
# Webpack settings
#
Settings['webpack'] ||= Settingslogic.new({})
Settings['webpack'] ||= {}
Settings.webpack['config_file'] ||= 'config/webpack.config.js'
Settings.webpack['output_dir'] ||= 'public/assets/webpack'
Settings.webpack['public_path'] ||= 'assets/webpack'
Settings.webpack['manifest_filename'] ||= 'manifest.json'
Settings.webpack['dev_server'] ||= Settingslogic.new({})
Settings.webpack['dev_server'] ||= {}
Settings.webpack.dev_server['enabled'] ||= false
Settings.webpack.dev_server['host'] ||= 'localhost'
Settings.webpack.dev_server['port'] ||= 3808
@ -1070,10 +1068,10 @@ Settings.webpack.dev_server['https'] ||= false
#
# Monitoring settings
#
Settings['monitoring'] ||= Settingslogic.new({})
Settings['monitoring'] ||= {}
Settings.monitoring['ip_whitelist'] ||= ['127.0.0.1/8']
Settings.monitoring['sidekiq_exporter'] ||= Settingslogic.new({})
Settings.monitoring['sidekiq_exporter'] ||= {}
Settings.monitoring.sidekiq_exporter['enabled'] ||= false
Settings.monitoring.sidekiq_exporter['log_enabled'] ||= false
Settings.monitoring.sidekiq_exporter['address'] ||= 'localhost'
@ -1082,12 +1080,12 @@ Settings.monitoring.sidekiq_exporter['tls_enabled'] ||= false
Settings.monitoring.sidekiq_exporter['tls_cert_path'] ||= nil
Settings.monitoring.sidekiq_exporter['tls_key_path'] ||= nil
Settings.monitoring['sidekiq_health_checks'] ||= Settingslogic.new({})
Settings.monitoring['sidekiq_health_checks'] ||= {}
Settings.monitoring.sidekiq_health_checks['enabled'] ||= false
Settings.monitoring.sidekiq_health_checks['address'] ||= 'localhost'
Settings.monitoring.sidekiq_health_checks['port'] ||= 8092
Settings.monitoring['web_exporter'] ||= Settingslogic.new({})
Settings.monitoring['web_exporter'] ||= {}
Settings.monitoring.web_exporter['enabled'] ||= false
Settings.monitoring.web_exporter['log_enabled'] ||= true
Settings.monitoring.web_exporter['address'] ||= 'localhost'
@ -1099,20 +1097,20 @@ Settings.monitoring.web_exporter['tls_key_path'] ||= nil
#
# Prometheus settings
#
Settings['prometheus'] ||= Settingslogic.new({})
Settings['prometheus'] ||= {}
Settings.prometheus['enabled'] ||= false
Settings.prometheus['server_address'] ||= nil
#
# Bullet settings
#
Settings['bullet'] ||= Settingslogic.new({})
Settings['bullet'] ||= {}
Settings.bullet['enabled'] ||= Rails.env.development?
#
# Shutdown settings
#
Settings['shutdown'] ||= Settingslogic.new({})
Settings['shutdown'] ||= {}
Settings.shutdown['blackout_seconds'] ||= 10
#

View File

@ -20,7 +20,7 @@ class ObjectStoreSettings
# Legacy parser
def self.legacy_parse(object_store, object_store_type)
object_store ||= Settingslogic.new({})
object_store ||= GitlabSettings::Options.build({})
object_store['enabled'] = false if object_store['enabled'].nil?
object_store['remote_directory'], object_store['bucket_prefix'] = split_bucket_prefix(
object_store['remote_directory']
@ -162,7 +162,7 @@ class ObjectStoreSettings
)
target_config['consolidated_settings'] = true
section['object_store'] = target_config
# Settingslogic internally stores data as a Hash, but it also
# GitlabSettings::Options internally stores data as a Hash, but it also
# creates a Settings object for every key. To avoid confusion, we should
# update both so that Settings.artifacts and Settings['artifacts'] return
# the same result.
@ -178,23 +178,11 @@ class ObjectStoreSettings
# 1. The common settings are defined
# 2. The legacy settings are not defined
def use_consolidated_settings?
# to_h is needed because we define `default` as a Gitaly storage name
# in stub_storage_settings. This causes Settingslogic to redefine Hash#default,
# which causes Hash#dig to fail when the key doesn't exist: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
settings_h = settings.to_h
return false unless settings_h.dig('object_store', 'enabled')
return false unless settings_h.dig('object_store', 'connection').present?
return false unless settings.dig('object_store', 'enabled')
return false unless settings.dig('object_store', 'connection').present?
WORKHORSE_ACCELERATED_TYPES.each do |store|
# to_h is needed because we define `default` as a Gitaly storage name
# in stub_storage_settings. This causes Settingslogic to redefine Hash#default,
# which causes Hash#dig to fail when the key doesn't exist: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
#
# (byebug) section.dig
# *** ArgumentError Exception: wrong number of arguments (given 0, expected 1+)
# (byebug) section.dig('object_store')
# *** ArgumentError Exception: wrong number of arguments (given 1, expected 0)
section = settings.try(store)&.to_h
section = settings.try(store)
next unless section

View File

@ -1,224 +1,222 @@
# frozen_string_literal: true
require 'settingslogic'
require_relative '../lib/gitlab_settings'
class Settings < Settingslogic
source ENV.fetch('GITLAB_CONFIG') { Pathname.new(File.expand_path('..', __dir__)).join('config/gitlab.yml') }
namespace ENV.fetch('GITLAB_ENV') { Rails.env }
file = ENV.fetch('GITLAB_CONFIG') { Rails.root.join('config/gitlab.yml') }
section = ENV.fetch('GITLAB_ENV') { Rails.env }
class << self
def gitlab_on_standard_port?
on_standard_port?(gitlab)
end
Settings = GitlabSettings.load(file, section) do
def gitlab_on_standard_port?
on_standard_port?(gitlab)
end
def build_ci_component_fqdn
custom_port = ":#{gitlab.port}" unless on_standard_port?(gitlab)
def build_ci_component_fqdn
custom_port = ":#{gitlab.port}" unless on_standard_port?(gitlab)
[
gitlab.host,
custom_port,
gitlab.relative_url_root,
'/'
].join('')
end
[
gitlab.host,
custom_port,
gitlab.relative_url_root,
'/'
].join('')
end
def host_without_www(url)
host(url).sub('www.', '')
end
def host_without_www(url)
host(url).sub('www.', '')
end
def build_gitlab_ci_url
custom_port =
if on_standard_port?(gitlab)
nil
else
":#{gitlab.port}"
end
[
gitlab.protocol,
"://",
gitlab.host,
custom_port,
gitlab.relative_url_root
].join('')
end
def build_pages_url
base_url(pages).join('')
end
def build_gitlab_shell_ssh_path_prefix
user = "#{gitlab_shell.ssh_user}@" unless gitlab_shell.ssh_user.empty?
user_host = "#{user}#{gitlab_shell.ssh_host}"
if gitlab_shell.ssh_port != 22
"ssh://#{user_host}:#{gitlab_shell.ssh_port}/"
elsif gitlab_shell.ssh_host.include? ':'
"[#{user_host}]:"
def build_gitlab_ci_url
custom_port =
if on_standard_port?(gitlab)
nil
else
"#{user_host}:"
end
end
def build_base_gitlab_url
base_url(gitlab).join('')
end
def build_gitlab_url
(base_url(gitlab) + [gitlab.relative_url_root]).join('')
end
def build_gitlab_go_url
# "Go package paths are not URLs, and do not include port numbers"
# https://github.com/golang/go/issues/38213#issuecomment-607851460
"#{gitlab.host}#{gitlab.relative_url_root}"
end
def kerberos_protocol
kerberos.https ? "https" : "http"
end
def kerberos_port
kerberos.use_dedicated_port ? kerberos.port : gitlab.port
end
# Curl expects username/password for authentication. However, when using GSS-Negotiate, no credentials should be needed.
# By inserting in the Kerberos dedicated URL ":@", we give to curl an empty username and password and GSS auth goes ahead
# Known bug reported in http://sourceforge.net/p/curl/bugs/440/ and http://curl.haxx.se/docs/knownbugs.html
def build_gitlab_kerberos_url
[
kerberos_protocol,
"://:@",
gitlab.host,
":#{kerberos_port}",
gitlab.relative_url_root
].join('')
end
def alternative_gitlab_kerberos_url?
kerberos.enabled && (build_gitlab_kerberos_url != build_gitlab_url)
end
# check that the values in `current` (strings or integers) are constants in `modul`.
def verify_constant_array(modul, current, default)
values = default || []
unless current.nil?
values = []
current.each do |constant|
values.push(verify_constant(modul, constant, nil))
end
values.delete_if { |value| value.nil? }
":#{gitlab.port}"
end
values
end
[
gitlab.protocol,
"://",
gitlab.host,
custom_port,
gitlab.relative_url_root
].join('')
end
# check that `current` (string or integer) is a constant in `modul`.
def verify_constant(modul, current, default)
constant = modul.constants.find { |name| modul.const_get(name, false) == current }
value = constant.nil? ? default : modul.const_get(constant, false)
if current.is_a? String
value = begin
modul.const_get(current.upcase, false)
rescue StandardError
default
end
end
def build_pages_url
base_url(pages).join('')
end
value
end
def build_gitlab_shell_ssh_path_prefix
user = "#{gitlab_shell.ssh_user}@" unless gitlab_shell.ssh_user.empty?
user_host = "#{user}#{gitlab_shell.ssh_host}"
def absolute(path)
File.expand_path(path, Rails.root)
end
# Don't use this in new code, use attr_encrypted_db_key_base_32 instead!
def attr_encrypted_db_key_base_truncated
Gitlab::Application.secrets.db_key_base[0..31]
end
# Ruby 2.4+ requires passing in the exact required length for OpenSSL keys
# (https://github.com/ruby/ruby/commit/ce635262f53b760284d56bb1027baebaaec175d1).
# Previous versions quietly truncated the input.
#
# Makes sure the key is exactly 32 bytes long, either by
# truncating or right-padding it with ASCII 0s. Use this when
# using :per_attribute_iv mode for attr_encrypted.
def attr_encrypted_db_key_base_32
Gitlab::Utils.ensure_utf8_size(attr_encrypted_db_key_base, bytes: 32.bytes)
end
def attr_encrypted_db_key_base_12
Gitlab::Utils.ensure_utf8_size(attr_encrypted_db_key_base, bytes: 12.bytes)
end
# This should be used for :per_attribute_salt_and_iv mode. There is no
# need to truncate the key because the encryptor will use the salt to
# generate a hash of the password:
# https://github.com/attr-encrypted/encryptor/blob/c3a62c4a9e74686dd95e0548f9dc2a361fdc95d1/lib/encryptor.rb#L77
def attr_encrypted_db_key_base
Gitlab::Application.secrets.db_key_base
end
def encrypted(path)
Gitlab::EncryptedConfiguration.new(
content_path: path,
base_key: Gitlab::Application.secrets.encrypted_settings_key_base,
previous_keys: Gitlab::Application.secrets.rotated_encrypted_settings_key_base || []
)
end
def load_dynamic_cron_schedules!
cron_jobs['gitlab_service_ping_worker']['cron'] ||= cron_for_service_ping
end
# Route jobs to queue based on worker name.
def build_sidekiq_routing_rules(rules)
return rules unless rules.nil? || rules&.empty?
[[Gitlab::SidekiqConfig::WorkerMatcher::WILDCARD_MATCH, nil]]
end
private
def base_url(config)
custom_port = on_standard_port?(config) ? nil : ":#{config.port}"
[
config.protocol,
"://",
config.host,
custom_port
]
end
def on_standard_port?(config)
config.port.to_i == (config.https ? 443 : 80)
end
# Extract the host part of the given +url+.
def host(url)
url = url.downcase
url = "http://#{url}" unless url.start_with?('http')
# Get rid of the path so that we don't even have to encode it
url_without_path = url.sub(%r{(https?://[^/]+)/?.*}, '\1')
URI.parse(url_without_path).host
end
# Runs at a consistent random time of day on a day of the week based on
# the instance UUID. This is to balance the load on the service receiving
# these pings. The sidekiq job handles temporary http failures.
def cron_for_service_ping
# Set a default UUID for the case when the UUID hasn't been initialized.
uuid = Gitlab::CurrentSettings.uuid || 'uuid-not-set'
minute = Digest::SHA256.hexdigest(uuid + 'minute').to_i(16) % 60
hour = Digest::SHA256.hexdigest(uuid + 'hour').to_i(16) % 24
day_of_week = Digest::SHA256.hexdigest(uuid).to_i(16) % 7
"#{minute} #{hour} * * #{day_of_week}"
if gitlab_shell.ssh_port != 22
"ssh://#{user_host}:#{gitlab_shell.ssh_port}/"
elsif gitlab_shell.ssh_host.include? ':'
"[#{user_host}]:"
else
"#{user_host}:"
end
end
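# Worked examples for the three branches above (illustrative values only):
#   ssh_user "git", ssh_host "gitlab.example.com", ssh_port 2222
#     => "ssh://git@gitlab.example.com:2222/"
#   ssh_user "git", ssh_host "2001:db8::1" (contains ':'), ssh_port 22
#     => "[git@2001:db8::1]:"
#   ssh_user "git", ssh_host "gitlab.example.com", ssh_port 22
#     => "git@gitlab.example.com:"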
def build_base_gitlab_url
base_url(gitlab).join('')
end
def build_gitlab_url
(base_url(gitlab) + [gitlab.relative_url_root]).join('')
end
def build_gitlab_go_url
# "Go package paths are not URLs, and do not include port numbers"
# https://github.com/golang/go/issues/38213#issuecomment-607851460
"#{gitlab.host}#{gitlab.relative_url_root}"
end
def kerberos_protocol
kerberos.https ? "https" : "http"
end
def kerberos_port
kerberos.use_dedicated_port ? kerberos.port : gitlab.port
end
# Curl expects username/password for authentication. However, when using GSS-Negotiate, no credentials should be needed.
# By inserting in the Kerberos dedicated URL ":@", we give to curl an empty username and password and GSS auth goes ahead
# Known bug reported in http://sourceforge.net/p/curl/bugs/440/ and http://curl.haxx.se/docs/knownbugs.html
def build_gitlab_kerberos_url
[
kerberos_protocol,
"://:@",
gitlab.host,
":#{kerberos_port}",
gitlab.relative_url_root
].join('')
end
def alternative_gitlab_kerberos_url?
kerberos.enabled && (build_gitlab_kerberos_url != build_gitlab_url)
end
# check that the values in `current` (strings or integers) are constants in `modul`.
def verify_constant_array(modul, current, default)
values = default || []
unless current.nil?
values = []
current.each do |constant|
values.push(verify_constant(modul, constant, nil))
end
values.delete_if { |value| value.nil? }
end
values
end
# check that `current` (string or integer) is a constant in `modul`.
def verify_constant(modul, current, default)
constant = modul.constants.find { |name| modul.const_get(name, false) == current }
value = constant.nil? ? default : modul.const_get(constant, false)
if current.is_a? String
value = begin
modul.const_get(current.upcase, false)
rescue StandardError
default
end
end
value
end
def absolute(path)
File.expand_path(path, Rails.root)
end
# Don't use this in new code, use attr_encrypted_db_key_base_32 instead!
def attr_encrypted_db_key_base_truncated
Gitlab::Application.secrets.db_key_base[0..31]
end
# Ruby 2.4+ requires passing in the exact required length for OpenSSL keys
# (https://github.com/ruby/ruby/commit/ce635262f53b760284d56bb1027baebaaec175d1).
# Previous versions quietly truncated the input.
#
# Makes sure the key is exactly 32 bytes long, either by
# truncating or right-padding it with ASCII 0s. Use this when
# using :per_attribute_iv mode for attr_encrypted.
def attr_encrypted_db_key_base_32
Gitlab::Utils.ensure_utf8_size(attr_encrypted_db_key_base, bytes: 32.bytes)
end
def attr_encrypted_db_key_base_12
Gitlab::Utils.ensure_utf8_size(attr_encrypted_db_key_base, bytes: 12.bytes)
end
# This should be used for :per_attribute_salt_and_iv mode. There is no
# need to truncate the key because the encryptor will use the salt to
# generate a hash of the password:
# https://github.com/attr-encrypted/encryptor/blob/c3a62c4a9e74686dd95e0548f9dc2a361fdc95d1/lib/encryptor.rb#L77
def attr_encrypted_db_key_base
Gitlab::Application.secrets.db_key_base
end
def encrypted(path)
Gitlab::EncryptedConfiguration.new(
content_path: path,
base_key: Gitlab::Application.secrets.encrypted_settings_key_base,
previous_keys: Gitlab::Application.secrets.rotated_encrypted_settings_key_base || []
)
end
def load_dynamic_cron_schedules!
cron_jobs['gitlab_service_ping_worker']['cron'] ||= cron_for_service_ping
end
# Route jobs to queue based on worker name.
def build_sidekiq_routing_rules(rules)
return rules unless rules.nil? || rules&.empty?
[[Gitlab::SidekiqConfig::WorkerMatcher::WILDCARD_MATCH, nil]]
end
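# Behaviour sketch, assuming WILDCARD_MATCH is the catch-all matcher '*'
# (rule syntax shown is illustrative):
#   build_sidekiq_routing_rules(nil) # => [["*", nil]] (all jobs, default queue)
#   build_sidekiq_routing_rules([])  # => [["*", nil]]
#   build_sidekiq_routing_rules([['resource_boundary=cpu', 'cpu']]) # returned as-is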
private
def base_url(config)
custom_port = on_standard_port?(config) ? nil : ":#{config.port}"
[
config.protocol,
"://",
config.host,
custom_port
]
end
def on_standard_port?(config)
config.port.to_i == (config.https ? 443 : 80)
end
# Extract the host part of the given +url+.
def host(url)
url = url.downcase
url = "http://#{url}" unless url.start_with?('http')
# Get rid of the path so that we don't even have to encode it
url_without_path = url.sub(%r{(https?://[^/]+)/?.*}, '\1')
URI.parse(url_without_path).host
end
# Runs at a consistent random time of day on a day of the week based on
# the instance UUID. This is to balance the load on the service receiving
# these pings. The sidekiq job handles temporary http failures.
def cron_for_service_ping
# Set a default UUID for the case when the UUID hasn't been initialized.
uuid = Gitlab::CurrentSettings.uuid || 'uuid-not-set'
minute = Digest::SHA256.hexdigest(uuid + 'minute').to_i(16) % 60
hour = Digest::SHA256.hexdigest(uuid + 'hour').to_i(16) % 24
day_of_week = Digest::SHA256.hexdigest(uuid).to_i(16) % 7
"#{minute} #{hour} * * #{day_of_week}"
end
end
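# Worked example for cron_for_service_ping above (hypothetical UUID; digest
# values shown only for shape): with SHA256(uuid + 'minute') % 60 == 27,
# SHA256(uuid + 'hour') % 24 == 14 and SHA256(uuid) % 7 == 3, the schedule
# becomes "27 14 * * 3", the same time every week for a given instance.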

View File

@ -3,7 +3,7 @@
# Set default values for email_smime settings
class SmimeSignatureSettings
def self.parse(email_smime)
email_smime ||= Settingslogic.new({})
email_smime ||= GitlabSettings::Options.build({})
email_smime['enabled'] = false unless email_smime['enabled']
email_smime['key_file'] ||= Rails.root.join('.gitlab_smime_key')
email_smime['cert_file'] ||= Rails.root.join('.gitlab_smime_cert')

View File

@ -0,0 +1,29 @@
# frozen_string_literal: true
class EnsureAwardEmojiBigintBackfillIsFinishedForGitlabDotCom < Gitlab::Database::Migration[2.1]
include Gitlab::Database::MigrationHelpers::ConvertToBigint
restrict_gitlab_migration gitlab_schema: :gitlab_main
disable_ddl_transaction!
def up
return unless should_run?
ensure_batched_background_migration_is_finished(
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: 'award_emoji',
column_name: 'id',
job_arguments: [['awardable_id'], ['awardable_id_convert_to_bigint']]
)
end
def down
# no-op
end
private
def should_run?
com_or_dev_or_test_but_not_jh?
end
end

View File

@ -0,0 +1,56 @@
# frozen_string_literal: true
class SwapAwardEmojiNoteIdToBigintForGitlabDotCom < Gitlab::Database::Migration[2.1]
include Gitlab::Database::MigrationHelpers::ConvertToBigint
disable_ddl_transaction!
TABLE_NAME = 'award_emoji'
def up
return unless should_run?
swap
end
def down
return unless should_run?
swap
end
def swap
# This will replace the existing idx_award_emoji_on_user_emoji_name_awardable_type_awardable_id
add_concurrent_index TABLE_NAME, [:user_id, :name, :awardable_type, :awardable_id_convert_to_bigint],
name: 'tmp_award_emoji_on_user_emoji_name_awardable_type_awardable_id'
# This will replace the existing index_award_emoji_on_awardable_type_and_awardable_id
add_concurrent_index TABLE_NAME, [:awardable_type, :awardable_id_convert_to_bigint],
name: 'tmp_index_award_emoji_on_awardable_type_and_awardable_id'
with_lock_retries(raise_on_exhaustion: true) do
execute "LOCK TABLE #{TABLE_NAME} IN ACCESS EXCLUSIVE MODE"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN awardable_id TO awardable_id_tmp"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN awardable_id_convert_to_bigint TO awardable_id"
execute "ALTER TABLE #{TABLE_NAME} RENAME COLUMN awardable_id_tmp TO awardable_id_convert_to_bigint"
function_name = Gitlab::Database::UnidirectionalCopyTrigger
.on_table(TABLE_NAME, connection: connection)
.name(:awardable_id, :awardable_id_convert_to_bigint)
execute "ALTER FUNCTION #{quote_table_name(function_name)} RESET ALL"
execute 'DROP INDEX IF EXISTS idx_award_emoji_on_user_emoji_name_awardable_type_awardable_id'
rename_index TABLE_NAME, 'tmp_award_emoji_on_user_emoji_name_awardable_type_awardable_id',
'idx_award_emoji_on_user_emoji_name_awardable_type_awardable_id'
execute 'DROP INDEX IF EXISTS index_award_emoji_on_awardable_type_and_awardable_id'
rename_index TABLE_NAME, 'tmp_index_award_emoji_on_awardable_type_and_awardable_id',
'index_award_emoji_on_awardable_type_and_awardable_id'
end
end
def should_run?
com_or_dev_or_test_but_not_jh?
end
end

View File

@ -0,0 +1 @@
0600ca21c065ed0ec4a9bf8904fce64b7901d5f3960ebd1d7e50833d6024d71f

View File

@ -0,0 +1 @@
bfb2f8193f033172d8cbc7edd046d5921fb2c3a5992b00b451f495ad90c60325

View File

@ -12282,11 +12282,11 @@ CREATE TABLE award_emoji (
id integer NOT NULL,
name character varying,
user_id integer,
awardable_id integer,
awardable_id_convert_to_bigint integer,
awardable_type character varying,
created_at timestamp without time zone,
updated_at timestamp without time zone,
awardable_id_convert_to_bigint bigint
awardable_id bigint
);
CREATE SEQUENCE award_emoji_id_seq

View File

@ -38,10 +38,51 @@ GitLab creates your fork, and redirects you to the new fork's page.
## Update your fork
To copy the latest changes from the upstream repository into your fork, update it
from the command line. GitLab Premium and higher tiers can also
[configure forks as pull mirrors](#with-repository-mirroring)
of the upstream repository.
A fork can fall out of sync with its upstream repository, and require an update:
- **Ahead**: Your fork contains new commits not present in the upstream repository.
To sync your fork, create a merge request to push your changes to the upstream repository.
- **Behind**: The upstream repository contains new commits not present in your fork.
To sync your fork, pull the new commits into your fork.
- **Ahead and behind**: Both the upstream repository and your fork contain new commits
not present in the other. To fully sync your fork, create a merge request to push
your changes up, and pull the upstream repository's new changes into your fork.
To sync your fork with its upstream repository, update it from the GitLab UI
or the command line. GitLab Premium and higher tiers can also automate updates by
[configuring forks as pull mirrors](#with-repository-mirroring) of the upstream repository.
### From the UI
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/330243) in GitLab 15.11 [with a flag](../../../administration/feature_flags.md) named `synchronize_fork`. Disabled by default, but enabled for projects in the `gitlab-org/gitlab` and `gitlab-com/www-gitlab-com` namespaces only.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available,
ask an administrator to [enable the feature flag](../../../administration/feature_flags.md) named `synchronize_fork`.
On GitLab.com, this feature is available for projects in the `gitlab-org/gitlab` and `gitlab-com/www-gitlab-com` namespaces.
To update your fork from the GitLab UI:
1. On the top bar, select **Main menu > Projects > View all projects**.
1. On the secondary menu, select **Personal**.
1. Select the fork you want to update.
1. Below the dropdown list for branch name, find the **Forked from** (**{fork}**)
information box to determine if your fork is ahead, behind, or both. In this example,
the fork is behind the upstream repository:
![Information box for a fork 23552 commits behind the upstream repository](img/update-fork_v15_11.png)
1. If your fork is **ahead** of the upstream repository, select
**Create merge request** to propose adding your fork's changes to the upstream repository.
1. If your fork is **behind** the upstream repository, select **Update fork**
to pull changes from the upstream repository.
1. If your fork is **ahead and behind** the upstream repository, you can update it
from the UI only if no merge conflicts are detected:
- If your fork contains no merge conflicts, you can select **Create merge request**
to propose pushing your changes to the upstream repository, **Update fork**
to pull changes down to your fork, or both. The type of changes in your fork
determines which actions are appropriate.
- If your fork contains merge conflicts, update your fork from the command line.
### From the command line

Binary file added: img/update-fork_v15_11.png (21 KiB)

View File

@ -38,7 +38,7 @@ module Banzai
# whenever the application settings are changed
def self.initialize_settings
application_settings = Gitlab::CurrentSettings.current_application_settings
Gitlab.config['asset_proxy'] ||= Settingslogic.new({})
Gitlab.config['asset_proxy'] ||= GitlabSettings::Options.build({})
if application_settings.respond_to?(:asset_proxy_enabled)
Gitlab.config.asset_proxy['enabled'] = application_settings.asset_proxy_enabled

View File

@ -12,7 +12,7 @@ module Gitlab
class << self
def api_url
Gitlab.config.consul.api_url.to_s.presence if Gitlab.config.consul
rescue Settingslogic::MissingSetting
rescue GitlabSettings::MissingSetting
Gitlab::AppLogger.error('Consul api_url is not present in config/gitlab.yml')
nil

View File

@ -74,7 +74,7 @@ module Gitlab
# An Array from the configuration will be expanded
provider_arguments.concat arguments
provider_arguments << defaults unless defaults.empty?
when Hash
when Hash, GitlabSettings::Options
hash_arguments = arguments.deep_symbolize_keys.deep_merge(defaults)
normalized = normalize_hash_arguments(hash_arguments)

View File

@ -27,7 +27,7 @@ module Gitlab
def self.server_address
Gitlab.config.prometheus.server_address.to_s if Gitlab.config.prometheus
rescue Settingslogic::MissingSetting
rescue GitlabSettings::MissingSetting
Gitlab::AppLogger.error('Prometheus server_address is not present in config/gitlab.yml')
nil
@ -35,7 +35,7 @@ module Gitlab
def self.prometheus_enabled?
Gitlab.config.prometheus.enabled if Gitlab.config.prometheus
rescue Settingslogic::MissingSetting
rescue GitlabSettings::MissingSetting
Gitlab::AppLogger.error('prometheus.enabled is not present in config/gitlab.yml')
false

View File

@ -57,13 +57,12 @@ module Gitlab
@cron_jobs ||= begin
Gitlab.config.load_dynamic_cron_schedules!
# Load recurring jobs from gitlab.yml
# UGLY Hack to get nested hash from settingslogic
jobs = Gitlab::Json.parse(Gitlab.config.cron_jobs.to_json)
jobs = Gitlab.config.cron_jobs.to_hash
jobs.delete('poll_interval') # Would be interpreted as a job otherwise
# UGLY hack: Settingslogic doesn't allow 'class' key
# Settingslogic (the gem formerly used for YAML configuration) didn't allow a 'class' key
# Therefore, we configure cron jobs with `job_class` as a workaround.
required_keys = %w[job_class cron]
jobs.each do |k, v|
if jobs[k] && required_keys.all? { |s| jobs[k].key?(s) }

View File

@ -59,7 +59,7 @@ module Gitlab
def snowplow_micro_enabled?
Rails.env.development? && Gitlab.config.snowplow_micro.enabled
rescue Settingslogic::MissingSetting
rescue GitlabSettings::MissingSetting
false
end

View File

@ -53,7 +53,7 @@ module Gitlab
url = Gitlab.config.snowplow_micro.address
scheme = Gitlab.config.gitlab.https ? 'https' : 'http'
"#{scheme}://#{url}"
rescue Settingslogic::MissingSetting
rescue GitlabSettings::MissingSetting
DEFAULT_URI
end
end

View File

@ -344,8 +344,7 @@ module Gitlab
next unless section_setting && section_setting['enabled']
# Use #to_h to avoid Settingslogic bug: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
object_store_setting = section_setting['object_store']&.to_h
object_store_setting = section_setting['object_store']
next unless object_store_setting && object_store_setting['enabled']

lib/gitlab_settings.rb (new file, 17 lines)
View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
require "active_support"
require "active_support/core_ext/hash"
require_relative 'gitlab_settings/settings'
require_relative 'gitlab_settings/options'
module GitlabSettings
MissingSetting = Class.new(StandardError)
def self.load(source = nil, section = nil, &block)
Settings
.new(source, section)
.extend(Module.new(&block))
end
end
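# Usage sketch, assuming a YAML file with a `production` section: `load`
# parses the source, scopes to the section, and extends the resulting
# Settings object with any helpers defined in the block.
#
#   config = GitlabSettings.load('config/gitlab.yml', 'production') do
#     def gitlab_host
#       gitlab.host # resolved through method_missing on the parsed options
#     end
#   end
#
#   config.gitlab_host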

View File

@ -0,0 +1,80 @@
# frozen_string_literal: true
module GitlabSettings
class Options
# Recursively build GitlabSettings::Options
def self.build(obj)
case obj
when Hash
new(obj.transform_values { |value| build(value) })
when Array
obj.map { |value| build(value) }
else
obj
end
end
def initialize(value)
@options = value.deep_stringify_keys
end
def [](key)
@options[key.to_s]
end
def []=(key, value)
@options[key.to_s] = Options.build(value)
end
def key?(name)
@options.key?(name.to_s) || @options.key?(name.to_sym)
end
alias_method :has_key?, :key?
def to_hash
@options.deep_transform_values do |option|
case option
when GitlabSettings::Options
option.to_hash
else
option
end
end
end
alias_method :to_h, :to_hash
def merge(other)
Options.build(to_hash.merge(other.deep_stringify_keys))
end
def deep_merge(other)
Options.build(to_hash.deep_merge(other.deep_stringify_keys))
end
def is_a?(klass)
return true if klass == Hash
super(klass)
end
def method_missing(name, *args, &block)
name_string = +name.to_s
if name_string.chomp!("=")
return self[name_string] = args.first if key?(name_string)
elsif key?(name_string)
return self[name_string]
end
return @options.public_send(name, *args, &block) if @options.respond_to?(name) # rubocop: disable GitlabSecurity/PublicSend
raise ::GitlabSettings::MissingSetting, "option '#{name}' not defined"
end
def respond_to_missing?(name, include_all = false)
return true if key?(name)
@options.respond_to?(name, include_all)
end
end
end
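# Behaviour sketch, grounded in the class above:
#
#   opts = GitlabSettings::Options.build({ 'db' => { 'host' => 'localhost' } })
#   opts.db.host       # => "localhost" (method_missing key lookup)
#   opts['db']['host'] # => "localhost" (hash-style access)
#   opts.db.port       # raises GitlabSettings::MissingSetting
#   opts.to_h          # => { "db" => { "host" => "localhost" } }, a plain nested Hash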

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module GitlabSettings
class Settings
attr_reader :source
def initialize(source, section)
raise(ArgumentError, 'config source is required') if source.blank?
raise(ArgumentError, 'config section is required') if section.blank?
@source = source
@section = section
reload!
end
def reload!
yaml = ActiveSupport::ConfigurationFile.parse(source)
all_configs = yaml.deep_stringify_keys
configs = all_configs[section]
@config = Options.build(configs)
end
def method_missing(name, *args)
config.public_send(name, *args) # rubocop: disable GitlabSecurity/PublicSend
end
def respond_to_missing?(name, include_all = false)
config.respond_to?(name, include_all)
end
private
attr_reader :config, :section
end
end
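# Usage sketch (assumes a scratch YAML file with a `test` section):
#
#   File.write('/tmp/example.yml', { 'test' => { 'gitlab' => { 'host' => 'example.com' } } }.to_yaml)
#   settings = GitlabSettings::Settings.new('/tmp/example.yml', 'test')
#   settings.gitlab.host # => "example.com"
#   settings.reload!     # re-parses the source file on demand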

View File

@ -260,7 +260,7 @@ module ObjectStorage
end
def connection
@connection ||= ::Fog::Storage.new(credentials)
@connection ||= ::Fog::Storage.new(credentials.to_hash)
end
end
end

View File

@ -162,7 +162,7 @@ class MetricsServer # rubocop:disable Gitlab/NamespacedClass
when 'puma'
Gitlab::Metrics::Exporter::WebExporter.instance(**default_opts)
when 'sidekiq'
settings = Settings.new(Settings.monitoring.sidekiq_exporter)
settings = GitlabSettings::Options.build(Settings.monitoring.sidekiq_exporter)
Gitlab::Metrics::Exporter::SidekiqExporter.instance(settings, **default_opts)
end

View File

@ -68,6 +68,8 @@ module QA
end
def go_to_applications
return click_element(:nav_item_link, submenu_item: 'Applications') if Runtime::Env.super_sidebar_enabled?
click_element(:sidebar_menu_link, menu_item: 'Applications')
end

View File

@ -12,6 +12,7 @@ module QA
prepend Page::SubMenus::SuperSidebar::Settings
prepend SubMenus::SuperSidebar::Main
prepend SubMenus::SuperSidebar::Build
prepend SubMenus::SuperSidebar::Operate
end
def click_group_members_item
@ -73,6 +74,8 @@ module QA
end
def go_to_group_packages
return go_to_package_registry if Runtime::Env.super_sidebar_enabled?
hover_group_packages do
within_submenu do
click_element(:sidebar_menu_item_link, menu_item: 'Package Registry')

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
module QA
module Page
module Group
module SubMenus
module SuperSidebar
module Operate
extend QA::Page::PageConcern
def self.prepended(base)
super
base.class_eval do
include QA::Page::SubMenus::SuperSidebar::Operate
end
end
end
end
end
end
end
end

View File

@ -7,7 +7,7 @@ module QA
module Packages
extend QA::Page::PageConcern
def click_packages_link
def go_to_package_registry
hover_registry do
within_submenu do
click_element(:sidebar_menu_item_link, menu_item: 'Package Registry')

View File

@ -8,8 +8,12 @@ module QA
module Operate
extend QA::Page::PageConcern
def go_to_package_registry
open_operate_submenu('Package Registry')
def self.included(base)
super
base.class_eval do
include QA::Page::SubMenus::SuperSidebar::Operate
end
end
def go_to_infrastructure_registry

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
module QA
module Page
module SubMenus
module SuperSidebar
module Operate
extend QA::Page::PageConcern
def go_to_package_registry
open_operate_submenu('Package Registry')
end
def go_to_container_registry
open_operate_submenu('Container Registry')
end
def go_to_dependency_proxy
open_operate_submenu('Dependency proxy')
end
private
def open_operate_submenu(sub_menu)
open_submenu('Operate', sub_menu)
end
end
end
end
end
end

View File

@ -31,6 +31,10 @@ module QA
open_settings_submenu('CI/CD')
end
def go_to_package_settings
open_settings_submenu('Packages and registries')
end
private
def open_settings_submenu(sub_menu)

View File

@ -72,7 +72,7 @@ module QA
end
it 'publishes a composer package and deletes it', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348016' do
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package.name)

View File

@ -69,7 +69,7 @@ module QA
expect(job).to be_successful(timeout: 800)
end
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package.name)

View File

@ -83,7 +83,7 @@ module QA
end
it 'uploads a generic package and downloads it', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348017' do
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package.name)

View File

@ -73,7 +73,7 @@ module QA
expect(job).to be_successful(timeout: 800)
end
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package_name)

View File

@ -92,7 +92,7 @@ module QA
expect(job).to be_successful(timeout: 800)
end
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package_name)

View File

@ -134,7 +134,7 @@ module QA
expect(job).to be_successful(timeout: 800)
end
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package_name)

View File

@ -61,7 +61,7 @@ module QA
expect(job).to be_successful(timeout: 800)
end
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package_name)

View File

@ -153,7 +153,7 @@ module QA
end
project.visit!
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package.name)

View File

@ -126,7 +126,7 @@ module QA
end
project.visit!
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package.name)

View File

@ -174,7 +174,7 @@ product_group: :package_registry do
expect(job).to be_successful(timeout: 800)
end
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package.name)

View File

@ -92,7 +92,7 @@ module QA
context 'when at the project level' do
it 'publishes and installs a pypi package', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348015' do
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package.name)
@ -112,7 +112,7 @@ module QA
dashboard.go_to_project(project.name)
end
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
index.wait_for_package_replication(package.name)

View File

@ -89,7 +89,7 @@ module QA
expect(job).to be_successful(timeout: 800)
end
Page::Project::Menu.perform(&:click_packages_link)
Page::Project::Menu.perform(&:go_to_package_registry)
Page::Project::Packages::Index.perform do |index|
expect(index).to have_package(package.name)

View File

@ -71,7 +71,7 @@ RSpec.describe 'GitLab metrics server', :aggregate_failures do
if use_golang_server
stub_env('GITLAB_GOLANG_METRICS_SERVER', '1')
allow(Settings).to receive(:monitoring).and_return(
Settingslogic.new(config.dig('test', 'monitoring')))
GitlabSettings::Options.build(config.dig('test', 'monitoring')))
else
config_file.write(YAML.dump(config))
config_file.close

View File

@ -18,17 +18,12 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
let(:sidekiq_exporter_enabled) { false }
let(:sidekiq_exporter_port) { '3807' }
let(:config_file) { Tempfile.new('gitlab.yml') }
let(:config) do
{
'test' => {
'monitoring' => {
'sidekiq_exporter' => {
'address' => 'localhost',
'enabled' => sidekiq_exporter_enabled,
'port' => sidekiq_exporter_port
}
}
'sidekiq_exporter' => {
'address' => 'localhost',
'enabled' => sidekiq_exporter_enabled,
'port' => sidekiq_exporter_port
}
}
end
@ -37,14 +32,6 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
let(:metrics_cleanup_service) { instance_double(Prometheus::CleanupMultiprocDirService, execute: nil) }
before do
stub_env('RAILS_ENV', 'test')
config_file.write(YAML.dump(config))
config_file.close
allow(::Settings).to receive(:source).and_return(config_file.path)
::Settings.reload!
allow(Gitlab::ProcessManagement).to receive(:write_pid)
allow(Gitlab::SidekiqCluster::SidekiqProcessSupervisor).to receive(:instance).and_return(supervisor)
allow(supervisor).to receive(:supervise)
@ -52,8 +39,13 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
allow(Prometheus::CleanupMultiprocDirService).to receive(:new).and_return(metrics_cleanup_service)
end
after do
config_file.unlink
around do |example|
original = Settings['monitoring']
Settings['monitoring'] = config
example.run
Settings['monitoring'] = original
end
describe '#run' do
@ -318,13 +310,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
context 'when sidekiq_exporter is not set up' do
let(:config) do
{
'test' => {
'monitoring' => {
'sidekiq_exporter' => {}
}
}
}
{ 'sidekiq_exporter' => {} }
end
it 'does not start a sidekiq metrics server' do
@ -336,13 +322,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, feature_category: :gitlab_cli, stub_
context 'with missing sidekiq_exporter setting' do
let(:config) do
{
'test' => {
'monitoring' => {
'sidekiq_exporter' => nil
}
}
}
{ 'sidekiq_exporter' => nil }
end
it 'does not start a sidekiq metrics server' do

View File

@ -5,7 +5,7 @@ require Rails.root.join('config', 'object_store_settings.rb')
RSpec.describe ObjectStoreSettings, feature_category: :shared do
describe '#parse!' do
let(:settings) { Settingslogic.new(config) }
let(:settings) { GitlabSettings::Options.build(config) }
subject { described_class.new(settings).parse! }
@ -68,7 +68,7 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
expect(settings.artifacts['enabled']).to be true
expect(settings.artifacts['object_store']['enabled']).to be true
expect(settings.artifacts['object_store']['connection']).to eq(connection)
expect(settings.artifacts['object_store']['connection'].to_hash).to eq(connection)
expect(settings.artifacts['object_store']['direct_upload']).to be true
expect(settings.artifacts['object_store']['proxy_download']).to be false
expect(settings.artifacts['object_store']['remote_directory']).to eq('artifacts')
@ -78,7 +78,7 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
expect(settings.lfs['enabled']).to be true
expect(settings.lfs['object_store']['enabled']).to be true
expect(settings.lfs['object_store']['connection']).to eq(connection)
expect(settings.lfs['object_store']['connection'].to_hash).to eq(connection)
expect(settings.lfs['object_store']['direct_upload']).to be true
expect(settings.lfs['object_store']['proxy_download']).to be true
expect(settings.lfs['object_store']['remote_directory']).to eq('lfs-objects')
@ -88,7 +88,7 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
expect(settings.pages['enabled']).to be true
expect(settings.pages['object_store']['enabled']).to be true
expect(settings.pages['object_store']['connection']).to eq(connection)
expect(settings.pages['object_store']['connection'].to_hash).to eq(connection)
expect(settings.pages['object_store']['remote_directory']).to eq('pages')
expect(settings.pages['object_store']['bucket_prefix']).to eq(nil)
expect(settings.pages['object_store']['consolidated_settings']).to be true
@ -128,7 +128,7 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
it 'populates artifacts CDN config' do
subject
expect(settings.artifacts['object_store']['cdn']).to eq(cdn_config)
expect(settings.artifacts['object_store']['cdn'].to_hash).to eq(cdn_config)
end
end
@ -163,7 +163,7 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
it 'allows pages to define its own connection' do
expect { subject }.not_to raise_error
expect(settings.pages['object_store']['connection']).to eq(pages_connection)
expect(settings.pages['object_store']['connection'].to_hash).to eq(pages_connection)
expect(settings.pages['object_store']['consolidated_settings']).to be_falsey
end
end
@ -230,7 +230,7 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
end
it 'respects original values' do
original_settings = Settingslogic.new({
original_settings = GitlabSettings::Options.build({
'enabled' => true,
'remote_directory' => 'artifacts'
})
@ -244,7 +244,7 @@ RSpec.describe ObjectStoreSettings, feature_category: :shared do
end
it 'supports bucket prefixes' do
original_settings = Settingslogic.new({
original_settings = GitlabSettings::Options.build({
'enabled' => true,
'remote_directory' => 'gitlab/artifacts'
})

View File

@ -31,7 +31,7 @@ RSpec.describe Settings, feature_category: :system_access do
with_them do
before do
allow(Gitlab.config).to receive(:gitlab).and_return(
Settingslogic.new({
GitlabSettings::Options.build({
'host' => host,
'https' => true,
'port' => port,

View File

@ -19,7 +19,7 @@ RSpec.describe SmimeSignatureSettings, feature_category: :shared do
context 'when providing custom values' do
it 'sets correct default values to disabled' do
custom_settings = Settingslogic.new({})
custom_settings = GitlabSettings::Options.build({})
parsed_settings = described_class.parse(custom_settings)
@ -30,7 +30,7 @@ RSpec.describe SmimeSignatureSettings, feature_category: :shared do
end
it 'enables smime with default key and cert' do
custom_settings = Settingslogic.new({
custom_settings = GitlabSettings::Options.build({
'enabled' => true
})
@ -46,7 +46,7 @@ RSpec.describe SmimeSignatureSettings, feature_category: :shared do
custom_key = '/custom/key'
custom_cert = '/custom/cert'
custom_ca_certs = '/custom/ca_certs'
custom_settings = Settingslogic.new({
custom_settings = GitlabSettings::Options.build({
'enabled' => true,
'key_file' => custom_key,
'cert_file' => custom_cert,

View File

@ -57,7 +57,7 @@ RSpec.describe 'Sandboxed Mermaid rendering', :js, feature_category: :team_plann
context 'in a project milestone' do
let(:milestone) { create(:project_milestone, project: project, description: description) }
it 'includes mermaid frame correctly' do
it 'includes mermaid frame correctly', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/408560' do
visit(project_milestone_path(project, milestone))
wait_for_requests

View File

@ -1,5 +1,6 @@
import mockJobsCount from 'test_fixtures/graphql/jobs/get_jobs_count.query.graphql.json';
import mockJobsEmpty from 'test_fixtures/graphql/jobs/get_jobs.query.graphql.empty.json';
import mockAllJobsEmpty from 'test_fixtures/graphql/jobs/get_all_jobs.query.graphql.empty.json';
import mockJobsPaginated from 'test_fixtures/graphql/jobs/get_jobs.query.graphql.paginated.json';
import mockAllJobsPaginated from 'test_fixtures/graphql/jobs/get_all_jobs.query.graphql.paginated.json';
import mockJobs from 'test_fixtures/graphql/jobs/get_jobs.query.graphql.json';
@ -16,6 +17,7 @@ threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
export const mockJobsResponsePaginated = mockJobsPaginated;
export const mockAllJobsResponsePaginated = mockAllJobsPaginated;
export const mockJobsResponseEmpty = mockJobsEmpty;
export const mockAllJobsResponseEmpty = mockAllJobsEmpty;
export const mockJobsNodes = mockJobs.data.project.jobs.nodes;
export const mockAllJobsNodes = mockAllJobs.data.jobs.nodes;
export const mockJobsNodesAsGuest = mockJobsAsGuest.data.project.jobs.nodes;

View File

@ -1,6 +1,6 @@
import { GlLoadingIcon, GlEmptyState, GlAlert } from '@gitlab/ui';
import { GlLoadingIcon, GlEmptyState, GlAlert, GlIntersectionObserver } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
@ -14,8 +14,8 @@ import JobsTable from '~/jobs/components/table/jobs_table.vue';
import {
mockAllJobsResponsePaginated,
mockJobsResponseEmpty,
mockCancelableJobsCountResponse,
mockAllJobsResponseEmpty,
statuses,
} from '../../../../../jobs/mock_data';
@ -25,9 +25,9 @@ describe('Job table app', () => {
let wrapper;
const successHandler = jest.fn().mockResolvedValue(mockAllJobsResponsePaginated);
const emptyHandler = jest.fn().mockResolvedValue(mockJobsResponseEmpty);
const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
const cancelHandler = jest.fn().mockResolvedValue(mockCancelableJobsCountResponse);
const emptyHandler = jest.fn().mockResolvedValue(mockAllJobsResponseEmpty);
const findSkeletonLoader = () => wrapper.findComponent(JobsSkeletonLoader);
const findLoadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
@ -37,6 +37,9 @@ describe('Job table app', () => {
const findTabs = () => wrapper.findComponent(JobsTableTabs);
const findCancelJobsButton = () => wrapper.findComponent(CancelJobs);
const triggerInfiniteScroll = () =>
wrapper.findComponent(GlIntersectionObserver).vm.$emit('appear');
const createMockApolloProvider = (handler, cancelableHandler) => {
const requestHandlers = [
[getAllJobsQuery, handler],
@ -106,6 +109,33 @@ describe('Job table app', () => {
expect(wrapper.vm.$apollo.queries.jobs.refetch).toHaveBeenCalledTimes(1);
});
describe('when infinite scrolling is triggered', () => {
it('does not display a skeleton loader', () => {
triggerInfiniteScroll();
expect(findSkeletonLoader().exists()).toBe(false);
});
it('handles infinite scrolling by calling fetch more', async () => {
triggerInfiniteScroll();
await nextTick();
const pageSize = 50;
expect(findLoadingSpinner().exists()).toBe(true);
await waitForPromises();
expect(findLoadingSpinner().exists()).toBe(false);
expect(successHandler).toHaveBeenLastCalledWith({
first: pageSize,
after: mockAllJobsResponsePaginated.data.jobs.pageInfo.endCursor,
});
});
});
});
describe('empty state', () => {

View File

@ -9,12 +9,7 @@ RSpec.describe Settings do
expect(Gitlab.config.ldap.servers.main.label).to eq('ldap')
end
# Specifically trying to cause this error discovered in EE when removing the
# reassignment of each server element with Settingslogic.
#
# `undefined method `label' for #<Hash:0x007fbd18b59c08>`
#
it 'can be accessed in a very specific way that breaks without reassigning each element with Settingslogic' do
it 'can be accessed in a very specific way that breaks without reassigning each element' do
server_settings = Gitlab.config.ldap.servers['main']
expect(server_settings.label).to eq('ldap')
end

View File

@ -49,7 +49,7 @@ RSpec.describe Gitlab::Auth::OAuth::Provider do
context 'for an LDAP provider' do
context 'when the provider exists' do
it 'returns the config' do
expect(described_class.config_for('ldapmain')).to be_a(Hash)
expect(described_class.config_for('ldapmain')).to be_a(GitlabSettings::Options)
end
end
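
Since config_for now returns a GitlabSettings::Options instance rather than a bare Hash, callers keep dot access alongside hash-style access. A hedged usage sketch (the `label` key is borrowed from the LDAP fixtures elsewhere in these specs and is illustrative, not part of this diff):

config = Gitlab::Auth::OAuth::Provider.config_for('ldapmain')
config.label     # dot access, as with Settingslogic before
config['label']  # hash-style access still works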

View File

@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Components::InstancePath, feature_category: :pipeline
let_it_be(:user) { create(:user) }
let(:path) { described_class.new(address: address, content_filename: 'template.yml') }
let(:settings) { Settingslogic.new({ 'component_fqdn' => current_host }) }
let(:settings) { GitlabSettings::Options.build({ 'component_fqdn' => current_host }) }
let(:current_host) { 'acme.com/' }
before do

View File

@ -22,7 +22,7 @@ RSpec.describe Gitlab::Consul::Internal do
context 'when consul setting is not present in gitlab.yml' do
before do
allow(Gitlab.config).to receive(:consul).and_raise(Settingslogic::MissingSetting)
allow(Gitlab.config).to receive(:consul).and_raise(GitlabSettings::MissingSetting)
end
it 'does not fail' do

View File

@ -228,7 +228,7 @@ RSpec.describe Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService
context 'when prometheus setting is not present in gitlab.yml' do
before do
allow(Gitlab.config).to receive(:prometheus).and_raise(Settingslogic::MissingSetting)
allow(Gitlab.config).to receive(:prometheus).and_raise(GitlabSettings::MissingSetting)
end
it 'does not fail' do

View File

@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::LegacyGithubImport::Client do
let(:token) { '123456' }
let(:github_provider) { Settingslogic.new('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
let(:github_provider) { GitlabSettings::Options.build('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
let(:wait_for_rate_limit_reset) { true }
subject(:client) { described_class.new(token, wait_for_rate_limit_reset: wait_for_rate_limit_reset) }
@ -17,7 +17,7 @@ RSpec.describe Gitlab::LegacyGithubImport::Client do
expect(client.client.options.keys).to all(be_kind_of(Symbol))
end
it 'does not crash (e.g. Settingslogic::MissingSetting) when verify_ssl config is not present' do
it 'does not crash (e.g. GitlabSettings::MissingSetting) when verify_ssl config is not present' do
expect { client.api }.not_to raise_error
end

View File

@ -81,7 +81,7 @@ RSpec.describe Gitlab::Prometheus::Internal do
context 'when prometheus setting is not present in gitlab.yml' do
before do
allow(Gitlab.config).to receive(:prometheus).and_raise(Settingslogic::MissingSetting)
allow(Gitlab.config).to receive(:prometheus).and_raise(GitlabSettings::MissingSetting)
end
it 'does not fail' do
@ -97,7 +97,7 @@ RSpec.describe Gitlab::Prometheus::Internal do
context 'when prometheus setting is not present in gitlab.yml' do
before do
allow(Gitlab.config).to receive(:prometheus).and_raise(Settingslogic::MissingSetting)
allow(Gitlab.config).to receive(:prometheus).and_raise(GitlabSettings::MissingSetting)
end
it 'does not fail' do

View File

@ -17,6 +17,27 @@ RSpec.describe Gitlab::SidekiqConfig do
end
end
describe '.cron_jobs' do
it 'renames job_class to class and removes incomplete jobs' do
expect(Gitlab)
.to receive(:config)
.twice
.and_return(GitlabSettings::Options.build(
load_dynamic_cron_schedules!: true,
cron_jobs: {
job: { cron: '0 * * * *', job_class: 'SomeWorker' },
incomplete_job: { cron: '0 * * * *' }
}))
expect(Gitlab::AppLogger)
.to receive(:error)
.with("Invalid cron_jobs config key: 'incomplete_job'. Check your gitlab config file.")
expect(described_class.cron_jobs)
.to eq('job' => { 'class' => 'SomeWorker', 'cron' => '0 * * * *' })
end
end
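
# A minimal sketch of the rename-and-filter behaviour exercised above.
# Illustrative only: the helper name and structure are assumptions, not
# GitLab's actual implementation.
def normalize_cron_jobs(cron_jobs)
  cron_jobs.to_h.each_with_object({}) do |(name, job), result|
    job = job.to_h.stringify_keys

    # Rename the user-facing `job_class` key to the `class` key Sidekiq expects.
    job['class'] = job.delete('job_class') if job.key?('job_class')

    # Drop entries missing required keys, logging the offending config key.
    unless job['class'] && job['cron']
      Gitlab::AppLogger.error("Invalid cron_jobs config key: '#{name}'. Check your gitlab config file.")
      next
    end

    result[name.to_s] = job
  end
end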
describe '.worker_queues' do
it 'includes all queues' do
queues = described_class.worker_queues

View File

@ -45,7 +45,7 @@ RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
context 'when snowplow_micro config is not set' do
before do
allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
allow(Gitlab.config).to receive(:snowplow_micro).and_raise(GitlabSettings::MissingSetting)
end
it 'returns localhost hostname' do

View File

@ -312,7 +312,7 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
end
it 'returns false when snowplow_micro is not configured' do
allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
allow(Gitlab.config).to receive(:snowplow_micro).and_raise(GitlabSettings::MissingSetting)
expect(described_class).not_to be_snowplow_micro_enabled
end

View File

@ -166,8 +166,8 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
let(:lfs_config) do
{
'enabled' => lfs_enabled,
# This nesting of Settingslogic is necessary to trigger the bug
'object_store' => Settingslogic.new({ 'enabled' => true })
# This nesting of settings is necessary to trigger the bug
'object_store' => GitlabSettings::Options.build({ 'enabled' => true })
}
end
@ -175,16 +175,15 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only, feature_category: :sh
{
'gitlab' => Gitlab.config.gitlab,
'repositories' => { 'storages' => { 'default' => 'test' } },
'lfs' => Settingslogic.new(lfs_config)
'lfs' => GitlabSettings::Options.build(lfs_config)
}
end
let(:host) { 'http://127.0.0.1:9000' }
let(:settings) { Settingslogic.new(config) }
let(:settings) { GitlabSettings::Options.build(config) }
before do
allow(Gitlab).to receive(:config).and_return(settings)
# Triggers Settingslogic bug: https://gitlab.com/gitlab-org/gitlab/-/issues/286873
settings.repositories.storages.default
end

View File

@ -0,0 +1,155 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSettings::Options, :aggregate_failures, feature_category: :shared do
let(:config) { { foo: { bar: 'baz' } } }
subject(:options) { described_class.build(config) }
describe '.build' do
context 'when argument is a hash' do
it 'creates a new GitlabSettings::Options instance' do
options = described_class.build(config)
expect(options).to be_a described_class
expect(options.foo).to be_a described_class
expect(options.foo.bar).to eq 'baz'
end
end
end
describe '#[]' do
it 'accesses the configuration key as string' do
expect(options['foo']).to be_a described_class
expect(options['foo']['bar']).to eq 'baz'
expect(options['inexistent']).to be_nil
end
it 'accesses the configuration key as symbol' do
expect(options[:foo]).to be_a described_class
expect(options[:foo][:bar]).to eq 'baz'
expect(options[:inexistent]).to be_nil
end
end
describe '#[]=' do
it 'changes the configuration key as string' do
options['foo']['bar'] = 'anothervalue'
expect(options['foo']['bar']).to eq 'anothervalue'
end
it 'changes the configuration key as symbol' do
options[:foo][:bar] = 'anothervalue'
expect(options[:foo][:bar]).to eq 'anothervalue'
end
context 'when key does not exist' do
it 'creates a new configuration by string key' do
options['inexistent'] = 'value'
expect(options['inexistent']).to eq 'value'
end
it 'creates a new configuration by symbol key' do
options[:inexistent] = 'value'
expect(options[:inexistent]).to eq 'value'
end
end
end
describe '#key?' do
it 'checks if a string key exists' do
expect(options.key?('foo')).to be true
expect(options.key?('inexistent')).to be false
end
it 'checks if a symbol key exists' do
expect(options.key?(:foo)).to be true
expect(options.key?(:inexistent)).to be false
end
end
describe '#to_hash' do
it 'returns the hash representation of the config' do
expect(options.to_hash).to eq('foo' => { 'bar' => 'baz' })
end
end
describe '#merge' do
it 'merges a hash into the existing options' do
expect(options.merge(more: 'configs').to_hash).to eq(
'foo' => { 'bar' => 'baz' },
'more' => 'configs'
)
end
context 'when the merge hash replaces existing configs' do
it 'overwrites the existing value' do
expect(options.merge(foo: 'configs').to_hash).to eq('foo' => 'configs')
end
end
end
describe '#deep_merge' do
it 'deep merges a hash into the existing options' do
expect(options.deep_merge(foo: { more: 'configs' }).to_hash).to eq('foo' => {
'bar' => 'baz',
'more' => 'configs'
})
end
context 'when the merge hash replaces existing configs' do
it 'overwrites the existing nested value' do
expect(options.deep_merge(foo: { bar: 'configs' }).to_hash).to eq('foo' => {
'bar' => 'configs'
})
end
end
end
describe '#is_a?' do
it 'returns false for anything different of Hash or GitlabSettings::Options' do
expect(options.is_a?(described_class)).to be true
expect(options.is_a?(Hash)).to be true
expect(options.is_a?(String)).to be false
end
end
describe '#method_missing' do
context 'when method is an option' do
it 'delegates methods to options keys' do
expect(options.foo.bar).to eq('baz')
end
it 'changes option values through writer methods' do
expect { options.foo = 1 }
.to change { options.foo }
.to(1)
end
end
context 'when method is not an option' do
it 'delegates the method to the internal options hash' do
expect { options.foo.delete('bar') }
.to change { options.to_hash }
.to({ 'foo' => {} })
end
end
context 'when method is not an option and does not exist in hash' do
it 'raises GitlabSettings::MissingSetting' do
expect { options.anything }
.to raise_error(
::GitlabSettings::MissingSetting,
"option 'anything' not defined"
)
end
end
end
end
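
To make the behaviour specced above easier to follow, here is a minimal, self-contained sketch of an Options-like wrapper that satisfies the same expectations. It is reconstructed from the spec, not GitLab's actual implementation; the ActiveSupport requires and the fall-through delegation to Hash are assumptions.

require 'active_support/core_ext/hash/keys'       # deep_stringify_keys
require 'active_support/core_ext/hash/deep_merge' # deep_merge

module SettingsSketch
  MissingSetting = Class.new(StandardError)

  class Options
    def self.build(obj)
      obj.is_a?(Hash) ? new(obj.deep_stringify_keys) : obj
    end

    def initialize(hash)
      @hash = hash
    end

    # String and symbol keys are interchangeable.
    def [](key)
      self.class.build(@hash[key.to_s])
    end

    def []=(key, value)
      @hash[key.to_s] = value
    end

    def key?(key)
      @hash.key?(key.to_s)
    end

    def to_hash
      @hash
    end

    def merge(other)
      self.class.build(@hash.merge(other.deep_stringify_keys))
    end

    def deep_merge(other)
      self.class.build(@hash.deep_merge(other.deep_stringify_keys))
    end

    # The spec expects an options object to pass for both Hash and Options.
    def is_a?(klass)
      klass == Hash || super
    end

    # Dot access: writers for any key, readers for known keys, fall-through
    # to the internal hash for Hash methods such as #delete, and
    # MissingSetting for everything else.
    def method_missing(name, *args, &block)
      key = name.to_s
      return self[key.delete_suffix('=')] = args.first if key.end_with?('=')
      return self[key] if key?(key)
      return @hash.public_send(name, *args, &block) if @hash.respond_to?(name)

      raise MissingSetting, "option '#{key}' not defined"
    end

    def respond_to_missing?(name, include_private = false)
      key?(name.to_s.delete_suffix('=')) || @hash.respond_to?(name) || super
    end
  end
end

# SettingsSketch::Options.build(foo: { bar: 'baz' }).foo.bar # => "baz"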

View File

@ -0,0 +1,53 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe GitlabSettings::Settings, :aggregate_failures, feature_category: :shared do
let(:config) do
{
section1: {
config1: {
value1: 1
}
}
}
end
let(:source) { Tempfile.new('config.yaml') }
before do
File.write(source, config.to_yaml)
end
subject(:settings) { described_class.new(source.path, 'section1') }
it 'requires a source' do
expect { described_class.new('', '') }
.to raise_error(ArgumentError, 'config source is required')
end
it 'requires a section' do
expect { described_class.new(source, '') }
.to raise_error(ArgumentError, 'config section is required')
end
it 'loads the given section config' do
expect(settings.config1.value1).to eq(1)
end
describe '#reload!' do
it 'reloads the config' do
expect(settings.config1.value1).to eq(1)
File.write(source, { section1: { config1: { value1: 2 } } }.to_yaml)
# config doesn't change when source changes
expect(settings.config1.value1).to eq(1)
settings.reload!
# config changes after reload! if source changed
expect(settings.config1.value1).to eq(2)
end
end
end
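
A similar hedged sketch for the section-scoped loader specced above, reusing the Options sketch from the previous file. The YAML handling (symbol keys written by to_yaml, hence permitted_classes: [Symbol]) is an assumption for illustration, not the real loader.

require 'yaml'
require 'active_support/core_ext/hash/keys' # deep_stringify_keys

module SettingsSketch
  class Settings
    def initialize(source, section)
      raise ArgumentError, 'config source is required' if source.to_s.empty?
      raise ArgumentError, 'config section is required' if section.to_s.empty?

      @source = source
      @section = section.to_s
      reload!
    end

    # Values are captured at load time; edits to the file are only picked up
    # after an explicit reload!, which is exactly what the spec asserts.
    def reload!
      yaml = YAML.safe_load(File.read(@source), permitted_classes: [Symbol]) || {}
      @options = Options.build(yaml.deep_stringify_keys[@section] || {})
      self
    end

    # Delegate reads such as settings.config1.value1 to the loaded section.
    def method_missing(name, *args, &block)
      @options.public_send(name, *args, &block)
    end

    def respond_to_missing?(name, include_private = false)
      @options.respond_to?(name, include_private) || super
    end
  end
end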

View File

@ -99,7 +99,7 @@ RSpec.describe MetricsServer, feature_category: :application_performance do # ru
context 'for Golang server' do
let(:log_enabled) { false }
let(:settings) do
Settingslogic.new(
GitlabSettings::Options.build(
{
'web_exporter' => {
'enabled' => true,
@ -304,7 +304,7 @@ RSpec.describe MetricsServer, feature_category: :application_performance do # ru
end
context 'for sidekiq' do
let(:settings) { Settingslogic.new({ "sidekiq_exporter" => { "enabled" => true } }) }
let(:settings) { GitlabSettings::Options.build({ "sidekiq_exporter" => { "enabled" => true } }) }
before do
allow(::Settings).to receive(:monitoring).and_return(settings)

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe EnsureAwardEmojiBigintBackfillIsFinishedForGitlabDotCom, feature_category: :database do
describe '#up' do
let(:migration_arguments) do
{
job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
table_name: 'award_emoji',
column_name: 'id',
job_arguments: [['awardable_id'], ['awardable_id_convert_to_bigint']]
}
end
it 'ensures the migration is completed for GitLab.com, dev, or test' do
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
expect(instance).to receive(:ensure_batched_background_migration_is_finished).with(migration_arguments)
end
migrate!
end
it 'skips the check for other instances' do
expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
expect(instance).not_to receive(:ensure_batched_background_migration_is_finished)
end
migrate!
end
end
end

View File

@ -0,0 +1,67 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe SwapAwardEmojiNoteIdToBigintForGitlabDotCom, feature_category: :database do
describe '#up' do
before do
# As we call `schema_migrate_down!` before each example, and for this migration
# `#down` is the same as `#up`, we need to ensure we start from the expected state.
connection = described_class.new.connection
connection.execute('ALTER TABLE award_emoji ALTER COLUMN awardable_id TYPE integer')
connection.execute('ALTER TABLE award_emoji ALTER COLUMN awardable_id_convert_to_bigint TYPE bigint')
end
# rubocop: disable RSpec/AnyInstanceOf
it 'swaps the integer and bigint columns for GitLab.com, dev, or test' do
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(true)
award_emoji = table(:award_emoji)
disable_migrations_output do
reversible_migration do |migration|
migration.before -> {
award_emoji.reset_column_information
expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('integer')
expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }.sql_type).to eq('bigint')
}
migration.after -> {
award_emoji.reset_column_information
expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('bigint')
expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }.sql_type)
.to eq('integer')
}
end
end
end
it 'is a no-op for other instances' do
allow_any_instance_of(described_class).to receive(:com_or_dev_or_test_but_not_jh?).and_return(false)
award_emoji = table(:award_emoji)
disable_migrations_output do
reversible_migration do |migration|
migration.before -> {
award_emoji.reset_column_information
expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('integer')
expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }.sql_type).to eq('bigint')
}
migration.after -> {
award_emoji.reset_column_information
expect(award_emoji.columns.find { |c| c.name == 'awardable_id' }.sql_type).to eq('integer')
expect(award_emoji.columns.find { |c| c.name == 'awardable_id_convert_to_bigint' }.sql_type).to eq('bigint')
}
end
end
end
# rubocop: enable RSpec/AnyInstanceOf
end
end

View File

@ -73,7 +73,7 @@ RSpec.describe InstanceConfiguration do
it 'returns Settings.pages' do
gitlab_pages.delete(:ip_address)
expect(gitlab_pages).to eq(Settings.pages.symbolize_keys)
expect(gitlab_pages).to eq(Settings.pages.to_hash.deep_symbolize_keys)
end
it 'returns the GitLab\'s pages host ip address' do

View File

@ -102,7 +102,7 @@ module StubConfiguration
messages[storage_name] = Gitlab::GitalyClient::StorageSettings.new(storage_hash.to_h)
end
allow(Gitlab.config.repositories).to receive(:storages).and_return(Settingslogic.new(messages))
allow(Gitlab.config.repositories).to receive(:storages).and_return(::GitlabSettings::Options.build(messages))
end
def stub_sentry_settings(enabled: true)
@ -175,11 +175,11 @@ module StubConfiguration
end
end
# Support nested hashes by converting all values into Settingslogic objects
# Support nested hashes by converting all values into GitlabSettings::Options objects
def to_settings(hash)
hash.transform_values do |value|
if value.is_a? Hash
Settingslogic.new(value.to_h.deep_stringify_keys)
::GitlabSettings::Options.build(value)
else
value
end

View File

@ -17,7 +17,7 @@ module StubObjectStorage
direct_upload: false,
cdn: {}
)
old_config = Settingslogic.new(config.to_h.deep_stringify_keys)
old_config = ::GitlabSettings::Options.build(config.to_h.deep_stringify_keys)
new_config = config.to_h.deep_symbolize_keys.merge({
enabled: enabled,
proxy_download: proxy_download,
@ -32,7 +32,7 @@ module StubObjectStorage
allow(config).to receive(:proxy_download) { proxy_download }
allow(config).to receive(:direct_upload) { direct_upload }
uploader_config = Settingslogic.new(new_config.to_h.deep_stringify_keys)
uploader_config = ::GitlabSettings::Options.build(new_config.to_h.deep_stringify_keys)
allow(uploader).to receive(:object_store_options).and_return(uploader_config)
allow(uploader.options).to receive(:object_store).and_return(uploader_config)

View File

@ -41,7 +41,7 @@ RSpec.describe ObjectStorage::CDN, feature_category: :build_artifacts do
before do
stub_artifacts_object_storage(enabled: true)
options = Settingslogic.new(Gitlab.config.uploads.deep_merge(cdn_options))
options = Gitlab.config.uploads.deep_merge(cdn_options)
allow(uploader_class).to receive(:options).and_return(options)
end

View File

@ -446,7 +446,7 @@ RSpec.describe ObjectStorage do
end
describe '#fog_credentials' do
let(:connection) { Settingslogic.new("provider" => "AWS") }
let(:connection) { GitlabSettings::Options.build("provider" => "AWS") }
before do
allow(uploader_class).to receive(:options) do
@ -479,7 +479,7 @@ RSpec.describe ObjectStorage do
}
end
let(:options) { Settingslogic.new(raw_options) }
let(:options) { GitlabSettings::Options.build(raw_options) }
before do
allow(uploader_class).to receive(:options) do