Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-09-19 21:13:17 +00:00
parent 87ce80bead
commit 9adb0b9ffb
56 changed files with 1146 additions and 1204 deletions

View File

@ -2,12 +2,14 @@
# project here: https://gitlab.com/gitlab-org/gitlab/-/project_members
# As described in https://docs.gitlab.com/ee/user/project/code_owners.html
* @gitlab-org/maintainers/rails-backend @gitlab-org/maintainers/frontend @gitlab-org/maintainers/database @gl-quality/qe-maintainers @gl-quality/tooling-maintainers @gitlab-org/delivery @gitlab-org/maintainers/cicd-templates @nolith @gitlab-org/tw-leadership
# In order to support release automation, each required approval group MUST
# include @gitlab-bot
* @gitlab-bot @gitlab-org/maintainers/rails-backend @gitlab-org/maintainers/frontend @gitlab-org/maintainers/database @gl-quality/qe-maintainers @gl-quality/tooling-maintainers @gitlab-org/delivery @gitlab-org/maintainers/cicd-templates @nolith @gitlab-org/tw-leadership
.gitlab/CODEOWNERS @gitlab-org/development-leaders @gitlab-org/tw-leadership
.gitlab/CODEOWNERS @gitlab-bot @gitlab-org/development-leaders @gitlab-org/tw-leadership
## Allows release tooling to update the Gitaly Version
GITALY_SERVER_VERSION @project_278964_bot6 @gitlab-org/maintainers/rails-backend @gitlab-org/delivery
GITALY_SERVER_VERSION @gitlab-bot @project_278964_bot6 @gitlab-org/maintainers/rails-backend @gitlab-org/delivery
## Files that are excluded from required approval
## These rules override the * rule above, so that changes to docs and templates
@ -23,13 +25,13 @@ GITALY_SERVER_VERSION @project_278964_bot6 @gitlab-org/maintainers/rails-backend
## Technical writing files that do not need `*` rule approval,
## but still require an approval from a TW team DRI for each file.
/data/deprecations/templates/ @marcel.amirault @sarahgerman @gitlab-org/tw-leadership
.markdownlint.yml @marcel.amirault @eread @aqualls @gitlab-org/tw-leadership
/doc/.markdownlint/ @marcel.amirault @eread @aqualls @gitlab-org/tw-leadership
/doc/.vale/ @marcel.amirault @eread @aqualls @gitlab-org/tw-leadership
/lib/tasks/gitlab/tw/codeowners.rake @aqualls @gitlab-org/tw-leadership
/scripts/lint-doc.sh @marcel.amirault @eread @axil @sarahgerman @gitlab-org/tw-leadership
/scripts/lint-docs-metadata.sh @marcel.amirault @eread @axil @sarahgerman @gitlab-org/tw-leadership
/data/deprecations/templates/ @gitlab-bot @marcel.amirault @sarahgerman @gitlab-org/tw-leadership
.markdownlint.yml @gitlab-bot @marcel.amirault @eread @aqualls @gitlab-org/tw-leadership
/doc/.markdownlint/ @gitlab-bot @marcel.amirault @eread @aqualls @gitlab-org/tw-leadership
/doc/.vale/ @gitlab-bot @marcel.amirault @eread @aqualls @gitlab-org/tw-leadership
/lib/tasks/gitlab/tw/codeowners.rake @gitlab-bot @aqualls @gitlab-org/tw-leadership
/scripts/lint-doc.sh @gitlab-bot @marcel.amirault @eread @axil @sarahgerman @gitlab-org/tw-leadership
/scripts/lint-docs-metadata.sh @gitlab-bot @marcel.amirault @eread @axil @sarahgerman @gitlab-org/tw-leadership
^[Source code editing]
.solargraph.yml.example @igor.drozdov
@ -48,13 +50,13 @@ GITALY_SERVER_VERSION @project_278964_bot6 @gitlab-org/maintainers/rails-backend
/spec/frontend_integration/
/ee/spec/frontend_integration/
[Clickhouse] @gitlab-org/maintainers/clickhouse
[Clickhouse] @gitlab-bot @gitlab-org/maintainers/clickhouse
/db/click_house/
/ee/db/click_house/
/**/click(_|-)?house/
## We list db/ subfolders explicitly as we don't want to match Clickhouse files
[Database] @gitlab-org/maintainers/database
[Database] @gitlab-bot @gitlab-org/maintainers/database
/db/database_connections/
/ee/db/database_connections/
/db/docs/
@ -79,31 +81,31 @@ GITALY_SERVER_VERSION @project_278964_bot6 @gitlab-org/maintainers/rails-backend
/ee/app/finders/
/rubocop/rubocop-migrations.yml
[Pipeline configuration] @gl-quality/eng-prod
[Pipeline configuration] @gitlab-bot @gl-quality/eng-prod
/.gitlab-ci.yml
/.gitlab/ci/
/.gitlab/ci/docs.gitlab-ci.yml @gl-quality/eng-prod @gl-docsteam
/.gitlab/ci/frontend.gitlab-ci.yml @gl-quality/eng-prod @gitlab-org/maintainers/frontend
/.gitlab/ci/package-and-test/ @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/qa.gitlab-ci.yml @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/qa-common/ @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/releases.gitlab-ci.yml @gl-quality/eng-prod @gitlab-org/delivery
/.gitlab/ci/reports.gitlab-ci.yml @gl-quality/eng-prod @gitlab-com/gl-security/appsec
/.gitlab/ci/review-apps/qa.gitlab-ci.yml @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/test-on-gdk/ @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/docs.gitlab-ci.yml @gitlab-bot @gl-quality/eng-prod @gl-docsteam
/.gitlab/ci/frontend.gitlab-ci.yml @gitlab-bot @gl-quality/eng-prod @gitlab-org/maintainers/frontend
/.gitlab/ci/package-and-test/ @gitlab-bot @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/qa.gitlab-ci.yml @gitlab-bot @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/qa-common/ @gitlab-bot @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/releases.gitlab-ci.yml @gitlab-bot @gl-quality/eng-prod @gitlab-org/delivery
/.gitlab/ci/reports.gitlab-ci.yml @gitlab-bot @gl-quality/eng-prod @gitlab-com/gl-security/appsec
/.gitlab/ci/review-apps/qa.gitlab-ci.yml @gitlab-bot @gl-quality/eng-prod @gl-quality/qe-maintainers
/.gitlab/ci/test-on-gdk/ @gitlab-bot @gl-quality/eng-prod @gl-quality/qe-maintainers
/gems/gem.gitlab-ci.yml
[Tooling] @gl-quality/eng-prod
[Tooling] @gitlab-bot @gl-quality/eng-prod
Dangerfile
/danger/
/tooling/danger/
/scripts/
/scripts/**/*.rb @gl-quality/eng-prod @gitlab-org/maintainers/rails-backend
/scripts/**/glfm/**/* @gl-quality/eng-prod @gitlab-org/plan-stage/backend-engineers
/scripts/**/*.js @gl-quality/eng-prod @gitlab-org/maintainers/frontend
/scripts/frontend/ @gl-quality/eng-prod @gitlab-org/maintainers/frontend
/scripts/remote_development/ @gitlab-org/maintainers/remote-development/backend
/scripts/review_apps/seed-dast-test-data.sh @gl-quality/eng-prod @dappelt @ngeorge1
/scripts/**/*.rb @gitlab-bot @gl-quality/eng-prod @gitlab-org/maintainers/rails-backend
/scripts/**/glfm/**/* @gitlab-bot @gl-quality/eng-prod @gitlab-org/plan-stage/backend-engineers
/scripts/**/*.js @gitlab-bot @gl-quality/eng-prod @gitlab-org/maintainers/frontend
/scripts/frontend/ @gitlab-bot @gl-quality/eng-prod @gitlab-org/maintainers/frontend
/scripts/remote_development/ @gitlab-bot @gitlab-org/maintainers/remote-development/backend
/scripts/review_apps/seed-dast-test-data.sh @gitlab-bot @gl-quality/eng-prod @dappelt @ngeorge1
/.codeclimate.yml
/.dockerignore
/.editorconfig
@ -415,7 +417,7 @@ Dangerfile
^[Workhorse] @gitlab-org/maintainers/gitlab-workhorse
/workhorse/
[Application Security] @gitlab-com/gl-security/appsec
[Application Security] @gitlab-bot @gitlab-com/gl-security/appsec
/app/assets/javascripts/lib/dompurify.js
/app/assets/javascripts/gfm_auto_complete.js
/ee/app/assets/javascripts/gfm_auto_complete.js
@ -1024,7 +1026,7 @@ lib/gitlab/checks/**
/doc/user/workspace/ @ashrafkhamis
# End rake-managed-docs-block
[Authentication and Authorization] @gitlab-org/manage/authentication-and-authorization/approvers
[Authentication and Authorization] @gitlab-bot @gitlab-org/manage/authentication-and-authorization/approvers
/app/assets/javascripts/access_tokens/
/app/assets/javascripts/alerts_settings/graphql/mutations/reset_http_token.mutation.graphql
/app/assets/javascripts/authentication/
@ -1270,7 +1272,7 @@ lib/gitlab/checks/**
/lib/tasks/gitlab/password.rake
/lib/tasks/tokens.rake
[Verify] @gitlab-org/maintainers/cicd-verify @shinya.maeda @stanhu @ayufan
[Verify] @gitlab-bot @gitlab-org/maintainers/cicd-verify @shinya.maeda @stanhu @ayufan
# With these catch-all rules we will require backend approval and use it as an
# opportunity to refine specific rules defined in this section.
# Note that frontend, CI templates and other concerns should be kept within
@ -1341,93 +1343,93 @@ lib/gitlab/checks/**
/ee/lib/ee/api/entities/merge_train.rb
# Overrides for Verify. These files below require approval from teams outside Verify.
/**/lib/**/ci/reports/**/ @gitlab-org/maintainers/rails-backend
/**/lib/**/ci/parsers/**/ @gitlab-org/maintainers/rails-backend
/ee/lib/gitlab/ci/parsers/license_compliance/ @gitlab-org/secure/composition-analysis-be
/ee/lib/gitlab/ci/parsers/security/ @gitlab-org/govern/threat-insights-backend-team
/ee/lib/gitlab/ci/reports/coverage_fuzzing/ @gitlab-org/secure/fuzzing-be
/ee/lib/gitlab/ci/reports/dependency_list/ @gitlab-org/secure/composition-analysis-be
/ee/lib/gitlab/ci/reports/license_scanning/ @gitlab-org/secure/composition-analysis-be
/ee/lib/gitlab/ci/reports/security/ @gitlab-org/govern/threat-insights-backend-team
/**/lib/**/ci/reports/**/ @gitlab-bot @gitlab-org/maintainers/rails-backend
/**/lib/**/ci/parsers/**/ @gitlab-bot @gitlab-org/maintainers/rails-backend
/ee/lib/gitlab/ci/parsers/license_compliance/ @gitlab-bot @gitlab-org/secure/composition-analysis-be
/ee/lib/gitlab/ci/parsers/security/ @gitlab-bot @gitlab-org/govern/threat-insights-backend-team
/ee/lib/gitlab/ci/reports/coverage_fuzzing/ @gitlab-bot @gitlab-org/secure/fuzzing-be
/ee/lib/gitlab/ci/reports/dependency_list/ @gitlab-bot @gitlab-org/secure/composition-analysis-be
/ee/lib/gitlab/ci/reports/license_scanning/ @gitlab-bot @gitlab-org/secure/composition-analysis-be
/ee/lib/gitlab/ci/reports/security/ @gitlab-bot @gitlab-org/govern/threat-insights-backend-team
# Verify frontend
/**/javascripts/ci/ @gitlab-org/ci-cd/verify/frontend
/**/javascripts/token_access/ @gitlab-org/ci-cd/verify/frontend
/**/javascripts/admin/application_settings/runner_token_expiration/ @gitlab-org/ci-cd/verify/frontend
/**/javascripts/usage_quotas/pipelines/ @gitlab-org/ci-cd/verify/frontend @sheldonled @aalakkad @kpalchyk
/**/javascripts/ci/ @gitlab-bot @gitlab-org/ci-cd/verify/frontend
/**/javascripts/token_access/ @gitlab-bot @gitlab-org/ci-cd/verify/frontend
/**/javascripts/admin/application_settings/runner_token_expiration/ @gitlab-bot @gitlab-org/ci-cd/verify/frontend
/**/javascripts/usage_quotas/pipelines/ @gitlab-bot @gitlab-org/ci-cd/verify/frontend @sheldonled @aalakkad @kpalchyk
## Verify:Runner Fleet Backend
/app/controllers/admin/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/concerns/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/groups/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/projects/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/finders/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/mutations/ci/runner/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/resolvers/ci/*_runners_resolver.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/resolvers/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/types/ci/runner_*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/types/namespace/shared_runners_setting_enum.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/types/permission_types/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/ci/build_runner_session.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/concerns/ci/has_runner_executor.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/concerns/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/preloaders/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/policies/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/presenters/ci/runner_*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/serializers/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/services/groups/update_shared_runners_service.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/services/ci/runners/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/workers/ci/runners/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/db/docs/ci_runner*.yml @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/controllers/ee/admin/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/controllers/ee/groups/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/graphql/ee/mutations/ci/runner/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/graphql/ee/types/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/graphql/resolvers/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/models/ee/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/policies/ee/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/services/audit_events/*runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/services/ci/runners/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/services/ee/ci/runners/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/workers/ci/runners/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/api/ci/helpers/runner.rb @gitlab-org/maintainers/cicd-verify @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/api/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/api/ci/runner.rb @gitlab-org/maintainers/cicd-verify @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/api/entities/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/gitlab/audit/ci_runner_token_author.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/gitlab/ci/runner*.rb @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/gitlab/seeders/ci/runner/ @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/tasks/gitlab/seed/runner_fleet.rake @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/admin/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/concerns/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/groups/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/projects/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/controllers/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/finders/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/mutations/ci/runner/ @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/resolvers/ci/*_runners_resolver.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/resolvers/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/types/ci/runner_*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/types/namespace/shared_runners_setting_enum.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/graphql/types/permission_types/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/ci/build_runner_session.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/concerns/ci/has_runner_executor.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/concerns/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/models/preloaders/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/policies/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/presenters/ci/runner_*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/serializers/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/services/groups/update_shared_runners_service.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/services/ci/runners/ @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/app/workers/ci/runners/ @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/db/docs/ci_runner*.yml @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/controllers/ee/admin/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/controllers/ee/groups/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/graphql/ee/mutations/ci/runner/ @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/graphql/ee/types/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/graphql/resolvers/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/models/ee/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/policies/ee/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/services/audit_events/*runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/services/ci/runners/ @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/services/ee/ci/runners/ @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/ee/app/workers/ci/runners/ @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/api/ci/helpers/runner.rb @gitlab-bot @gitlab-org/maintainers/cicd-verify @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/api/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/api/ci/runner.rb @gitlab-bot @gitlab-org/maintainers/cicd-verify @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/api/entities/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/gitlab/audit/ci_runner_token_author.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/gitlab/ci/runner*.rb @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/gitlab/seeders/ci/runner/ @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
/lib/tasks/gitlab/seed/runner_fleet.rake @gitlab-bot @gitlab-org/ci-cd/runner-fleet-team/backend-approvers
# CI/CD templates require approval from specific owners.
/lib/gitlab/ci/templates/ @gitlab-org/maintainers/cicd-templates
/lib/gitlab/ci/templates/Security/ @gonzoyumo @twoodham @amarpatel
/lib/gitlab/ci/templates/Security/API-Fuzzing.*.yml @gitlab-org/secure/dynamic-analysis
/lib/gitlab/ci/templates/Security/Container-Scanning.*.yml @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.*.yml @gitlab-org/secure/dynamic-analysis
/lib/gitlab/ci/templates/Security/DAST.*.yml @gitlab-org/secure/dynamic-analysis
/lib/gitlab/ci/templates/Security/Dependency-Scanning.*.yml @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Security/License-Scanning.*.yml @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Security/SAST.*.yml @gitlab-org/secure/static-analysis
/lib/gitlab/ci/templates/Security/Secret-Detection.*.yml @gitlab-org/secure/static-analysis
/lib/gitlab/ci/templates/Security/Secure-Binaries.*.yml @gitlab-org/secure/static-analysis @gitlab-org/secure/composition-analysis-be @gitlab-org/secure/dynamic-analysis
/lib/gitlab/ci/templates/ @gitlab-bot @gitlab-org/maintainers/cicd-templates
/lib/gitlab/ci/templates/Security/ @gitlab-bot @gonzoyumo @twoodham @amarpatel
/lib/gitlab/ci/templates/Security/API-Fuzzing.*.yml @gitlab-bot @gitlab-org/secure/dynamic-analysis
/lib/gitlab/ci/templates/Security/Container-Scanning.*.yml @gitlab-bot @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Security/Coverage-Fuzzing.*.yml @gitlab-bot @gitlab-org/secure/dynamic-analysis
/lib/gitlab/ci/templates/Security/DAST.*.yml @gitlab-bot @gitlab-org/secure/dynamic-analysis
/lib/gitlab/ci/templates/Security/Dependency-Scanning.*.yml @gitlab-bot @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Security/License-Scanning.*.yml @gitlab-bot @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Security/SAST.*.yml @gitlab-bot @gitlab-org/secure/static-analysis
/lib/gitlab/ci/templates/Security/Secret-Detection.*.yml @gitlab-bot @gitlab-org/secure/static-analysis
/lib/gitlab/ci/templates/Security/Secure-Binaries.*.yml @gitlab-bot @gitlab-org/secure/static-analysis @gitlab-org/secure/composition-analysis-be @gitlab-org/secure/dynamic-analysis
# Note: The `Fortify-FoD-sast.gitlab-ci.yml` template is provided and maintained by Fortify, an official Technology Partner with GitLab.
/lib/gitlab/ci/templates/Jobs/Container-Scanning.*.yml @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.*.yml @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Jobs/License-Scanning.*.yml @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Jobs/SAST.*.yml @gitlab-org/secure/static-analysis
/lib/gitlab/ci/templates/Jobs/Secret-Detection.*.yml @gitlab-org/secure/static-analysis
/lib/gitlab/ci/templates/Jobs/Container-Scanning.*.yml @gitlab-bot @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.*.yml @gitlab-bot @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Jobs/License-Scanning.*.yml @gitlab-bot @gitlab-org/secure/composition-analysis-be
/lib/gitlab/ci/templates/Jobs/SAST.*.yml @gitlab-bot @gitlab-org/secure/static-analysis
/lib/gitlab/ci/templates/Jobs/Secret-Detection.*.yml @gitlab-bot @gitlab-org/secure/static-analysis
[Data Stores::Tenant Scale] @abdwdd @alexpooley @manojmj
[Data Stores::Tenant Scale] @gitlab-bot @abdwdd @alexpooley @manojmj
lib/api/entities/basic_project_details.rb
lib/api/entities/project_with_access.rb
lib/api/entities/project_identity.rb
lib/api/entities/project.rb
ee/lib/ee/api/entities/project.rb
[Compliance] @gitlab-org/govern/compliance
[Compliance] @gitlab-bot @gitlab-org/govern/compliance
/app/services/audit_events/build_service.rb
/ee/app/services/ee/audit_events/build_service.rb
/ee/spec/services/audit_events/custom_audit_event_service_spec.rb
@ -1490,21 +1492,22 @@ ee/lib/ee/api/entities/project.rb
/app/workers/releases/create_evidence_worker.rb
/app/workers/releases/manage_evidence_worker.rb
^[Fulfillment::Utilization] @sheldonled @aalakkad @kpalchyk
^[Fulfillment::Utilization] @gitlab-bot @sheldonled @aalakkad @kpalchyk
/ee/app/assets/javascripts/usage_quotas/components/
/ee/app/assets/javascripts/usage_quotas/seats/
/ee/app/assets/javascripts/usage_quotas/storage/
[Manage::Foundations] @gitlab-org/manage/foundations/engineering
[Manage::Foundations] @gitlab-bot @gitlab-org/manage/foundations/engineering
/lib/sidebars/
/ee/lib/sidebars/
[Global Search] @gitlab-org/search-team/migration-maintainers
[Global Search] @gitlab-bot @gitlab-org/search-team/migration-maintainers
/ee/elastic/migrate/
/ee/spec/elastic/migrate/
/ee/spec/support/elastic.rb
[Create::IDE - Remote Development Backend] @gitlab-org/maintainers/remote-development/backend
[Create::IDE - Remote Development Backend] @gitlab-bot @gitlab-org/maintainers/remote-development/backend
/ee/app/models/remote_development/
/ee/app/policies/remote_development/
/ee/app/finders/remote_development/
@ -1519,7 +1522,7 @@ ee/lib/ee/api/entities/project.rb
/ee/spec/policies/remote_development/
/ee/spec/requests/api/graphql/mutations/remote_development/
/ee/spec/requests/api/graphql/remote_development/
/ee/spec/features/remote_development/ @gitlab-org/maintainers/remote-development/backend @gitlab-org/maintainers/remote-development/frontend
/ee/spec/features/remote_development/ @gitlab-bot @gitlab-org/maintainers/remote-development/backend @gitlab-org/maintainers/remote-development/frontend
/ee/spec/finders/remote_development/
/ee/spec/support/shared_contexts/remote_development/
/ee/spec/graphql/types/remote_development/
@ -1528,9 +1531,9 @@ ee/lib/ee/api/entities/project.rb
/ee/spec/fixtures/remote_development/
/ee/spec/controllers/remote_development/
/ee/spec/services/remote_development/
/qa/qa/specs/features/**/remote_development/ @gitlab-org/maintainers/remote-development/backend @gl-quality/qe-maintainers
/qa/qa/specs/features/**/remote_development/ @gitlab-bot @gitlab-org/maintainers/remote-development/backend @gl-quality/qe-maintainers
[Create::IDE - Remote Development Frontend] @gitlab-org/maintainers/remote-development/frontend
[Create::IDE - Remote Development Frontend] @gitlab-bot @gitlab-org/maintainers/remote-development/frontend
/ee/app/assets/remote_development/
/ee/app/assets/**/remote_development/
/ee/app/views/remote_development/

View File

@ -7,13 +7,6 @@ import { __, sprintf } from '~/locale';
import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
/**
* Renders header component for job and pipeline page based on UI mockups
*
* Used in:
* - job show page
* - pipeline show page
*/
export default {
components: {
CiBadgeLink,
@ -33,33 +26,21 @@ export default {
type: Object,
required: true,
},
itemName: {
name: {
type: String,
required: true,
},
itemId: {
type: String,
required: false,
default: '',
},
time: {
type: String,
required: true,
},
user: {
type: Object,
required: false,
default: () => ({}),
},
hasSidebarButton: {
type: Boolean,
required: false,
default: false,
required: true,
},
shouldRenderTriggeredLabel: {
type: Boolean,
required: false,
default: true,
required: true,
},
},
@ -92,13 +73,6 @@ export default {
message() {
return this.user?.status?.message;
},
item() {
if (this.itemId) {
return `${this.itemName} #${this.itemId}`;
}
return this.itemName;
},
userId() {
return isGid(this.user?.id) ? getIdFromGraphQLId(this.user?.id) : this.user?.id;
},
@ -121,7 +95,7 @@ export default {
<section class="header-main-content gl-mr-3">
<ci-badge-link class="gl-mr-3" :status="status" />
<strong data-testid="job-header-item-text">{{ item }}</strong>
<strong data-testid="job-name">{{ name }}</strong>
<template v-if="shouldRenderTriggeredLabel">{{ __('started') }}</template>
<template v-else>{{ __('created') }}</template>
@ -165,7 +139,6 @@ export default {
<slot></slot>
</section>
<gl-button
v-if="hasSidebarButton"
class="gl-md-display-none gl-ml-auto gl-align-self-start js-sidebar-build-toggle"
icon="chevron-double-lg-left"
:aria-label="__('Toggle sidebar')"

View File

@ -129,7 +129,7 @@ export default {
return Boolean(this.job.retry_path);
},
itemName() {
jobName() {
return sprintf(__('Job %{jobName}'), { jobName: this.job.name });
},
},
@ -229,9 +229,8 @@ export default {
:status="job.status"
:time="headerTime"
:user="job.user"
:has-sidebar-button="true"
:should-render-triggered-label="shouldRenderTriggeredLabel"
:item-name="itemName"
:name="jobName"
@clickedSidebarButton="toggleSidebar"
/>
</div>

View File

@ -23,7 +23,7 @@ module IssuableCollections
end
def set_pagination
row_count = finder.row_count
row_count = request.format.atom? ? -1 : finder.row_count
@issuables = @issuables.page(params[:page])
@issuables = per_page_for_relative_position if params[:sort] == 'relative_position'

View File

@ -5,17 +5,24 @@ module Types
module Ci
class JobTraceType < BaseObject
graphql_name 'CiJobTrace'
MAX_SIZE_KB = 16
MAX_SIZE_B = MAX_SIZE_KB * 1024
field :html_summary, GraphQL::Types::String, null: false,
alpha: { milestone: '15.11' },
description: 'HTML summary that contains the tail lines of the trace.' do
description: 'HTML summary that contains the tail lines of the trace. ' \
"Returns at most #{MAX_SIZE_KB}KB of raw bytes from the trace. " \
'The returned string might start with an unexpected invalid UTF-8 code point due to truncation.' do
argument :last_lines, Integer,
required: false, default_value: 10,
description: 'Number of tail lines to return, up to a maximum of 100 lines.'
end
def html_summary(last_lines:)
object.html(last_lines: last_lines.clamp(1, 100)).html_safe
object.html(
last_lines: last_lines.clamp(1, 100),
max_size: Feature.enabled?(:graphql_job_trace_html_summary_max_size) ? MAX_SIZE_B : nil
).html_safe
end
end
end

View File

@ -196,6 +196,10 @@ class BulkImports::Entity < ApplicationRecord
update!(has_failures: true)
end
def source_version
@source_version ||= bulk_import.source_version_info
end
private
def validate_parent_is_a_group

View File

@ -35,7 +35,7 @@ module Pages
{
type: 'zip',
path: deployment.file.url_or_file_path(
expire_at: ::Gitlab::Pages::CacheControl::DEPLOYMENT_EXPIRATION.from_now
expire_at: ::Gitlab::Pages::DEPLOYMENT_EXPIRATION.from_now
),
global_id: global_id,
sha256: deployment.file_sha256,

View File

@ -2,9 +2,8 @@
module Pages
class VirtualDomain
def initialize(projects:, cache: nil, trim_prefix: nil, domain: nil)
def initialize(projects:, trim_prefix: nil, domain: nil)
@projects = projects
@cache = cache
@trim_prefix = trim_prefix
@domain = domain
end
@ -25,14 +24,9 @@ module Pages
.reverse
end
# cache_key is required by #present_cached in ::API::Internal::Pages
def cache_key
@cache_key ||= cache&.cache_key
end
private
attr_reader :projects, :trim_prefix, :domain, :cache
attr_reader :projects, :trim_prefix, :domain
def lookup_paths_for(project)
Pages::LookupPath.new(project, trim_prefix: trim_prefix, domain: domain)

View File

@ -1,72 +0,0 @@
# frozen_string_literal: true
module BulkImports
class CreatePipelineTrackersService
def initialize(entity)
@entity = entity
end
def execute!
entity.class.transaction do
entity.pipelines.each do |pipeline|
status = skip_pipeline?(pipeline) ? -2 : 0
entity.trackers.create!(
stage: pipeline[:stage],
pipeline_name: pipeline[:pipeline],
status: status
)
end
end
end
private
attr_reader :entity
def skip_pipeline?(pipeline)
return false unless source_version.valid?
minimum_version, maximum_version = pipeline.values_at(:minimum_source_version, :maximum_source_version)
if minimum_version && non_patch_source_version < Gitlab::VersionInfo.parse(minimum_version)
log_skipped_pipeline(pipeline, minimum_version, maximum_version)
return true
end
if maximum_version && non_patch_source_version > Gitlab::VersionInfo.parse(maximum_version)
log_skipped_pipeline(pipeline, minimum_version, maximum_version)
return true
end
false
end
def source_version
@source_version ||= entity.bulk_import.source_version_info
end
def non_patch_source_version
source_version.without_patch
end
def log_skipped_pipeline(pipeline, minimum_version, maximum_version)
logger.info(
message: 'Pipeline skipped as source instance version not compatible with pipeline',
bulk_import_entity_id: entity.id,
bulk_import_id: entity.bulk_import_id,
bulk_import_entity_type: entity.source_type,
source_full_path: entity.source_full_path,
pipeline_name: pipeline[:pipeline],
minimum_source_version: minimum_version,
maximum_source_version: maximum_version,
source_version: source_version.to_s,
importer: 'gitlab_migration'
)
end
def logger
@logger ||= Gitlab::Import::Logger.build
end
end
end

View File

@ -3369,15 +3369,6 @@
:weight: 1
:idempotent: false
:tags: []
- :name: pages_invalidate_domain_cache
:worker_name: Pages::InvalidateDomainCacheWorker
:feature_category: :pages
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: post_receive
:worker_name: PostReceive
:feature_category: :source_code_management

View File

@ -22,7 +22,7 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
@bulk_import.start! if @bulk_import.created?
created_entities.first(next_batch_size).each do |entity|
BulkImports::CreatePipelineTrackersService.new(entity).execute!
create_tracker(entity)
entity.start!
@ -51,7 +51,7 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
end
def all_entities_failed?
entities.all? { |entity| entity.failed? }
entities.all?(&:failed?)
end
# A new BulkImportWorker job is enqueued to either
@ -72,4 +72,55 @@ class BulkImportWorker # rubocop:disable Scalability/IdempotentWorker
def next_batch_size
[DEFAULT_BATCH_SIZE - started_entities.count, 0].max
end
# Creates one tracker row per pipeline of +entity+, all inside a single
# transaction. Pipelines incompatible with the source version are recorded
# with a :skipped state instead of :created.
def create_tracker(entity)
  entity.class.transaction do
    entity.pipelines.each do |pipeline|
      state = skip_pipeline?(pipeline, entity) ? :skipped : :created

      tracker_attributes = {
        stage: pipeline[:stage],
        pipeline_name: pipeline[:pipeline],
        status: BulkImports::Tracker.state_machine.states[state].value
      }

      entity.trackers.create!(tracker_attributes)
    end
  end
end
# Returns true (and logs the reason) when +entity+'s source instance
# version is valid but falls outside the version range supported by
# +pipeline+. Returns false when the source version is unknown/invalid.
def skip_pipeline?(pipeline, entity)
  return false unless entity.source_version.valid?

  minimum_version, maximum_version = pipeline.values_at(:minimum_source_version, :maximum_source_version)

  # Compare on major.minor only — patch releases never gate pipelines.
  if source_version_out_of_range?(minimum_version, maximum_version, entity.source_version.without_patch)
    log_skipped_pipeline(pipeline, entity, minimum_version, maximum_version)
    return true
  end

  false
end
# Whether +non_patch_source_version+ is below the minimum or above the
# maximum supported version. Either bound may be nil, meaning unbounded
# on that side.
def source_version_out_of_range?(minimum_version, maximum_version, non_patch_source_version)
  below_minimum = minimum_version && non_patch_source_version < Gitlab::VersionInfo.parse(minimum_version)
  above_maximum = maximum_version && non_patch_source_version > Gitlab::VersionInfo.parse(maximum_version)

  below_minimum || above_maximum
end
# Emits a structured info log entry explaining why +pipeline+ was skipped
# for +entity+, including the supported version range and the actual
# source instance version.
def log_skipped_pipeline(pipeline, entity, minimum_version, maximum_version)
  logger.info(
    message: 'Pipeline skipped as source instance version not compatible with pipeline',
    bulk_import_entity_id: entity.id,
    bulk_import_id: entity.bulk_import_id,
    bulk_import_entity_type: entity.source_type,
    source_full_path: entity.source_full_path,
    pipeline_name: pipeline[:pipeline],
    minimum_source_version: minimum_version,
    maximum_source_version: maximum_version,
    source_version: entity.source_version.to_s,
    importer: 'gitlab_migration'
  )
end
# Memoized structured logger used for import-related messages.
def logger
  @logger ||= Gitlab::Import::Logger.build
end
end

View File

@ -1,37 +0,0 @@
# frozen_string_literal: true

module Pages
  # Event-store subscriber that clears cached Pages lookups when a
  # domain- or namespace-related event is published.
  class InvalidateDomainCacheWorker
    include Gitlab::EventStore::Subscriber

    idempotent!
    feature_category :pages

    # Clears the Pages cache for every domain and namespace referenced by
    # the event payload.
    def handle_event(event)
      # Expire per-domain cache entries.
      domain_ids(event).each do |domain_id|
        ::Gitlab::Pages::CacheControl
          .for_domain(domain_id)
          .clear_cache
      end

      # Expire per-namespace cache entries; old and new root namespaces
      # are both included so transfers invalidate either side.
      event.data.values_at(
        :root_namespace_id,
        :old_root_namespace_id,
        :new_root_namespace_id
      ).compact.uniq.each do |namespace_id|
        ::Gitlab::Pages::CacheControl
          .for_namespace(namespace_id)
          .clear_cache
      end
    end

    # Domain ids affected by the event: all domains of the event's project,
    # plus an explicit :domain_id from the payload when present.
    def domain_ids(event)
      ids = PagesDomain.ids_for_project(event.data[:project_id])
      ids << event.data[:domain_id] if event.data[:domain_id]
      ids
    end
  end
end

View File

@ -0,0 +1,8 @@
---
name: api_keyset_pagination_multi_order
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130019
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/422999
milestone: '16.5'
type: development
group: group::authentication and authorization
default_enabled: false

View File

@ -1,8 +1,7 @@
---
name: cache_pages_domain_api
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/88956
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/364127
milestone: '15.2'
name: graphql_job_trace_html_summary_max_size
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/130984
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/425196
milestone: '16.5'
group: group::pipeline execution
type: development
group: group::editor
default_enabled: false

View File

@ -481,8 +481,6 @@
- 1
- - pages_domain_verification
- 1
- - pages_invalidate_domain_cache
- 1
- - personal_access_tokens
- 1
- - pipeline_background

View File

@ -450,3 +450,19 @@ For more information, see:
- [Having different owners](https://www.postgresql.org/message-id/2039.1177339749@sss.pgh.pa.us).
- Stack Overflow: [Resulting errors](https://stackoverflow.com/questions/4368789/error-must-be-owner-of-language-plpgsql).
### Restoring fails due to Git server hook
While restoring from backup, you can encounter an error when the following are true:
- A Git Server Hook (`custom_hook`) is configured using the method for [GitLab version 15.10 and earlier](../server_hooks.md)
- Your GitLab version is 15.11 or later
- You created symlinks to a directory outside of the GitLab-managed locations
The error looks like:
```plaintext
{"level":"fatal","msg":"restore: pipeline: 1 failures encountered:\n - @hashed/path/to/hashed_repository.git (path/to_project): manager: restore custom hooks, \"@hashed/path/to/hashed_repository/<BackupTimestamp>_<GitLabVersion>-ee/001.custom_hooks.tar\": rpc error: code = Internal desc = setting custom hooks: generating prepared vote: walking directory: copying file to hash: read /mnt/gitlab-app/git-data/repositories/+gitaly/tmp/default-repositories.old.<timestamp>.<temporaryfolder>/custom_hooks/compliance-triggers.d: is a directory\n","pid":3256017,"time":"2023-08-10T20:09:44.395Z"}
```
To resolve this, you can update the Git [server hooks](../server_hooks.md) for GitLab version 15.11 and later, and create a new backup.

View File

@ -176,13 +176,15 @@ Running? ... yes
Checking Sidekiq ... Finished
Checking GitLab ...
Checking GitLab App...
Database config exists? ... yes
Database is SQLite ... no
All migrations up? ... yes
GitLab config exists? ... yes
GitLab config outdated? ... no
GitLab config up to date? ... no
Cable config exists? ... yes
Resque config exists? ... yes
Log directory writable? ... yes
Tmp directory writable? ... yes
Init script exists? ... yes

View File

@ -14389,7 +14389,7 @@ CI/CD variables for a GitLab instance.
##### `CiJobTrace.htmlSummary`
HTML summary that contains the tail lines of the trace.
HTML summary that contains the tail lines of the trace. Returns at most 16KB of raw bytes from the trace. The returned string might start with an unexpected invalid UTF-8 code point due to truncation.
WARNING:
**Introduced** in 15.11.

View File

@ -540,6 +540,7 @@ options:
| [Project jobs](../jobs.md#list-project-jobs) | `order_by=id`, `sort=desc` only | Authenticated users only. |
| [Project audit events](../audit_events.md#retrieve-all-project-audit-events) | `order_by=id`, `sort=desc` only | Authenticated users only. |
| [Projects](../projects.md) | `order_by=id` only | Authenticated and unauthenticated users. |
| [Users](../users.md) | `order_by=id`, `order_by=name`, `order_by=username` | Authenticated and unauthenticated users. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/419556) in GitLab 16.5 [with a flag](../../user/feature_flags.md) named `api_keyset_pagination_multi_order`. Disabled by default. |
### Pagination response headers

View File

@ -58,6 +58,8 @@ GET /users
]
```
This endpoint supports [keyset pagination](rest/index.md#keyset-based-pagination). Keyset pagination [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/419556) in GitLab 16.5 [with a flag](../user/feature_flags.md) named `api_keyset_pagination_multi_order`. Disabled by default.
You can also use `?search=` to search for users by name, username, or public email. For example, `/users?search=John`. When you search for a:
- Public email, you must use the full email address to get an exact match. A search might return a partial match. For example, if you search for the email `on@example.com`, the search can return both `on@example.com` and `jon@example.com`.

Binary file not shown.

After

Width:  |  Height:  |  Size: 103 KiB

View File

@ -78,12 +78,12 @@ To use the review apps template:
1. On the left sidebar, select **Search or go to** and
find the project you want to create a review app job for.
1. Select **Build > Environments**.
1. Select **Operate > Environments**.
1. Select **Enable review apps**.
1. Copy the provided code snippet and paste it into your
`.gitlab-ci.yml` file:
![enable review apps modal](img/enable_review_app_v12_8.png)
![enable review apps modal](img/enable_review_app_v16.png)
You can edit this template as needed.

View File

@ -36,16 +36,7 @@ module API
virtual_domain = ::Gitlab::Pages::VirtualHostFinder.new(params[:host]).execute
no_content! unless virtual_domain
if virtual_domain.cache_key.present?
# Cache context is not added to make it easier to expire the cache with
# Gitlab::Pages::CacheControl
present_cached virtual_domain,
cache_context: nil,
with: Entities::Internal::Pages::VirtualDomain,
expires_in: ::Gitlab::Pages::CacheControl::EXPIRE
else
present virtual_domain, with: Entities::Internal::Pages::VirtualDomain
end
present virtual_domain, with: Entities::Internal::Pages::VirtualDomain
end
end
end

View File

@ -141,7 +141,11 @@ module API
users = users.preload(:user_detail)
present paginate(users), options
if Feature.enabled?(:api_keyset_pagination_multi_order)
present paginate_with_strategies(users), options
else
present paginate(users), options
end
end
# rubocop: enable CodeReuse/ActiveRecord

View File

@ -30,9 +30,9 @@ module Gitlab
@job = job
end
def html(last_lines: nil)
def html(last_lines: nil, max_size: nil)
read do |stream|
stream.html(last_lines: last_lines)
stream.html(last_lines: last_lines, max_size: max_size)
end
end

View File

@ -53,18 +53,20 @@ module Gitlab
append(data, 0)
end
def raw(last_lines: nil)
def raw(last_lines: nil, max_size: nil)
return unless valid?
if last_lines.to_i > 0
if max_size.to_i > 0
read_last_lines_with_max_size(last_lines, max_size)
elsif last_lines.to_i > 0
read_last_lines(last_lines)
else
stream.read
end.force_encoding(Encoding.default_external)
end
def html(last_lines: nil)
text = raw(last_lines: last_lines)
def html(last_lines: nil, max_size: nil)
text = raw(last_lines: last_lines, max_size: max_size)
buffer = StringIO.new(text)
::Gitlab::Ci::Ansi2html.convert(buffer).html
end
@ -117,6 +119,37 @@ module Gitlab
to_enum(:reverse_line).first(limit).reverse.join
end
# Reads at most +limit+ lines from the end of the stream while never
# consuming more than +max_size+ bytes. A nil +limit+ means "as many
# lines as fit within the byte budget". Lines are prepended so the
# result preserves original order.
def read_last_lines_with_max_size(limit, max_size)
  linesleft = limit
  result = ''
  reverse_line_with_max_size(max_size) do |line|
    result = line + result
    unless linesleft.nil?
      linesleft -= 1
      # `break` exits the yield loop once enough lines have been read.
      break if linesleft <= 0
    end
  end
  result
end
# Yields lines from the end of the stream towards the beginning, stopping
# once +max_size+ bytes have been consumed. `debris` accumulates the
# partial first line of each backward-read chunk; it is yielded last as
# the earliest (possibly truncated) line within the byte budget.
def reverse_line_with_max_size(max_size)
  stream.seek(0, IO::SEEK_END)
  debris = ''
  sizeleft = max_size

  until sizeleft <= 0 || (buf = read_backward([BUFFER_SIZE, sizeleft].min)).empty?
    sizeleft -= buf.bytesize
    debris, *lines = (buf + debris).each_line.to_a
    lines.reverse_each do |line|
      yield(line.force_encoding(Encoding.default_external))
    end
  end

  yield(debris.force_encoding(Encoding.default_external)) unless debris.empty?
end
def reverse_line
stream.seek(0, IO::SEEK_END)
debris = ''

View File

@ -5,7 +5,7 @@ module Gitlab
class TablesTruncate
GITLAB_SCHEMAS_TO_IGNORE = %i[gitlab_geo gitlab_embedding].freeze
def initialize(database_name:, min_batch_size:, logger: nil, until_table: nil, dry_run: false)
def initialize(database_name:, min_batch_size: 5, logger: nil, until_table: nil, dry_run: false)
@database_name = database_name
@min_batch_size = min_batch_size
@logger = logger
@ -19,19 +19,6 @@ module Gitlab
logger&.info "DRY RUN:" if dry_run
schemas_for_connection = Gitlab::Database.gitlab_schemas_for_connection(connection)
tables_to_truncate = Gitlab::Database::GitlabSchema.tables_to_schema.reject do |_, schema_name|
GITLAB_SCHEMAS_TO_IGNORE.union(schemas_for_connection).include?(schema_name)
end.keys
Gitlab::Database::SharedModel.using_connection(connection) do
Postgresql::DetachedPartition.find_each do |detached_partition|
next if GITLAB_SCHEMAS_TO_IGNORE.union(schemas_for_connection).include?(detached_partition.table_schema)
tables_to_truncate << detached_partition.fully_qualified_table_name
end
end
tables_sorted = Gitlab::Database::TablesSortedByForeignKeys.new(connection, tables_to_truncate).execute
# Checking if all the tables have the write-lock triggers
# to make sure we are deleting the right tables on the right database.
@ -63,10 +50,41 @@ module Gitlab
truncate_tables_in_batches(tables_sorted)
end
# Returns true when any of the tables slated for truncation still contain
# rows on this connection. Always false in a single-database setup, where
# truncation does not apply.
#
# Note: `connection.execute(sql)` is already materialized with `.to_a`, so
# the result is checked directly (the previous `result.to_a` was redundant).
def needs_truncation?
  return false if single_database_setup?

  # One EXISTS subquery per table, UNIONed so a single round-trip answers
  # "does any table have rows?".
  sql = tables_to_truncate
    .map { |table_name| "(SELECT EXISTS( SELECT * FROM #{table_name} ))" }
    .join("\nUNION\n")

  result = with_suppressed_query_analyzers do
    connection.execute(sql).to_a
  end

  result.any? { |row| row['exists'] == true }
end
private
attr_accessor :database_name, :min_batch_size, :logger, :dry_run, :until_table
# Memoized list of tables to truncate on this connection: every table
# whose gitlab schema does not belong to this database (and is not in
# GITLAB_SCHEMAS_TO_IGNORE), plus detached partitions matching the same
# criteria.
def tables_to_truncate
  @tables_to_truncate ||= begin
    schemas_for_connection = Gitlab::Database.gitlab_schemas_for_connection(connection)
    tables = Gitlab::Database::GitlabSchema.tables_to_schema.reject do |_, schema_name|
      GITLAB_SCHEMAS_TO_IGNORE.union(schemas_for_connection).include?(schema_name)
    end.keys

    # Detached partitions live outside tables_to_schema; include the ones
    # whose schema is foreign to this connection as well.
    Gitlab::Database::SharedModel.using_connection(connection) do
      Postgresql::DetachedPartition.find_each do |detached_partition|
        next if GITLAB_SCHEMAS_TO_IGNORE.union(schemas_for_connection).include?(detached_partition.table_schema)

        tables << detached_partition.fully_qualified_table_name
      end
    end

    tables
  end
end
# Memoized ActiveRecord connection for the configured database.
def connection
  @connection ||= Gitlab::Database.database_base_models[database_name].connection
end
@ -133,6 +151,12 @@ module Gitlab
ci_base_model = Gitlab::Database.database_base_models[:ci]
!!Gitlab::Database.db_config_share_with(ci_base_model.connection_db_config)
end
# Runs the block with query analyzers suppressed, since the truncation
# bookkeeping queries intentionally touch tables across gitlab schemas
# and CI partitions.
def with_suppressed_query_analyzers(&block)
  Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection.with_suppressed do
    Gitlab::Database::QueryAnalyzers::Ci::PartitioningRoutingAnalyzer.with_suppressed(&block)
  end
end
end
end
end

View File

@ -36,26 +36,6 @@ module Gitlab
store.subscribe ::MergeRequests::UpdateHeadPipelineWorker, to: ::Ci::PipelineCreatedEvent
store.subscribe ::Namespaces::UpdateRootStatisticsWorker, to: ::Projects::ProjectDeletedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Pages::PageDeployedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Pages::PageDeletedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectDeletedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectCreatedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectPathChangedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectArchivedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectTransferedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker,
to: ::Projects::ProjectAttributesChangedEvent,
if: -> (event) { event.pages_related? }
store.subscribe ::Pages::InvalidateDomainCacheWorker,
to: ::Projects::ProjectFeaturesChangedEvent,
if: -> (event) { event.pages_related? }
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupTransferedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupPathChangedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupDeletedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::PagesDomains::PagesDomainDeletedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::PagesDomains::PagesDomainUpdatedEvent
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::PagesDomains::PagesDomainCreatedEvent
store.subscribe ::MergeRequests::CreateApprovalEventWorker, to: ::MergeRequests::ApprovedEvent
store.subscribe ::MergeRequests::CreateApprovalNoteWorker, to: ::MergeRequests::ApprovedEvent
store.subscribe ::MergeRequests::ResolveTodosAfterApprovalWorker, to: ::MergeRequests::ApprovedEvent

View File

@ -5,6 +5,7 @@ module Gitlab
VERSION = File.read(Rails.root.join("GITLAB_PAGES_VERSION")).strip.freeze
INTERNAL_API_REQUEST_HEADER = 'Gitlab-Pages-Api-Request'
MAX_SIZE = 1.terabyte
DEPLOYMENT_EXPIRATION = 24.hours
include JwtAuthenticatable

View File

@ -38,15 +38,9 @@ module Gitlab
return if namespace.blank?
cache = if Feature.enabled?(:cache_pages_domain_api, namespace)
::Gitlab::Pages::CacheControl.for_namespace(namespace.id)
end
::Pages::VirtualDomain.new(
trim_prefix: namespace.full_path,
projects: namespace.all_projects_with_pages,
cache: cache
)
projects: namespace.all_projects_with_pages)
end
def by_custom_domain(host)
@ -54,15 +48,7 @@ module Gitlab
return unless domain&.pages_deployed?
cache = if Feature.enabled?(:cache_pages_domain_api, domain.project.root_namespace)
::Gitlab::Pages::CacheControl.for_domain(domain.id)
end
::Pages::VirtualDomain.new(
projects: [domain.project],
domain: domain,
cache: cache
)
::Pages::VirtualDomain.new(projects: [domain.project], domain: domain)
end
end
end

View File

@ -10,6 +10,20 @@ module Gitlab
::Packages::BuildInfo => { id: :desc }
}.freeze
SUPPORTED_MULTI_ORDERING = {
Group => { name: [:asc] },
AuditEvent => { id: [:desc] },
User => {
id: [:asc, :desc],
name: [:asc, :desc],
username: [:asc, :desc],
created_at: [:asc, :desc],
updated_at: [:asc, :desc]
},
::Ci::Build => { id: [:desc] },
::Packages::BuildInfo => { id: [:desc] }
}.freeze
# Relation types that are enforced in this list
# enforce the use of keyset pagination, thus erroring out requests
# made with offset pagination above a certain limit.
@ -19,7 +33,11 @@ module Gitlab
ENFORCED_TYPES = [Group].freeze
def self.available_for_type?(relation)
SUPPORTED_ORDERING.key?(relation.klass)
if Feature.enabled?(:api_keyset_pagination_multi_order)
SUPPORTED_MULTI_ORDERING.key?(relation.klass)
else
SUPPORTED_ORDERING.key?(relation.klass)
end
end
def self.available?(cursor_based_request_context, relation)
@ -32,9 +50,16 @@ module Gitlab
end
def self.order_satisfied?(relation, cursor_based_request_context)
order_by_from_request = cursor_based_request_context.order_by
if Feature.enabled?(:api_keyset_pagination_multi_order)
order_by_from_request = cursor_based_request_context.order
sort_from_request = cursor_based_request_context.sort
SUPPORTED_ORDERING[relation.klass] == order_by_from_request
SUPPORTED_MULTI_ORDERING[relation.klass][order_by_from_request]&.include?(sort_from_request)
else
order_by_from_request = cursor_based_request_context.order_by
SUPPORTED_ORDERING[relation.klass] == order_by_from_request
end
end
private_class_method :order_satisfied?
end

View File

@ -32,6 +32,14 @@ module Gitlab
def order_by
{ (params[:order_by]&.to_sym || DEFAULT_SORT_COLUMN) => (params[:sort]&.to_sym || DEFAULT_SORT_DIRECTION) }
end
# Requested order-by column as a symbol, falling back to the default
# when the param is absent.
def order
  params[:order_by]&.to_sym || DEFAULT_SORT_COLUMN
end

# Requested sort direction as a symbol, falling back to the default
# when the param is absent.
def sort
  params[:sort]&.to_sym || DEFAULT_SORT_DIRECTION
end
end
end
end

View File

@ -0,0 +1,34 @@
# frozen_string_literal: true

module SystemCheck
  module App
    # Verifies that, in a multiple-databases setup, legacy tables not
    # belonging to each database have been truncated.
    class TableTruncateCheck < SystemCheck::BaseCheck
      set_name 'Tables are truncated?'

      # The check only applies when running with multiple databases.
      def skip?
        Gitlab::Database.database_mode != Gitlab::Database::MODE_MULTIPLE_DATABASES
      end

      # Collects the rake tasks that still need to run; passes when none
      # remain. (The original used each_with_object with an unused, discarded
      # accumulator; filter_map expresses the intent directly.)
      def check?
        @rake_tasks = Gitlab::Database.database_base_models_with_gitlab_shared.keys.filter_map do |database_name|
          if Gitlab::Database::TablesTruncate.new(database_name: database_name).needs_truncation?
            "gitlab:db:truncate_legacy_tables:#{database_name}"
          end
        end

        @rake_tasks.empty?
      end

      # Suggests the exact rake invocation that fixes the failing databases.
      def show_error
        try_fixing_it(
          sudo_gitlab("bundle exec rake #{@rake_tasks.join(' ')}")
        )
        for_more_information(
          "doc/development/database/multiple_databases.md in section 'Truncating tables'"
        )
        fix_and_rerun
      end
    end
  end
end

View File

@ -13,6 +13,7 @@ module SystemCheck
def self.checks
[
SystemCheck::App::DatabaseConfigExistsCheck,
SystemCheck::App::TableTruncateCheck,
SystemCheck::App::MigrationsAreUpCheck,
SystemCheck::App::OrphanedGroupMembersCheck,
SystemCheck::App::GitlabConfigExistsCheck,

View File

@ -12367,6 +12367,9 @@ msgstr ""
msgid "ComplianceStandardsAdherence|The following features help satisfy this requirement."
msgstr ""
msgid "ComplianceStandardsAdherence|Unable to load the standards adherence report. Refresh the page and try again."
msgstr ""
msgid "ComplianceStandardsAdherence|Update approval settings in the project's merge request settings to satisfy this requirement."
msgstr ""

View File

@ -1,41 +0,0 @@
# frozen_string_literal: true

module QA
  RSpec.describe 'Verify', :runner, product_group: :pipeline_execution do
    describe 'Code coverage statistics' do
      # Unique runner name per run to avoid clashes between parallel jobs.
      let(:executor) { "qa-runner-#{Time.now.to_i}" }
      let(:runner) { create(:project_runner, name: executor, tags: ['e2e-test']) }

      # MR whose CI config echoes a coverage figure matched by the
      # `coverage:` regex, so the pipeline reports 66.67% test coverage.
      let(:merge_request) do
        Resource::MergeRequest.fabricate_via_api! do |mr|
          mr.project = runner.project
          mr.file_name = '.gitlab-ci.yml'
          mr.file_content = <<~EOF
            test:
              tags: [e2e-test]
              coverage: '/\\d+\\.\\d+% covered/'
              script:
                - echo '66.67% covered'
          EOF
        end
      end

      before do
        Flow::Login.sign_in
      end

      after do
        # Clean up the project runner registered for this example.
        runner.remove_via_api!
      end

      it 'creates an MR with code coverage statistics', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348068' do
        merge_request.visit!

        Page::MergeRequest::Show.perform do |mr_widget|
          mr_widget.has_pipeline_status?('passed')
          expect(mr_widget).to have_content('Test coverage 66.67%')
        end
      end
    end
  end
end

View File

@ -120,11 +120,11 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
allow(Kaminari.config).to receive(:default_per_page).and_return(1)
end
it 'redirects to last page when out of bounds on non-html requests' do
it 'does not redirect when out of bounds on non-html requests' do
get :index, params: params.merge(page: last_page + 1), format: 'atom'
expect(response).to have_gitlab_http_status(:redirect)
expect(response).to redirect_to(action: 'index', format: 'atom', page: last_page, state: 'opened')
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:issues).size).to eq(0)
end
end

View File

@ -264,8 +264,6 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
before do
forked_project.add_maintainer(user2)
stub_feature_flags(auto_merge_labels_mr_widget: false)
visit project_merge_request_path(project, merge_request)
page.within('.merge-request-tabs') do

View File

@ -958,4 +958,21 @@ RSpec.describe 'Merge request > User sees merge widget', :js, feature_category:
end
end
end
context 'views MR when pipeline has code coverage enabled' do
let!(:pipeline) { create(:ci_pipeline, status: 'success', project: project, ref: merge_request.source_branch) }
let!(:build) { create(:ci_build, :success, :coverage, pipeline: pipeline) }
before do
merge_request.update!(head_pipeline: pipeline)
visit project_merge_request_path(project, merge_request)
end
it 'shows the coverage' do
within '.ci-widget' do
expect(find_by_testid('pipeline-coverage')).to have_content('Test coverage 99.90% ')
end
end
end
end

View File

@ -2,7 +2,9 @@
require 'spec_helper'
RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, feature_category: :code_review_workflow do
RSpec.describe 'Merge request > User sets to auto-merge', :js, feature_category: :code_review_workflow do
include ContentEditorHelpers
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) do
@ -32,22 +34,23 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
context 'when there is active pipeline for merge request' do
before do
create(:ci_build, pipeline: pipeline)
stub_feature_flags(auto_merge_labels_mr_widget: true)
sign_in(user)
visit project_merge_request_path(project, merge_request)
end
describe 'enabling Merge when pipeline succeeds' do
describe 'setting to auto-merge when pipeline succeeds' do
shared_examples 'Set to auto-merge activator' do
it 'activates the Merge when pipeline succeeds feature', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
it 'activates auto-merge feature', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
close_rich_text_promo_popover_if_present
expect(page).to have_content 'Set to auto-merge'
click_button "Set to auto-merge"
wait_for_requests
expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds"
expect(page).to have_content "Source branch will not be deleted"
expect(page).to have_selector ".js-cancel-auto-merge"
visit project_merge_request_path(project, merge_request) # Needed to refresh the page
expect(page).to have_content /enabled an automatic merge when the pipeline for \h{8} succeeds/i
expect(page).to have_content(/enabled an automatic merge when the pipeline for \h{8} succeeds/i)
end
end
@ -57,6 +60,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
context 'when enabled after it was previously canceled' do
before do
close_rich_text_promo_popover_if_present
click_button "Set to auto-merge"
wait_for_requests
@ -64,14 +68,12 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
click_button "Cancel auto-merge"
wait_for_requests
expect(page).to have_content 'Set to auto-merge'
end
it_behaves_like 'Set to auto-merge activator'
end
context 'when it was enabled and then canceled' do
context 'when it is enabled and then canceled' do
let(:merge_request) do
create(
:merge_request_with_diffs,
@ -94,7 +96,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
end
end
context 'when merge when pipeline succeeds is enabled' do
context 'when there is an active pipeline' do
let(:merge_request) do
create(
:merge_request_with_diffs,
@ -112,12 +114,13 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
end
before do
stub_feature_flags(auto_merge_labels_mr_widget: true)
sign_in user
visit project_merge_request_path(project, merge_request)
end
it 'allows to cancel the automatic merge', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410494' do
it 'allows to cancel the auto-merge', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410055' do
close_rich_text_promo_popover_if_present
click_button "Cancel auto-merge"
expect(page).to have_button "Set to auto-merge"
@ -128,22 +131,13 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, featur
end
end
context 'when pipeline is not active' do
it 'does not allow to enable merge when pipeline succeeds' do
stub_feature_flags(auto_merge_labels_mr_widget: false)
visit project_merge_request_path(project, merge_request)
expect(page).not_to have_link 'Merge when pipeline succeeds'
context 'when there is no active pipeline' do
before do
sign_in user
visit project_merge_request_path(project, merge_request.reload)
end
end
context 'when pipeline is not active and auto_merge_labels_mr_widget on' do
it 'does not allow to enable merge when pipeline succeeds' do
stub_feature_flags(auto_merge_labels_mr_widget: true)
visit project_merge_request_path(project, merge_request)
it 'does not allow to set to auto-merge' do
expect(page).not_to have_link 'Set to auto-merge'
end
end

View File

@ -16,6 +16,7 @@ describe('Header CI Component', () => {
text: 'failed',
details_path: 'path',
},
name: 'Job build_job',
time: '2017-05-08T14:57:39.781Z',
user: {
id: 1234,
@ -25,7 +26,7 @@ describe('Header CI Component', () => {
email: 'foo@bar.com',
avatar_url: 'link',
},
hasSidebarButton: true,
shouldRenderTriggeredLabel: true,
};
const findCiBadgeLink = () => wrapper.findComponent(CiBadgeLink);
@ -34,7 +35,7 @@ describe('Header CI Component', () => {
const findSidebarToggleBtn = () => wrapper.findComponent(GlButton);
const findStatusTooltip = () => wrapper.findComponent(GlTooltip);
const findActionButtons = () => wrapper.findByTestId('job-header-action-buttons');
const findHeaderItemText = () => wrapper.findByTestId('job-header-item-text');
const findJobName = () => wrapper.findByTestId('job-name');
const createComponent = (props, slots) => {
wrapper = extendedWrapper(
@ -50,7 +51,7 @@ describe('Header CI Component', () => {
describe('render', () => {
beforeEach(() => {
createComponent({ itemName: 'Pipeline' });
createComponent();
});
it('should render status badge', () => {
@ -72,7 +73,7 @@ describe('Header CI Component', () => {
describe('user avatar', () => {
beforeEach(() => {
createComponent({ itemName: 'Pipeline' });
createComponent();
});
it('contains the username', () => {
@ -93,7 +94,6 @@ describe('Header CI Component', () => {
beforeEach(() => {
createComponent({
itemName: 'Pipeline',
user: { ...defaultProps.user, status: { message: STATUS_MESSAGE } },
});
});
@ -108,7 +108,6 @@ describe('Header CI Component', () => {
beforeEach(() => {
createComponent({
itemName: 'Pipeline',
user: { ...defaultProps.user, id: `gid://gitlab/User/${1}` },
});
});
@ -125,29 +124,19 @@ describe('Header CI Component', () => {
});
});
describe('with item id', () => {
describe('job name', () => {
beforeEach(() => {
createComponent({ itemName: 'Pipeline', itemId: '123' });
createComponent();
});
it('should render item name and id', () => {
expect(findHeaderItemText().text()).toBe('Pipeline #123');
});
});
describe('without item id', () => {
beforeEach(() => {
createComponent({ itemName: 'Job build_job' });
});
it('should render item name', () => {
expect(findHeaderItemText().text()).toBe('Job build_job');
it('should render the job name', () => {
expect(findJobName().text()).toBe('Job build_job');
});
});
describe('slot', () => {
it('should render header action buttons', () => {
createComponent({ itemName: 'Job build_job' }, { slots: { default: 'Test Actions' } });
createComponent({}, { slots: { default: 'Test Actions' } });
expect(findActionButtons().exists()).toBe(true);
expect(findActionButtons().text()).toBe('Test Actions');
@ -156,7 +145,7 @@ describe('Header CI Component', () => {
describe('shouldRenderTriggeredLabel', () => {
it('should render created keyword when the shouldRenderTriggeredLabel is false', () => {
createComponent({ shouldRenderTriggeredLabel: false, itemName: 'Job build_job' });
createComponent({ shouldRenderTriggeredLabel: false });
expect(wrapper.text()).toContain('created');
expect(wrapper.text()).not.toContain('started');

View File

@ -26,7 +26,7 @@ RSpec.describe GitlabSchema.types['CiJobTrace'], feature_category: :continuous_i
it 'shows the correct trace contents' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 10).and_call_original
expect(trace).to receive(:html).with(last_lines: 10, max_size: 16384).and_call_original
end
is_expected.to eq('<span>BUILD TRACE</span>')
@ -48,7 +48,7 @@ RSpec.describe GitlabSchema.types['CiJobTrace'], feature_category: :continuous_i
it 'shows the last 10 lines of trace contents' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 10).and_call_original
expect(trace).to receive(:html).with(last_lines: 10, max_size: 16384).and_call_original
end
is_expected.to eq expected_html_trace_contents(10)
@ -60,7 +60,7 @@ RSpec.describe GitlabSchema.types['CiJobTrace'], feature_category: :continuous_i
it 'shows the last line of trace contents' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 1).and_call_original
expect(trace).to receive(:html).with(last_lines: 1, max_size: 16384).and_call_original
end
is_expected.to eq expected_html_trace_contents(1)
@ -72,7 +72,7 @@ RSpec.describe GitlabSchema.types['CiJobTrace'], feature_category: :continuous_i
it 'shows the correct trace contents' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 10).and_call_original
expect(trace).to receive(:html).with(last_lines: 10, max_size: 16384).and_call_original
end
is_expected.to eq expected_html_trace_contents(10)
@ -84,12 +84,116 @@ RSpec.describe GitlabSchema.types['CiJobTrace'], feature_category: :continuous_i
it 'shows the last 100 lines of trace contents' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 100).and_call_original
expect(trace).to receive(:html).with(last_lines: 100, max_size: 16384).and_call_original
end
is_expected.to eq expected_html_trace_contents(100)
end
end
end
context 'when trace contains long lines' do
before do
# Creates lines of "aaaaaaaa...aaaaaaaa"
job.trace.set((1..20).map { (1..1024).map { "a" }.join("") }.join("\n"))
end
context 'when last_lines is lower than 16KB' do
let(:args) { {} }
it 'shows the whole lines' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 10, max_size: 16384).and_call_original
end
is_expected.to eq "<span>#{(1..10).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
end
end
context 'when last_lines is higher than 16KB' do
let(:args) { { last_lines: 20 } }
it 'shows only the latest byte' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 20, max_size: 16384).and_call_original
end
is_expected.to eq "<span>#{(1..1009).map { 'a' }.join('')}<br/>" \
"#{(1..15).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
end
end
context 'when FF graphql_job_trace_html_summary_max_size is disabled' do
before do
stub_feature_flags(graphql_job_trace_html_summary_max_size: false)
end
let(:args) { { last_lines: 20 } }
it 'does not limit the read size from the raw trace' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 20, max_size: nil).and_call_original
end
is_expected.to eq "<span>#{(1..20).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
end
end
context 'when trace is cut in middle of a line' do
let(:args) { {} }
before do
stub_const('Types::Ci::JobTraceType::MAX_SIZE_B', 1536)
end
it 'shows only the latest byte' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 10, max_size: 1536).and_call_original
end
is_expected.to eq "<span>#{(1..511).map { 'a' }.join('')}<br/>#{(1..1024).map { 'a' }.join('')}</span>"
end
end
context 'when trace is cut at end of a line' do
let(:args) { {} }
before do
stub_const('Types::Ci::JobTraceType::MAX_SIZE_B', 2050)
end
it 'shows only the latest byte' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 10, max_size: 2050).and_call_original
end
is_expected.to eq "<span><br/>#{(1..2).map { (1..1024).map { 'a' }.join('') }.join('<br/>')}</span>"
end
end
end
context 'when trace contains multi-bytes UTF-8' do
before do
# Creates lines of 4 pound symbol, pound symbol is 2 byte wise in UTF-8
# Append an "a" (1 byte character) at the end to cut in the middle of UTF-8
job.trace.set((1..20).map { (1..4).map { "£" }.join("") }.join("\n"))
end
context 'when cut in the middle of a codepoint' do
before do
stub_const('Types::Ci::JobTraceType::MAX_SIZE_B', 5)
end
let(:args) { {} }
it 'shows a single "invalid utf-8" symbol' do
expect_next_instance_of(Gitlab::Ci::Trace) do |trace|
expect(trace).to receive(:html).with(last_lines: 10, max_size: 5).and_call_original
end
is_expected.to eq "<span><3E>££</span>"
end
end
end
end
end

View File

@ -243,6 +243,56 @@ RSpec.describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
expect(result.encoding).to eq(Encoding.default_external)
end
end
context 'limit max size' do
before do
# specifying BUFFER_SIZE forces to seek backwards
allow(described_class).to receive(:BUFFER_SIZE)
.and_return(2)
end
it 'returns every lines with respect of the size' do
all_lines = lines.join
max_size = all_lines.bytesize.div(2)
result = stream.raw(max_size: max_size)
expect(result.bytes).to eq(all_lines.bytes[-max_size..])
expect(result.lines.count).to be > 1
expect(result.encoding).to eq(Encoding.default_external)
end
it 'returns everything if trying to get too many bytes' do
all_lines = lines.join
result = stream.raw(max_size: all_lines.bytesize * 2)
expect(result).to eq(all_lines)
expect(result.encoding).to eq(Encoding.default_external)
end
end
context 'limit max lines and max size' do
before do
# specifying BUFFER_SIZE forces to seek backwards
allow(described_class).to receive(:BUFFER_SIZE)
.and_return(2)
end
it 'returns max lines if max size is greater' do
result = stream.raw(last_lines: 2, max_size: lines.join.bytesize * 2)
expect(result).to eq(lines.last(2).join)
expect(result.encoding).to eq(Encoding.default_external)
end
it 'returns max size if max lines is greater' do
all_lines = lines.join
max_size = all_lines.bytesize.div(2)
result = stream.raw(last_lines: lines.size * 2, max_size: max_size)
expect(result.bytes).to eq(all_lines.bytes[-max_size..])
expect(result.encoding).to eq(Encoding.default_external)
end
end
end
let(:path) { __FILE__ }

View File

@ -9,6 +9,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
let(:min_batch_size) { 1 }
let(:main_connection) { ApplicationRecord.connection }
let(:ci_connection) { Ci::ApplicationRecord.connection }
let(:logger) { instance_double(Logger) }
# Main Database
let(:main_db_main_item_model) { table("_test_gitlab_main_items", database: "main") }
@ -32,8 +33,123 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
table("gitlab_partitions_dynamic._test_gitlab_hook_logs_202201", database: "ci")
end
before do
skip_if_shared_database(:ci)
# Creating some test tables on the main database
main_tables_sql = <<~SQL
CREATE TABLE _test_gitlab_main_items (id serial NOT NULL PRIMARY KEY);
CREATE TABLE _test_gitlab_main_references (
id serial NOT NULL PRIMARY KEY,
item_id BIGINT NOT NULL,
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
);
CREATE TABLE _test_gitlab_hook_logs (
id bigserial not null,
created_at timestamptz not null,
item_id BIGINT NOT NULL,
PRIMARY KEY (id, created_at),
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
) PARTITION BY RANGE(created_at);
CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202201
PARTITION OF _test_gitlab_hook_logs
FOR VALUES FROM ('20220101') TO ('20220131');
CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202202
PARTITION OF _test_gitlab_hook_logs
FOR VALUES FROM ('20220201') TO ('20220228');
ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201;
SQL
execute_on_each_database(main_tables_sql)
ci_tables_sql = <<~SQL
CREATE TABLE _test_gitlab_ci_items (id serial NOT NULL PRIMARY KEY);
CREATE TABLE _test_gitlab_ci_references (
id serial NOT NULL PRIMARY KEY,
item_id BIGINT NOT NULL,
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_ci_items(id)
);
SQL
execute_on_each_database(ci_tables_sql)
internal_tables_sql = <<~SQL
CREATE TABLE _test_gitlab_shared_items (id serial NOT NULL PRIMARY KEY);
SQL
execute_on_each_database(internal_tables_sql)
# Filling the tables
5.times do |i|
# Main Database
main_db_main_item_model.create!(id: i)
main_db_main_reference_model.create!(item_id: i)
main_db_ci_item_model.create!(id: i)
main_db_ci_reference_model.create!(item_id: i)
main_db_shared_item_model.create!(id: i)
main_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
main_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
# CI Database
ci_db_main_item_model.create!(id: i)
ci_db_main_reference_model.create!(item_id: i)
ci_db_ci_item_model.create!(id: i)
ci_db_ci_reference_model.create!(item_id: i)
ci_db_shared_item_model.create!(id: i)
ci_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
ci_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
end
Gitlab::Database::SharedModel.using_connection(main_connection) do
Postgresql::DetachedPartition.create!(
table_name: '_test_gitlab_hook_logs_202201',
drop_after: Time.current
)
end
Gitlab::Database::SharedModel.using_connection(ci_connection) do
Postgresql::DetachedPartition.create!(
table_name: '_test_gitlab_hook_logs_202201',
drop_after: Time.current
)
end
allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return(
{
"_test_gitlab_main_items" => :gitlab_main,
"_test_gitlab_main_references" => :gitlab_main,
"_test_gitlab_hook_logs" => :gitlab_main,
"_test_gitlab_ci_items" => :gitlab_ci,
"_test_gitlab_ci_references" => :gitlab_ci,
"_test_gitlab_shared_items" => :gitlab_shared,
"_test_gitlab_geo_items" => :gitlab_geo
}
)
allow(Gitlab::Database::GitlabSchema).to receive(:views_and_tables_to_schema).and_return(
{
"_test_gitlab_main_items" => :gitlab_main,
"_test_gitlab_main_references" => :gitlab_main,
"_test_gitlab_hook_logs" => :gitlab_main,
"_test_gitlab_ci_items" => :gitlab_ci,
"_test_gitlab_ci_references" => :gitlab_ci,
"_test_gitlab_shared_items" => :gitlab_shared,
"_test_gitlab_geo_items" => :gitlab_geo,
"detached_partitions" => :gitlab_shared,
"postgres_foreign_keys" => :gitlab_shared,
"postgres_partitions" => :gitlab_shared
}
)
allow(logger).to receive(:info).with(any_args)
end
shared_examples 'truncating legacy tables on a database' do
let(:logger) { instance_double(Logger) }
let(:dry_run) { false }
let(:until_table) { nil }
@ -47,122 +163,6 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
).execute
end
before do
skip_if_shared_database(:ci)
# Creating some test tables on the main database
main_tables_sql = <<~SQL
CREATE TABLE _test_gitlab_main_items (id serial NOT NULL PRIMARY KEY);
CREATE TABLE _test_gitlab_main_references (
id serial NOT NULL PRIMARY KEY,
item_id BIGINT NOT NULL,
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
);
CREATE TABLE _test_gitlab_hook_logs (
id bigserial not null,
created_at timestamptz not null,
item_id BIGINT NOT NULL,
PRIMARY KEY (id, created_at),
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
) PARTITION BY RANGE(created_at);
CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202201
PARTITION OF _test_gitlab_hook_logs
FOR VALUES FROM ('20220101') TO ('20220131');
CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202202
PARTITION OF _test_gitlab_hook_logs
FOR VALUES FROM ('20220201') TO ('20220228');
ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201;
SQL
execute_on_each_database(main_tables_sql)
ci_tables_sql = <<~SQL
CREATE TABLE _test_gitlab_ci_items (id serial NOT NULL PRIMARY KEY);
CREATE TABLE _test_gitlab_ci_references (
id serial NOT NULL PRIMARY KEY,
item_id BIGINT NOT NULL,
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_ci_items(id)
);
SQL
execute_on_each_database(ci_tables_sql)
internal_tables_sql = <<~SQL
CREATE TABLE _test_gitlab_shared_items (id serial NOT NULL PRIMARY KEY);
SQL
execute_on_each_database(internal_tables_sql)
# Filling the tables
5.times do |i|
# Main Database
main_db_main_item_model.create!(id: i)
main_db_main_reference_model.create!(item_id: i)
main_db_ci_item_model.create!(id: i)
main_db_ci_reference_model.create!(item_id: i)
main_db_shared_item_model.create!(id: i)
main_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
main_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
# CI Database
ci_db_main_item_model.create!(id: i)
ci_db_main_reference_model.create!(item_id: i)
ci_db_ci_item_model.create!(id: i)
ci_db_ci_reference_model.create!(item_id: i)
ci_db_shared_item_model.create!(id: i)
ci_db_partitioned_item.create!(item_id: i, created_at: '2022-02-02 02:00')
ci_db_partitioned_item_detached.create!(item_id: i, created_at: '2022-01-01 01:00')
end
Gitlab::Database::SharedModel.using_connection(main_connection) do
Postgresql::DetachedPartition.create!(
table_name: '_test_gitlab_hook_logs_202201',
drop_after: Time.current
)
end
Gitlab::Database::SharedModel.using_connection(ci_connection) do
Postgresql::DetachedPartition.create!(
table_name: '_test_gitlab_hook_logs_202201',
drop_after: Time.current
)
end
allow(Gitlab::Database::GitlabSchema).to receive(:tables_to_schema).and_return(
{
"_test_gitlab_main_items" => :gitlab_main,
"_test_gitlab_main_references" => :gitlab_main,
"_test_gitlab_hook_logs" => :gitlab_main,
"_test_gitlab_ci_items" => :gitlab_ci,
"_test_gitlab_ci_references" => :gitlab_ci,
"_test_gitlab_shared_items" => :gitlab_shared,
"_test_gitlab_geo_items" => :gitlab_geo
}
)
allow(Gitlab::Database::GitlabSchema).to receive(:views_and_tables_to_schema).and_return(
{
"_test_gitlab_main_items" => :gitlab_main,
"_test_gitlab_main_references" => :gitlab_main,
"_test_gitlab_hook_logs" => :gitlab_main,
"_test_gitlab_ci_items" => :gitlab_ci,
"_test_gitlab_ci_references" => :gitlab_ci,
"_test_gitlab_shared_items" => :gitlab_shared,
"_test_gitlab_geo_items" => :gitlab_geo,
"detached_partitions" => :gitlab_shared,
"postgres_foreign_keys" => :gitlab_shared,
"postgres_partitions" => :gitlab_shared
}
)
allow(logger).to receive(:info).with(any_args)
end
context 'when the truncated tables are not locked for writes' do
it 'raises an error that the tables are not locked for writes' do
error_message = /is not locked for writes. Run the rake task gitlab:db:lock_writes first/
@ -348,6 +348,50 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
end
end
describe '#needs_truncation?' do
let(:database_name) { 'ci' }
subject { described_class.new(database_name: database_name).needs_truncation? }
context 'when running in a single database mode' do
before do
skip_if_multiple_databases_are_setup(:ci)
end
it { is_expected.to eq(false) }
end
context 'when running in a multiple database mode' do
before do
skip_if_shared_database(:ci)
end
context 'with main data in ci database' do
it { is_expected.to eq(true) }
end
context 'with no main data in ci datatabase' do
before do
# Remove 'main' data in ci database
ci_connection.truncate_tables([:_test_gitlab_main_items, :_test_gitlab_main_references])
end
it { is_expected.to eq(false) }
it 'supresses some QueryAnalyzers' do
expect(
Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection
).to receive(:with_suppressed).and_call_original
expect(
Gitlab::Database::QueryAnalyzers::Ci::PartitioningRoutingAnalyzer
).to receive(:with_suppressed).and_call_original
subject
end
end
end
end
def geo_configured?
!!ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'geo')
end

View File

@ -1,88 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Pages::CacheControl, feature_category: :pages do
RSpec.shared_examples 'cache_control' do |type|
it { expect(subject.cache_key).to match(/pages_domain_for_#{type}_1_*/) }
describe '#clear_cache', :use_clean_rails_redis_caching do
before do
Rails.cache.write("pages_domain_for_#{type}_1", ['settings-hash'])
Rails.cache.write("pages_domain_for_#{type}_1_settings-hash", 'payload')
end
it 'clears the cache' do
cached_keys = [
"pages_domain_for_#{type}_1_settings-hash",
"pages_domain_for_#{type}_1"
]
expect(::Gitlab::AppLogger)
.to receive(:info)
.with(
message: 'clear pages cache',
pages_keys: cached_keys,
pages_type: type,
pages_id: 1
)
expect(Rails.cache)
.to receive(:delete_multi)
.with(cached_keys)
subject.clear_cache
end
end
end
describe '.for_namespace' do
subject(:cache_control) { described_class.for_namespace(1) }
it_behaves_like 'cache_control', :namespace
end
describe '.for_domain' do
subject(:cache_control) { described_class.for_domain(1) }
it_behaves_like 'cache_control', :domain
end
describe '#cache_key' do
it 'does not change the pages config' do
expect { described_class.new(type: :domain, id: 1).cache_key }
.not_to change(Gitlab.config, :pages)
end
it 'is based on pages settings' do
access_control = Gitlab.config.pages.access_control
cache_key = described_class.new(type: :domain, id: 1).cache_key
stub_config(pages: { access_control: !access_control })
expect(described_class.new(type: :domain, id: 1).cache_key).not_to eq(cache_key)
end
it 'is based on the force_pages_access_control settings' do
force_pages_access_control = ::Gitlab::CurrentSettings.force_pages_access_control
cache_key = described_class.new(type: :domain, id: 1).cache_key
::Gitlab::CurrentSettings.force_pages_access_control = !force_pages_access_control
expect(described_class.new(type: :domain, id: 1).cache_key).not_to eq(cache_key)
end
it 'caches the application settings hash' do
expect(Rails.cache)
.to receive(:write)
.with('pages_domain_for_domain_1', kind_of(Set))
described_class.new(type: :domain, id: 1).cache_key
end
end
it 'fails with invalid type' do
expect { described_class.new(type: :unknown, id: nil) }
.to raise_error(ArgumentError, 'type must be :namespace or :domain')
end
end

View File

@ -40,23 +40,9 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
it 'returns the virual domain when there are pages deployed for the project' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to match(/pages_domain_for_domain_#{pages_domain.id}_/)
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
context 'when :cache_pages_domain_api is disabled' do
before do
stub_feature_flags(cache_pages_domain_api: false)
end
it 'returns the virual domain when there are pages deployed for the project' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to be_nil
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
end
end
end
@ -76,23 +62,8 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
expect(virtual_domain.lookup_paths.length).to eq(0)
end
context 'when :cache_pages_domain_api is disabled' do
before do
stub_feature_flags(cache_pages_domain_api: false)
end
it 'returns the virual domain with no lookup_paths' do
virtual_domain = described_class.new("#{project.namespace.path}.example.com".downcase).execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to be_nil
expect(virtual_domain.lookup_paths.length).to eq(0)
end
end
end
context 'when there are pages deployed for the project' do
@ -111,7 +82,6 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
@ -120,25 +90,9 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
virtual_domain = described_class.new("#{project.namespace.path}.Example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to match(/pages_domain_for_namespace_#{project.namespace.id}_/)
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
context 'when :cache_pages_domain_api is disabled' do
before_all do
stub_feature_flags(cache_pages_domain_api: false)
end
it 'returns the virual domain when there are pages deployed for the project' do
virtual_domain = described_class.new("#{project.namespace.path}.example.com").execute
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.cache_key).to be_nil
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
end
end
end
@ -187,18 +141,6 @@ RSpec.describe Gitlab::Pages::VirtualHostFinder, feature_category: :pages do
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
end
context 'when :cache_pages_domain_api is disabled' do
before do
stub_feature_flags(cache_pages_domain_api: false)
end
it 'returns the virual domain when there are pages deployed for the project' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths.length).to eq(1)
expect(virtual_domain.lookup_paths.first.project_id).to eq(project.id)
end
end
end
end

View File

@ -6,20 +6,52 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
subject { described_class }
describe '.available_for_type?' do
it 'returns true for Group' do
expect(subject.available_for_type?(Group.all)).to be_truthy
context 'with api_keyset_pagination_multi_order FF disabled' do
before do
stub_feature_flags(api_keyset_pagination_multi_order: false)
end
it 'returns true for Group' do
expect(subject.available_for_type?(Group.all)).to be_truthy
end
it 'returns true for Ci::Build' do
expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
end
it 'returns true for Packages::BuildInfo' do
expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
end
it 'return false for User' do
expect(subject.available_for_type?(User.all)).to be_falsey
end
end
it 'returns true for Ci::Build' do
expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
end
context 'with api_keyset_pagination_multi_order FF enabled' do
before do
stub_feature_flags(api_keyset_pagination_multi_order: true)
end
it 'returns true for Packages::BuildInfo' do
expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
end
it 'returns true for Group' do
expect(subject.available_for_type?(Group.all)).to be_truthy
end
it 'return false for other types of relations' do
expect(subject.available_for_type?(User.all)).to be_falsey
it 'returns true for Ci::Build' do
expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
end
it 'returns true for Packages::BuildInfo' do
expect(subject.available_for_type?(Packages::BuildInfo.all)).to be_truthy
end
it 'returns true for User' do
expect(subject.available_for_type?(User.all)).to be_truthy
end
it 'return false for other types of relations' do
expect(subject.available_for_type?(Issue.all)).to be_falsey
end
end
end
@ -58,7 +90,7 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
end
it 'return false for other types of relations' do
expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_falsey
end
@ -68,16 +100,48 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
let(:order_by) { :id }
let(:sort) { :desc }
it 'returns true for Ci::Build' do
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
context 'with api_keyset_pagination_multi_order FF disabled' do
before do
stub_feature_flags(api_keyset_pagination_multi_order: false)
end
it 'returns true for Ci::Build' do
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
end
it 'returns true for AuditEvent' do
expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
end
it 'returns true for Packages::BuildInfo' do
expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
end
it 'returns false for User' do
expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
end
end
it 'returns true for AuditEvent' do
expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
end
context 'with api_keyset_pagination_multi_order FF enabled' do
before do
stub_feature_flags(api_keyset_pagination_multi_order: true)
end
it 'returns true for Packages::BuildInfo' do
expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
it 'returns true for Ci::Build' do
expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
end
it 'returns true for AuditEvent' do
expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
end
it 'returns true for Packages::BuildInfo' do
expect(subject.available?(cursor_based_request_context, Packages::BuildInfo.all)).to be_truthy
end
it 'returns true for User' do
expect(subject.available?(cursor_based_request_context, User.all)).to be_truthy
end
end
end
@ -90,7 +154,7 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
end
it 'return false for other types of relations' do
expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
expect(subject.available?(cursor_based_request_context, Issue.all)).to be_falsey
end
end
end

View File

@ -0,0 +1,75 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe SystemCheck::App::TableTruncateCheck, feature_category: :cell do
context 'when running on single databases' do
before do
skip_if_database_exists(:ci)
end
describe '#skip?' do
subject { described_class.new.skip? }
it { is_expected.to eq(true) }
end
end
context 'when running on multiple databases' do
let(:needs_truncation) { true }
before do
skip_if_shared_database(:ci)
allow_next_instances_of(Gitlab::Database::TablesTruncate, 2) do |instance|
allow(instance).to receive(:needs_truncation?).and_return(needs_truncation)
end
end
describe '#skip?' do
subject { described_class.new.skip? }
it { is_expected.to eq(false) }
end
describe '#check?' do
subject { described_class.new.check? }
context 'when TableTruncate returns false' do
let(:needs_truncation) { false }
it { is_expected.to eq(true) }
end
context 'when TableTruncate returns true' do
let(:needs_truncation) { true }
it { is_expected.to eq(false) }
end
end
describe '#show_error' do
let(:needs_truncation) { true }
let(:checker) { described_class.new }
before do
checker.check?
end
subject(:show_error) { checker.show_error }
it 'outputs error information' do
expected = %r{
Try\sfixing\sit:\s+
sudo\s-u\s.+?\s-H\sbundle\sexec\srake\sgitlab:db:truncate_legacy_tables:main\s
gitlab:db:truncate_legacy_tables:ci\s+
For\smore\sinformation\ssee:\s+
doc/development/database/multiple_databases.md\sin\ssection\s'Truncating\stables'\s+
Please\sfix\sthe\serror\sabove\sand\srerun\sthe\schecks.\s+
}x
expect { show_error }.to output(expected).to_stdout
end
end
end
end

View File

@ -438,4 +438,12 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
end
end
end
describe '#source_version' do
subject { build(:bulk_import_entity, :group_entity) }
it 'pulls the source version from the associated BulkImport' do
expect(subject.source_version).to eq(subject.bulk_import.source_version_info)
end
end
end

View File

@ -73,19 +73,4 @@ RSpec.describe Pages::VirtualDomain, feature_category: :pages do
end
end
end
describe '#cache_key' do
it 'returns the cache key based in the given cache_control' do
cache_control = instance_double(::Gitlab::Pages::CacheControl, cache_key: 'cache_key')
virtual_domain = described_class.new(projects: [instance_double(Project)], cache: cache_control)
expect(virtual_domain.cache_key).to eq('cache_key')
end
it 'returns nil when no cache_control is given' do
virtual_domain = described_class.new(projects: [instance_double(Project)])
expect(virtual_domain.cache_key).to be_nil
end
end
end

View File

@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile do
include WorkhorseHelpers
include KeysetPaginationHelpers
let_it_be(:admin) { create(:admin) }
let_it_be(:user, reload: true) { create(:user, username: 'user.withdot') }
@ -258,6 +259,48 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile
end.not_to exceed_all_query_limit(control_count)
end
end
context 'when api_keyset_pagination_multi_order FF is enabled' do
before do
stub_feature_flags(api_keyset_pagination_multi_order: true)
end
it_behaves_like 'an endpoint with keyset pagination', invalid_order: nil do
let(:first_record) { user }
let(:second_record) { admin }
let(:api_call) { api(path, user) }
end
it 'still supports offset pagination when keyset pagination params are not provided' do
get api(path, user)
expect(response).to include_pagination_headers
end
end
context 'when api_keyset_pagination_multi_order FF is disabled' do
before do
stub_feature_flags(api_keyset_pagination_multi_order: false)
end
it 'paginates the records correctly using offset pagination' do
get api(path, user), params: { pagination: 'keyset', per_page: 1 }
params_for_next_page = pagination_params_from_next_url(response)
expect(response).to include_pagination_headers
expect(params_for_next_page).not_to include('cursor')
end
context 'on making requests with unsupported ordering structure' do
it 'does not return error' do
get api(path, user),
params: { pagination: 'keyset', per_page: 1, order_by: 'created_at', sort: 'asc' }
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
end
end
end
end
end

View File

@ -1,176 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

# Verifies that the service creates one tracker per configured pipeline and
# skips pipelines whose minimum/maximum source-version requirements the
# source instance does not satisfy.
RSpec.describe BulkImports::CreatePipelineTrackersService, feature_category: :importers do
  describe '#execute!' do
    context 'when entity is group' do
      it 'creates trackers for group entity' do
        bulk_import = create(:bulk_import)
        entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)

        described_class.new(entity).execute!

        # Asserts a representative pipeline per stage, not the full list.
        expect(entity.trackers.to_a).to include(
          have_attributes(
            stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
          ),
          have_attributes(
            stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
          )
        )
      end
    end

    context 'when entity is project' do
      it 'creates trackers for project entity' do
        bulk_import = create(:bulk_import)
        entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)

        described_class.new(entity).execute!

        expect(entity.trackers.to_a).to include(
          have_attributes(
            stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
          ),
          have_attributes(
            stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
          )
        )
      end
    end

    context 'when tracker configuration has a minimum version defined' do
      before do
        # Stub the stage config with fake pipelines so version gating can be
        # tested independently of the real pipeline definitions.
        allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
          allow(stage).to receive(:config).and_return(
            {
              pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
              pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
              pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
              pipeline4: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
              pipeline5: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
            }
          )
        end
      end

      context 'when the source instance version is older than the tracker minimum version' do
        let_it_be(:bulk_import) { create(:bulk_import, source_version: '15.0.0') }
        let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }

        it 'creates trackers as skipped if version requirement is not met' do
          described_class.new(entity).execute!

          # 15.0.0 satisfies minimums up to and including 15.0.0; 15.1.0 and
          # 16.0.0 are above it, so those pipelines are skipped.
          expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
            [:created, 'PipelineClass1'],
            [:created, 'PipelineClass2'],
            [:created, 'PipelineClass3'],
            [:skipped, 'PipelineClass4'],
            [:skipped, 'PipelineClass5']
          )
        end

        it 'logs an info message for the skipped pipelines' do
          expect_next_instance_of(Gitlab::Import::Logger) do |logger|
            expect(logger).to receive(:info).with({
              message: 'Pipeline skipped as source instance version not compatible with pipeline',
              bulk_import_entity_id: entity.id,
              bulk_import_id: entity.bulk_import_id,
              bulk_import_entity_type: entity.source_type,
              source_full_path: entity.source_full_path,
              importer: 'gitlab_migration',
              pipeline_name: 'PipelineClass4',
              minimum_source_version: '15.1.0',
              maximum_source_version: nil,
              source_version: '15.0.0'
            })

            expect(logger).to receive(:info).with({
              message: 'Pipeline skipped as source instance version not compatible with pipeline',
              bulk_import_entity_id: entity.id,
              bulk_import_id: entity.bulk_import_id,
              bulk_import_entity_type: entity.source_type,
              source_full_path: entity.source_full_path,
              importer: 'gitlab_migration',
              pipeline_name: 'PipelineClass5',
              minimum_source_version: '16.0.0',
              maximum_source_version: nil,
              source_version: '15.0.0'
            })
          end

          described_class.new(entity).execute!
        end
      end

      context 'when the source instance version is undefined' do
        # An unknown source version cannot be compared, so no pipeline is skipped.
        it 'creates trackers as created' do
          bulk_import = create(:bulk_import, source_version: nil)
          entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)

          described_class.new(entity).execute!

          expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
            [:created, 'PipelineClass1'],
            [:created, 'PipelineClass2'],
            [:created, 'PipelineClass3'],
            [:created, 'PipelineClass4'],
            [:created, 'PipelineClass5']
          )
        end
      end
    end

    context 'when tracker configuration has a maximum version defined' do
      before do
        allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
          allow(stage).to receive(:config).and_return(
            {
              pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
              pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
              pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
              pipeline4: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
              pipeline5: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
            }
          )
        end
      end

      # The source (15.0.0) exceeds only pipeline2's maximum (14.10.0), which
      # is the single skipped pipeline — description fixed from "older".
      context 'when the source instance version is newer than the tracker maximum version' do
        it 'creates trackers as skipped if version requirement is not met' do
          bulk_import = create(:bulk_import, source_version: '15.0.0')
          entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)

          described_class.new(entity).execute!

          expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
            [:created, 'PipelineClass1'],
            [:skipped, 'PipelineClass2'],
            [:created, 'PipelineClass3'],
            [:created, 'PipelineClass4'],
            [:created, 'PipelineClass5']
          )
        end
      end

      context 'when the source instance version is a patch version' do
        # Patch level must be ignored when comparing against the maximum version.
        it 'creates trackers with the same status as the non-patch source version' do
          bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
          entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)

          bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
          entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)

          described_class.new(entity_1).execute!
          described_class.new(entity_2).execute!

          trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
          trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }

          expect(trackers_1).to eq(trackers_2)
        end
      end
    end
  end
end

View File

@ -49,7 +49,8 @@ RSpec.shared_examples "protected branches > access control > CE" do
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to include(access_type_id)
end
it "allows updating protected branches so that #{access_type_name} can merge to them" do
it "allows updating protected branches so that #{access_type_name} can merge to them",
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/425080' do
visit project_protected_branches_path(project)
show_add_form

View File

@ -40,9 +40,11 @@ RSpec.shared_examples 'an endpoint with keyset pagination' do |invalid_order: 'n
# The merge residue left both the old unconditional `it` (with no closing
# `end`) and the new guarded version; only the guarded version is kept so the
# block is balanced and endpoints that accept every ordering (invalid_order
# nil) skip this negative example.
context 'on making requests with unsupported ordering structure' do
  let(:additional_params) { { order_by: invalid_order, sort: invalid_sort } }

  if invalid_order
    it 'returns error', :aggregate_failures do
      is_expected.to have_gitlab_http_status(:method_not_allowed)
      expect(json_response['error']).to eq('Keyset pagination is not yet available for this type of request')
    end
  end
end
end

View File

@ -137,5 +137,174 @@ RSpec.describe BulkImportWorker, feature_category: :importers do
end
end
end
context 'when importing a group' do
  it 'creates trackers for group entity' do
    bulk_import = create(:bulk_import)
    entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)

    subject.perform(bulk_import.id)

    # Asserts a representative pipeline per stage, not the full tracker list.
    expect(entity.trackers.to_a).to include(
      have_attributes(
        stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
      ),
      have_attributes(
        stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
      )
    )
  end
end
context 'when importing a project' do
  it 'creates trackers for project entity' do
    bulk_import = create(:bulk_import)
    entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)

    subject.perform(bulk_import.id)

    # Asserts a representative pipeline per stage, not the full tracker list.
    expect(entity.trackers.to_a).to include(
      have_attributes(
        stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
      ),
      have_attributes(
        stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
      )
    )
  end
end
context 'when tracker configuration has a minimum version defined' do
  before do
    # Stub the stage config with fake pipelines so version gating can be
    # tested independently of the real pipeline definitions.
    allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
      allow(stage).to receive(:config).and_return(
        {
          pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
          pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
          pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
          pipeline4: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
          pipeline5: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
        }
      )
    end
  end

  context 'when the source instance version is older than the tracker minimum version' do
    let_it_be(:bulk_import) { create(:bulk_import, source_version: '15.0.0') }
    let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }

    it 'creates trackers as skipped if version requirement is not met' do
      subject.perform(bulk_import.id)

      # 15.0.0 satisfies minimums up to and including 15.0.0; 15.1.0 and
      # 16.0.0 are above it, so those pipelines are skipped.
      expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
        [:created, 'PipelineClass1'],
        [:created, 'PipelineClass2'],
        [:created, 'PipelineClass3'],
        [:skipped, 'PipelineClass4'],
        [:skipped, 'PipelineClass5']
      )
    end

    it 'logs an info message for the skipped pipelines' do
      expect_next_instance_of(Gitlab::Import::Logger) do |logger|
        expect(logger).to receive(:info).with({
          message: 'Pipeline skipped as source instance version not compatible with pipeline',
          bulk_import_entity_id: entity.id,
          bulk_import_id: entity.bulk_import_id,
          bulk_import_entity_type: entity.source_type,
          source_full_path: entity.source_full_path,
          importer: 'gitlab_migration',
          pipeline_name: 'PipelineClass4',
          minimum_source_version: '15.1.0',
          maximum_source_version: nil,
          source_version: '15.0.0'
        })

        expect(logger).to receive(:info).with({
          message: 'Pipeline skipped as source instance version not compatible with pipeline',
          bulk_import_entity_id: entity.id,
          bulk_import_id: entity.bulk_import_id,
          bulk_import_entity_type: entity.source_type,
          source_full_path: entity.source_full_path,
          importer: 'gitlab_migration',
          pipeline_name: 'PipelineClass5',
          minimum_source_version: '16.0.0',
          maximum_source_version: nil,
          source_version: '15.0.0'
        })
      end

      subject.perform(bulk_import.id)
    end
  end

  context 'when the source instance version is undefined' do
    # An unknown source version cannot be compared, so no pipeline is skipped.
    it 'creates trackers as created' do
      bulk_import = create(:bulk_import, source_version: nil)
      entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)

      subject.perform(bulk_import.id)

      expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
        [:created, 'PipelineClass1'],
        [:created, 'PipelineClass2'],
        [:created, 'PipelineClass3'],
        [:created, 'PipelineClass4'],
        [:created, 'PipelineClass5']
      )
    end
  end
end
context 'when tracker configuration has a maximum version defined' do
  before do
    # Stub the stage config with fake pipelines so version gating can be
    # tested independently of the real pipeline definitions.
    allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
      allow(stage).to receive(:config).and_return(
        {
          pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
          pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
          pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
          pipeline4: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
          pipeline5: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
        }
      )
    end
  end

  # The source (15.0.0) exceeds only pipeline2's maximum (14.10.0), which is
  # the single skipped pipeline — description fixed from "older".
  context 'when the source instance version is newer than the tracker maximum version' do
    it 'creates trackers as skipped if version requirement is not met' do
      bulk_import = create(:bulk_import, source_version: '15.0.0')
      entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)

      subject.perform(bulk_import.id)

      expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
        [:created, 'PipelineClass1'],
        [:skipped, 'PipelineClass2'],
        [:created, 'PipelineClass3'],
        [:created, 'PipelineClass4'],
        [:created, 'PipelineClass5']
      )
    end
  end

  context 'when the source instance version is a patch version' do
    # Patch level must be ignored when comparing against the maximum version.
    it 'creates trackers with the same status as the non-patch source version' do
      bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
      entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)

      bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
      entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)

      # perform_inline runs each job on a fresh worker instance, unlike the
      # memoized `subject` used elsewhere in this file.
      described_class.perform_inline(bulk_import_1.id)
      described_class.perform_inline(bulk_import_2.id)

      trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
      trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }

      expect(trackers_1).to eq(trackers_2)
    end
  end
end
end
end

View File

@ -1,267 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
# Verifies that the worker clears the right Gitlab::Pages::CacheControl
# entries (namespace- and domain-scoped) for each Pages-related event it
# subscribes to, and ignores events that are not Pages-related.
RSpec.describe Pages::InvalidateDomainCacheWorker, feature_category: :pages do
  # Asserts that consuming `event_class` built from `event_data` clears
  # exactly the caches listed in `caches` ({ type:, id: } pairs).
  shared_examples 'clears caches with' do |event_class:, event_data:, caches:|
    include AfterNextHelpers

    let(:event) { event_class.new(data: event_data) }

    subject { consume_event(subscriber: described_class, event: event) }

    it_behaves_like 'subscribes to event'

    it 'clears the cache with Gitlab::Pages::CacheControl' do
      # Set all expectations before consuming the event.
      caches.each do |cache|
        expect_next(Gitlab::Pages::CacheControl, type: cache[:type], id: cache[:id])
          .to receive(:clear_cache)
      end

      subject
    end
  end

  context 'when a project have multiple domains' do
    include AfterNextHelpers

    let_it_be(:project) { create(:project) }
    let_it_be(:pages_domain) { create(:pages_domain, project: project) }
    let_it_be(:pages_domain2) { create(:pages_domain, project: project) }

    let(:event) do
      Pages::PageDeployedEvent.new(
        data: {
          project_id: project.id,
          namespace_id: project.namespace_id,
          root_namespace_id: project.root_ancestor.id
        }
      )
    end

    subject { consume_event(subscriber: described_class, event: event) }

    # One namespace-level clear plus one domain-level clear per attached domain.
    it 'clears the cache with Gitlab::Pages::CacheControl' do
      expect_next(Gitlab::Pages::CacheControl, type: :namespace, id: project.namespace_id)
        .to receive(:clear_cache)
      expect_next(Gitlab::Pages::CacheControl, type: :domain, id: pages_domain.id)
        .to receive(:clear_cache)
      expect_next(Gitlab::Pages::CacheControl, type: :domain, id: pages_domain2.id)
        .to receive(:clear_cache)

      subject
    end
  end

  # Project- and group-lifecycle events clear the root namespace cache.
  it_behaves_like 'clears caches with',
    event_class: Pages::PageDeployedEvent,
    event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
    caches: [
      { type: :namespace, id: 3 }
    ]

  it_behaves_like 'clears caches with',
    event_class: Pages::PageDeletedEvent,
    event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
    caches: [
      { type: :namespace, id: 3 }
    ]

  it_behaves_like 'clears caches with',
    event_class: Projects::ProjectDeletedEvent,
    event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
    caches: [
      { type: :namespace, id: 3 }
    ]

  it_behaves_like 'clears caches with',
    event_class: Projects::ProjectCreatedEvent,
    event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
    caches: [
      { type: :namespace, id: 3 }
    ]

  it_behaves_like 'clears caches with',
    event_class: Projects::ProjectArchivedEvent,
    event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
    caches: [
      { type: :namespace, id: 3 }
    ]

  it_behaves_like 'clears caches with',
    event_class: Projects::ProjectPathChangedEvent,
    event_data: {
      project_id: 1,
      namespace_id: 2,
      root_namespace_id: 3,
      old_path: 'old_path',
      new_path: 'new_path'
    },
    caches: [
      { type: :namespace, id: 3 }
    ]

  # Transfers clear both the old and the new root namespace caches.
  it_behaves_like 'clears caches with',
    event_class: Projects::ProjectTransferedEvent,
    event_data: {
      project_id: 1,
      old_namespace_id: 2,
      old_root_namespace_id: 3,
      new_namespace_id: 4,
      new_root_namespace_id: 5
    },
    caches: [
      { type: :namespace, id: 3 },
      { type: :namespace, id: 5 }
    ]

  it_behaves_like 'clears caches with',
    event_class: Groups::GroupTransferedEvent,
    event_data: {
      group_id: 1,
      old_root_namespace_id: 3,
      new_root_namespace_id: 5
    },
    caches: [
      { type: :namespace, id: 3 },
      { type: :namespace, id: 5 }
    ]

  it_behaves_like 'clears caches with',
    event_class: Groups::GroupPathChangedEvent,
    event_data: {
      group_id: 1,
      root_namespace_id: 2,
      old_path: 'old_path',
      new_path: 'new_path'
    },
    caches: [
      { type: :namespace, id: 2 }
    ]

  it_behaves_like 'clears caches with',
    event_class: Groups::GroupDeletedEvent,
    event_data: {
      group_id: 1,
      root_namespace_id: 3
    },
    caches: [
      { type: :namespace, id: 3 }
    ]

  # Domain events clear both the domain cache and the root namespace cache.
  it_behaves_like 'clears caches with',
    event_class: PagesDomains::PagesDomainDeletedEvent,
    event_data: {
      project_id: 1,
      namespace_id: 2,
      root_namespace_id: 3,
      domain_id: 4,
      domain: 'somedomain.com'
    },
    caches: [
      { type: :domain, id: 4 },
      { type: :namespace, id: 3 }
    ]

  it_behaves_like 'clears caches with',
    event_class: PagesDomains::PagesDomainUpdatedEvent,
    event_data: {
      project_id: 1,
      namespace_id: 2,
      root_namespace_id: 3,
      domain_id: 4,
      domain: 'somedomain.com'
    },
    caches: [
      { type: :domain, id: 4 },
      { type: :namespace, id: 3 }
    ]

  it_behaves_like 'clears caches with',
    event_class: PagesDomains::PagesDomainCreatedEvent,
    event_data: {
      project_id: 1,
      namespace_id: 2,
      root_namespace_id: 3,
      domain_id: 4,
      domain: 'somedomain.com'
    },
    caches: [
      { type: :domain, id: 4 },
      { type: :namespace, id: 3 }
    ]

  context 'when project attributes change' do
    # Every Pages-related attribute triggers cache invalidation...
    Projects::ProjectAttributesChangedEvent::PAGES_RELATED_ATTRIBUTES.each do |attribute|
      it_behaves_like 'clears caches with',
        event_class: Projects::ProjectAttributesChangedEvent,
        event_data: {
          project_id: 1,
          namespace_id: 2,
          root_namespace_id: 3,
          domain_id: 4,
          attributes: [attribute]
        },
        caches: [
          { type: :domain, id: 4 },
          { type: :namespace, id: 3 }
        ]
    end

    # ...while unrelated attributes leave the caches untouched.
    it_behaves_like 'ignores the published event' do
      let(:event) do
        Projects::ProjectAttributesChangedEvent.new(
          data: {
            project_id: 1,
            namespace_id: 2,
            root_namespace_id: 3,
            attributes: ['unknown']
          }
        )
      end
    end
  end

  context 'when project features change' do
    it_behaves_like 'clears caches with',
      event_class: Projects::ProjectFeaturesChangedEvent,
      event_data: {
        project_id: 1,
        namespace_id: 2,
        root_namespace_id: 3,
        features: ['pages_access_level']
      },
      caches: [
        { type: :namespace, id: 3 }
      ]

    it_behaves_like 'ignores the published event' do
      let(:event) do
        Projects::ProjectFeaturesChangedEvent.new(
          data: {
            project_id: 1,
            namespace_id: 2,
            root_namespace_id: 3,
            features: ['unknown']
          }
        )
      end
    end
  end

  context 'when namespace based cache keys are duplicated' do
    # de-dups namespace cache keys: old and new root namespace are the same,
    # so only a single clear is expected.
    it_behaves_like 'clears caches with',
      event_class: Projects::ProjectTransferedEvent,
      event_data: {
        project_id: 1,
        old_namespace_id: 2,
        old_root_namespace_id: 5,
        new_namespace_id: 4,
        new_root_namespace_id: 5
      },
      caches: [
        { type: :namespace, id: 5 }
      ]
  end
end