Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2023-01-26 12:10:19 +00:00
parent db950c5706
commit a1c0b634f7
57 changed files with 1082 additions and 846 deletions

View File

@ -0,0 +1,116 @@
include:
  - local: .gitlab/ci/rails/shared.gitlab-ci.yml

db:rollback:
  extends: .db-job-base
  script:
    - scripts/db_tasks db:migrate VERSION=20210602155110
    - scripts/db_tasks db:migrate SKIP_SCHEMA_VERSION_CHECK=true

db:rollback single-db:
  extends:
    - db:rollback
    - .single-db
    - .rails:rules:single-db

db:migrate:reset:
  extends: .db-job-base
  script:
    - bundle exec rake db:migrate:reset

db:migrate:reset single-db:
  extends:
    - db:migrate:reset
    - .single-db
    - .rails:rules:single-db

db:check-schema:
  extends:
    - .db-job-base
    - .rails:rules:ee-mr-and-default-branch-only
  script:
    - run_timed_command "bundle exec rake db:drop db:create"
    - run_timed_command "scripts/db_tasks db:migrate"

db:check-schema-single-db:
  extends:
    - db:check-schema
    - .single-db
    - .rails:rules:single-db

db:check-migrations:
  extends:
    - .db-job-base
    - .rails:rules:ee-and-foss-mr-with-migration
  script:
    - git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME --depth 20
    - scripts/validate_migration_schema
  allow_failure: true

db:check-migrations-single-db:
  extends:
    - db:check-migrations
    - .single-db
    - .rails:rules:single-db

db:post_deployment_migrations_validator:
  extends:
    - .db-job-base
    - .rails:rules:ee-and-foss-mr-with-migration
  script:
    - git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME --depth 20
    - scripts/post_deployment_migrations_validator
  allow_failure: true

db:post_deployment_migrations_validator-single-db:
  extends:
    - db:post_deployment_migrations_validator
    - .single-db
    - .rails:rules:single-db

db:migrate-non-superuser:
  extends:
    - .db-job-base
    - .rails:rules:ee-and-foss-mr-with-migration
  script:
    - bundle exec rake gitlab:db:reset_as_non_superuser

db:gitlabcom-database-testing:
  extends: .rails:rules:db:gitlabcom-database-testing
  stage: test
  image: ruby:${RUBY_VERSION}-alpine
  needs: []
  allow_failure: true
  script:
    - source scripts/utils.sh
    - install_gitlab_gem
    - ./scripts/trigger-build.rb gitlab-com-database-testing

db:backup_and_restore:
  extends:
    - .db-job-base
    - .rails:rules:db-backup
  variables:
    SETUP_DB: "false"
    GITLAB_ASSUME_YES: "1"
  script:
    - . scripts/prepare_build.sh
    - bundle exec rake db:drop db:create db:structure:load db:seed_fu
    - mkdir -p tmp/tests/public/uploads tmp/tests/{artifacts,pages,lfs-objects,terraform_state,registry,packages}
    - bundle exec rake gitlab:backup:create
    - date
    - bundle exec rake gitlab:backup:restore

db:backup_and_restore single-db:
  extends:
    - db:backup_and_restore
    - .single-db
    - .rails:rules:db-backup

db:rollback geo:
  extends:
    - db:rollback
    - .rails:rules:ee-only-migration
  script:
    - bundle exec rake db:migrate:geo VERSION=20170627195211
    - bundle exec rake db:migrate:geo

View File

@ -210,91 +210,6 @@ rspec fast_spec_helper:
# Load fast_spec_helper as well just in case there are no specs available.
- bin/rspec --dry-run spec/fast_spec_helper.rb $fast_spec_helper_specs
db:rollback:
extends: .db-job-base
script:
- scripts/db_tasks db:migrate VERSION=20210602155110
- scripts/db_tasks db:migrate SKIP_SCHEMA_VERSION_CHECK=true
db:rollback single-db:
extends:
- db:rollback
- .single-db
- .rails:rules:single-db
db:migrate:reset:
extends: .db-job-base
script:
- bundle exec rake db:migrate:reset
db:migrate:reset single-db:
extends:
- db:migrate:reset
- .single-db
- .rails:rules:single-db
db:check-schema:
extends:
- .db-job-base
- .rails:rules:ee-mr-and-default-branch-only
script:
- run_timed_command "bundle exec rake db:drop db:create"
- run_timed_command "scripts/db_tasks db:migrate"
db:check-schema-single-db:
extends:
- db:check-schema
- .single-db
- .rails:rules:single-db
db:check-migrations:
extends:
- .db-job-base
- .rails:rules:ee-and-foss-mr-with-migration
script:
- git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME --depth 20
- scripts/validate_migration_schema
allow_failure: true
db:check-migrations-single-db:
extends:
- db:check-migrations
- .single-db
- .rails:rules:single-db
db:post_deployment_migrations_validator:
extends:
- .db-job-base
- .rails:rules:ee-and-foss-mr-with-migration
script:
- git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME --depth 20
- scripts/post_deployment_migrations_validator
allow_failure: true
db:post_deployment_migrations_validator-single-db:
extends:
- db:post_deployment_migrations_validator
- .single-db
- .rails:rules:single-db
db:migrate-non-superuser:
extends:
- .db-job-base
- .rails:rules:ee-and-foss-mr-with-migration
script:
- bundle exec rake gitlab:db:reset_as_non_superuser
db:gitlabcom-database-testing:
extends: .rails:rules:db:gitlabcom-database-testing
stage: test
image: ruby:${RUBY_VERSION}-alpine
needs: []
allow_failure: true
script:
- source scripts/utils.sh
- install_gitlab_gem
- ./scripts/trigger-build.rb gitlab-com-database-testing
gitlab:setup:
extends: .db-job-base
variables:
@ -312,27 +227,6 @@ gitlab:setup:
paths:
- log/*.log
db:backup_and_restore:
extends:
- .db-job-base
- .rails:rules:db-backup
variables:
SETUP_DB: "false"
GITLAB_ASSUME_YES: "1"
script:
- . scripts/prepare_build.sh
- bundle exec rake db:drop db:create db:structure:load db:seed_fu
- mkdir -p tmp/tests/public/uploads tmp/tests/{artifacts,pages,lfs-objects,terraform_state,registry,packages}
- bundle exec rake gitlab:backup:create
- date
- bundle exec rake gitlab:backup:restore
db:backup_and_restore single-db:
extends:
- db:backup_and_restore
- .single-db
- .rails:rules:db-backup
rspec:deprecations:
extends:
- .default-retry
@ -716,14 +610,6 @@ rspec-ee system pg12 single-db:
- rspec-ee system pg12
- .single-db-rspec
- .rails:rules:single-db
db:rollback geo:
extends:
- db:rollback
- .rails:rules:ee-only-migration
script:
- bundle exec rake db:migrate:geo VERSION=20170627195211
- bundle exec rake db:migrate:geo
# EE: default refs (MRs, default branch, schedules) jobs #
##################################################
@ -977,7 +863,7 @@ fail-pipeline-early:
extends:
- .rails:rules:rerun-previous-failed-tests
stage: test
needs: ["setup-test-env", "compile-test-assets", "detect-previous-failed-tests"]
needs: ["setup-test-env", "retrieve-tests-metadata", "compile-test-assets", "detect-previous-failed-tests"]
script:
- !reference [.base-script, script]
- rspec_rerun_previous_failed_tests "${PREVIOUS_FAILED_TESTS_FILE}"
@ -995,6 +881,5 @@ rspec rspec-ee-pg12-rerun-previous-failed-tests:
- .base-rspec-pg12-rerun-previous-failed-tests
variables:
PREVIOUS_FAILED_TESTS_FILE: tmp/previous_failed_tests/rspec_ee_failed_files.txt
# EE: Canonical MR pipelines
##################################################

View File

@ -509,6 +509,11 @@ RSpec/FactoriesInMigrationSpecs:
- 'spec/lib/ee/gitlab/background_migration/**/*.rb'
- 'ee/spec/lib/ee/gitlab/background_migration/**/*.rb'
RSpec/MissingFeatureCategory:
Enabled: true
Exclude:
- 'qa/**/*.rb'
RSpec/FactoryBot/AvoidCreate:
Enabled: true
Include:

View File

@ -2619,406 +2619,6 @@ RSpec/MissingFeatureCategory:
- 'ee/spec/workers/update_max_seats_used_for_gitlab_com_subscriptions_worker_spec.rb'
- 'ee/spec/workers/vulnerability_exports/export_deletion_worker_spec.rb'
- 'ee/spec/workers/vulnerability_exports/export_worker_spec.rb'
- 'qa/qa/specs/features/api/12_systems/gitaly/automatic_failover_and_recovery_spec.rb'
- 'qa/qa/specs/features/api/12_systems/gitaly/backend_node_recovery_spec.rb'
- 'qa/qa/specs/features/api/12_systems/gitaly/changing_repository_storage_spec.rb'
- 'qa/qa/specs/features/api/12_systems/gitaly/gitaly_mtls_spec.rb'
- 'qa/qa/specs/features/api/12_systems/gitaly/praefect_connectivity_spec.rb'
- 'qa/qa/specs/features/api/12_systems/gitaly/praefect_dataloss_spec.rb'
- 'qa/qa/specs/features/api/12_systems/gitaly/praefect_repo_sync_spec.rb'
- 'qa/qa/specs/features/api/1_manage/group_access_token_spec.rb'
- 'qa/qa/specs/features/api/1_manage/import/import_github_repo_spec.rb'
- 'qa/qa/specs/features/api/1_manage/integrations/webhook_events_spec.rb'
- 'qa/qa/specs/features/api/1_manage/migration/gitlab_migration_group_spec.rb'
- 'qa/qa/specs/features/api/1_manage/migration/gitlab_migration_issue_spec.rb'
- 'qa/qa/specs/features/api/1_manage/migration/gitlab_migration_large_project_spec.rb'
- 'qa/qa/specs/features/api/1_manage/migration/gitlab_migration_members_spec.rb'
- 'qa/qa/specs/features/api/1_manage/migration/gitlab_migration_mr_spec.rb'
- 'qa/qa/specs/features/api/1_manage/migration/gitlab_migration_pipeline_spec.rb'
- 'qa/qa/specs/features/api/1_manage/migration/gitlab_migration_project_spec.rb'
- 'qa/qa/specs/features/api/1_manage/migration/gitlab_migration_release_spec.rb'
- 'qa/qa/specs/features/api/1_manage/project_access_token_spec.rb'
- 'qa/qa/specs/features/api/1_manage/rate_limits_spec.rb'
- 'qa/qa/specs/features/api/1_manage/user_access_termination_spec.rb'
- 'qa/qa/specs/features/api/1_manage/user_inherited_access_spec.rb'
- 'qa/qa/specs/features/api/3_create/merge_request/push_options_labels_spec.rb'
- 'qa/qa/specs/features/api/3_create/merge_request/push_options_mwps_spec.rb'
- 'qa/qa/specs/features/api/3_create/merge_request/push_options_remove_source_branch_spec.rb'
- 'qa/qa/specs/features/api/3_create/merge_request/push_options_target_branch_spec.rb'
- 'qa/qa/specs/features/api/3_create/merge_request/push_options_title_description_spec.rb'
- 'qa/qa/specs/features/api/3_create/repository/commit_to_templated_project_spec.rb'
- 'qa/qa/specs/features/api/3_create/repository/default_branch_name_setting_spec.rb'
- 'qa/qa/specs/features/api/3_create/repository/push_postreceive_idempotent_spec.rb'
- 'qa/qa/specs/features/api/3_create/repository/storage_size_spec.rb'
- 'qa/qa/specs/features/api/3_create/repository/tag_revision_trigger_prereceive_hook_spec.rb'
- 'qa/qa/specs/features/api/3_create/snippet/snippet_repository_storage_move_spec.rb'
- 'qa/qa/specs/features/api/4_verify/api_variable_inheritance_with_forward_pipeline_variables_spec.rb'
- 'qa/qa/specs/features/api/4_verify/cancel_pipeline_when_block_user_spec.rb'
- 'qa/qa/specs/features/api/4_verify/file_variable_spec.rb'
- 'qa/qa/specs/features/api/8_monitor/metrics_spec.rb'
- 'qa/qa/specs/features/browser_ui/14_analytics/performance_bar_spec.rb'
- 'qa/qa/specs/features/browser_ui/14_analytics/service_ping_default_enabled_spec.rb'
- 'qa/qa/specs/features/browser_ui/14_analytics/service_ping_disabled_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/group/create_group_with_mattermost_team_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/group/group_access_token_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/group/group_member_access_request_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/group/transfer_group_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/group/transfer_project_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/import/import_github_repo_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/integrations/jenkins/jenkins_build_status_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/integrations/jira/jira_basic_integration_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/integrations/jira/jira_issue_import_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/integrations/pipeline_status_emails_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/2fa_recovery_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/2fa_ssh_recovery_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/log_in_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/log_in_with_2fa_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/log_into_gitlab_via_ldap_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/log_into_mattermost_via_gitlab_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/login_via_instance_wide_saml_sso_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/maintain_log_in_mixed_env_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/login/register_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/migration/gitlab_migration_group_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/project/add_project_member_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/project/create_project_badge_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/project/create_project_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/project/dashboard_images_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/project/invite_group_to_project_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/project/project_access_token_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/project/project_owner_permissions_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/project/view_project_activity_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/user/follow_user_activity_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/user/impersonation_token_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/user/parent_group_access_termination_spec.rb'
- 'qa/qa/specs/features/browser_ui/1_manage/user/user_inherited_access_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/design_management/add_design_content_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/design_management/archive_design_content_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/design_management/modify_design_content_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/email/trigger_email_notification_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/check_mentions_for_xss_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/collapse_comments_in_discussions_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/comment_issue_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/create_issue_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/custom_issue_template_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/export_as_csv_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/filter_issue_comments_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/issue_suggestions_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/mentions_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue/real_time_assignee_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/issue_boards/focus_mode_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/milestone/assign_milestone_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/milestone/create_group_milestone_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/milestone/create_project_milestone_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/related_issues/related_issues_spec.rb'
- 'qa/qa/specs/features/browser_ui/2_plan/transient/comment_on_discussion_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/cherry_pick/cherry_pick_a_merge_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/cherry_pick/cherry_pick_commit_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/create_merge_request_from_push_notification_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/create_merge_request_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/create_merge_request_via_template_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/merge_merge_request_from_fork_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/merge_when_pipeline_succeeds_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/rebase_merge_request_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/revert/revert_commit_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/revert/reverting_merge_request_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/squash_merge_request_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/suggestions/batch_suggestion_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/suggestions/custom_commit_suggestion_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/view_merge_request_diff_patch_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/pages/new_static_page_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/project_wiki/project_based_content_creation_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/project_wiki/project_based_content_manipulation_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/project_wiki/project_based_directory_management_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/project_wiki/project_based_file_upload_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/project_wiki/project_based_list_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/project_wiki/project_based_page_deletion_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/add_file_template_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/add_list_delete_branches_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/branch_with_unusual_name_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/clone_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/file/create_file_via_web_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/file/delete_file_via_web_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/file/edit_file_via_web_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/file/file_with_unusual_name_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/license_detection_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/move_project_create_fork_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/protected_tags_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_http_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_http_private_token_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_lfs_over_http_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_mirroring_over_http_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_file_size_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_over_http_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_over_ssh_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_protected_branch_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/push_to_canary_gitaly_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/ssh_key_support_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/repository/user_views_commit_diff_patch_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/add_comment_to_snippet_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/add_file_to_snippet_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/clone_push_pull_personal_snippet_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/clone_push_pull_project_snippet_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/copy_snippet_file_contents_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/create_personal_snippet_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/create_personal_snippet_with_multiple_files_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/create_project_snippet_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/create_project_snippet_with_multiple_files_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/delete_file_from_snippet_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/share_snippet_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/snippet/snippet_index_page_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/source_editor/source_editor_toolbar_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/add_file_template_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/add_new_directory_in_web_ide_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/create_first_file_in_web_ide_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/link_to_line_in_web_ide_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/open_fork_in_web_ide_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/open_web_ide_from_diff_tab_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/review_merge_request_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/server_hooks_custom_error_message_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/upload_new_file_in_web_ide_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide/web_terminal_spec.rb'
- 'qa/qa/specs/features/browser_ui/3_create/web_ide_new/add_new_directory_in_web_ide_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/ci_variable/custom_variable_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/ci_variable/pipeline_with_protected_variable_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/ci_variable/prefill_variables_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/ci_variable/raw_variables_defined_in_yaml_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/ci_variable/ui_variable_inheritable_when_forward_pipeline_variables_true_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/ci_variable/ui_variable_non_inheritable_when_forward_pipeline_variables_false_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/create_and_process_pipeline_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/include_local_config_file_paths_with_wildcard_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/include_multiple_files_from_a_project_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/locked_artifacts_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/merge_mr_when_pipline_is_blocked_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/mr_event_rule_pipeline_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/parent_child_pipelines_independent_relationship_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/pass_dotenv_variables_to_downstream_via_bridge_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/pipeline_editor_branch_switcher_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/pipeline_editor_can_create_merge_request_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/pipeline_editor_tabs_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/pipeline_with_image_pull_policy_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/run_pipeline_via_web_only_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/run_pipeline_with_manual_jobs_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/trigger_child_pipeline_with_manual_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/trigger_matrix_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/pipeline/update_ci_file_with_pipeline_editor_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/runner/register_runner_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/testing/endpoint_coverage_spec.rb'
- 'qa/qa/specs/features/browser_ui/4_verify/testing/view_code_coverage_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/container_registry/container_registry_omnibus_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/container_registry/container_registry_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/container_registry/online_garbage_collection_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/dependency_proxy/dependency_proxy_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/infrastructure_registry/terraform_module_registry_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/composer_registry_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/conan_repository_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/generic_repository_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/helm_registry_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/maven/maven_group_level_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/maven/maven_project_level_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/maven_gradle_repository_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/npm/npm_instance_level_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/npm/npm_project_level_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/nuget/nuget_group_level_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/nuget/nuget_project_level_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/pypi_repository_spec.rb'
- 'qa/qa/specs/features/browser_ui/5_package/package_registry/rubygems_registry_spec.rb'
- 'qa/qa/specs/features/browser_ui/6_release/deploy_key/add_deploy_key_spec.rb'
- 'qa/qa/specs/features/browser_ui/6_release/deploy_token/add_deploy_token_spec.rb'
- 'qa/qa/specs/features/browser_ui/7_configure/auto_devops/auto_devops_templates_spec.rb'
- 'qa/qa/specs/features/browser_ui/7_configure/auto_devops/create_project_with_auto_devops_spec.rb'
- 'qa/qa/specs/features/browser_ui/8_monitor/alert_management/alert_settings_create_new_alerts_spec.rb'
- 'qa/qa/specs/features/browser_ui/8_monitor/alert_management/automatically_creates_incident_for_alert_spec.rb'
- 'qa/qa/specs/features/browser_ui/8_monitor/alert_management/create_alert_using_authorization_key_spec.rb'
- 'qa/qa/specs/features/ee/api/1_manage/import/import_github_repo_spec.rb'
- 'qa/qa/specs/features/ee/api/1_manage/integrations/group_webhook_events_spec.rb'
- 'qa/qa/specs/features/ee/api/1_manage/migration/gitlab_migration_group_spec.rb'
- 'qa/qa/specs/features/ee/api/1_manage/user/minimal_access_user_spec.rb'
- 'qa/qa/specs/features/ee/api/3_create/wiki/group_wiki_repository_storage_move_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/change_vulnerability_status_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/create_merge_request_with_secure_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/export_vulnerability_report_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/fix_vulnerability_workflow_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/group/group_audit_logs_1_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/group/group_audit_logs_2_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/instance/instance_audit_logs_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/policies_list_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/project/project_audit_logs_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/project_security_dashboard_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/scan_result_policy_vulnerabilities_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/security_reports_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/vulnerabilities_jira_integration_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/10_govern/vulnerability_management_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/license/cloud_activation_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/license/license_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/purchase/overage_modal_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/purchase/purchase_ci_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/purchase/purchase_storage_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/purchase/upgrade_group_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/purchase/user_registration_billing_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/saas_user_limit_experience_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/utilization/billing_seats_usage_data_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/utilization/free_namespace_storage_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/11_fulfillment/utilization/usage_quotas_seats_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/attachment_replication_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/database_delete_replication_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/geo_replication_ci_job_log_artifacts_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/geo_replication_maven_package_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/geo_replication_npm_registry_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/geo_replication_project_snippets_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/http_push_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/http_push_to_secondary_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/rename_replication_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/ssh_push_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/ssh_push_to_secondary_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/wiki_http_push_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/wiki_http_push_to_secondary_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/wiki_ssh_push_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/12_systems/geo/wiki_ssh_push_to_secondary_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/13_secure/enable_scanning_from_configuration_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/13_secure/license_compliance_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/13_secure/merge_request_license_widget_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/14_model_ops/suggested_reviewer_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/group/group_ldap_sync_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/group/group_saml_enforced_sso_git_access_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/group/group_saml_enforced_sso_new_account_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/group/group_saml_non_enforced_sso_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/group/prevent_forking_outside_group_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/group/restrict_by_ip_address_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/group/share_group_with_group_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/integrations/jira_issues_list_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/ldap/admin_ldap_sync_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/1_manage/user/minimal_access_user_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/burndown_chart/burndown_chart_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/contribution_analytics_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/custom_email/custom_email_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/epic/epics_management_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/epic/promote_issue_to_epic_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/epic/roadmap_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/insights/default_insights_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issue/default_issue_template_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issue_boards/configurable_issue_board_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issue_boards/configure_issue_board_by_label_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issue_boards/create_group_issue_board_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issue_boards/group_issue_boards_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issue_boards/project_issue_boards_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issue_boards/read_only_board_configuration_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issue_boards/sum_of_issues_weights_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issues_analytics/issues_analytics_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/issues_weight/issue_weight_visualization_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/iterations/assign_group_iteration_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/iterations/create_group_iteration_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/multiple_assignees_for_issues/four_assignees_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/multiple_assignees_for_issues/more_than_four_assignees_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/2_plan/scoped_labels/editing_scoped_labels_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/group_wiki/create_group_wiki_page_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/group_wiki/delete_group_wiki_page_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/group_wiki/file_upload_group_wiki_page_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/merge_request/add_batch_comments_in_merge_request_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/merge_request/approval_rules_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/merge_request/default_merge_request_template_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/assign_code_owners_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/code_owners_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/code_owners_with_protected_branch_and_squashed_commits_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/file_locking_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/group_file_template_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/merge_with_code_owner_in_root_group_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/merge_with_code_owner_in_subgroup_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/project_templates_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/pull_mirroring_over_http_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/pull_mirroring_over_ssh_with_key_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/push_rules_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/restrict_push_protected_branch_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/job_trace_archival_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/multi-project_pipelines_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/new_discussion_not_dropping_merge_trains_mr_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/parent_child_pipelines_dependent_relationship_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/pipeline_subscription_with_group_owned_project_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/pipelines_for_merged_results_and_merge_trains_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/system_cancel_merge_request_in_merge_train_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/transient/merge_trains_transient_bug_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/user_cancel_merge_request_in_merge_train_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/5_package/dependency_proxy_sso_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/6_release/pipeline_status_on_operation_dashboard_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/9_data_stores/elasticsearch/elasticsearch_reindexing_spec.rb'
- 'qa/qa/specs/features/sanity/feature_flags_spec.rb'
- 'qa/qa/specs/features/sanity/framework_spec.rb'
- 'qa/qa/specs/features/sanity/interception_spec.rb'
- 'qa/spec/ee/resource/mixins/group_base_spec.rb'
- 'qa/spec/ee/runtime/geo_spec.rb'
- 'qa/spec/ee/scenario/test/integration/group_saml_spec.rb'
- 'qa/spec/factory/resource/user_spec.rb'
- 'qa/spec/git/location_spec.rb'
- 'qa/spec/git/repository_spec.rb'
- 'qa/spec/page/base_spec.rb'
- 'qa/spec/page/element_spec.rb'
- 'qa/spec/page/logging_spec.rb'
- 'qa/spec/page/validator_spec.rb'
- 'qa/spec/page/view_spec.rb'
- 'qa/spec/resource/api_fabricator_spec.rb'
- 'qa/spec/resource/base_spec.rb'
- 'qa/spec/resource/events/base_spec.rb'
- 'qa/spec/resource/events/project_spec.rb'
- 'qa/spec/resource/repository/push_spec.rb'
- 'qa/spec/resource/ssh_key_spec.rb'
- 'qa/spec/resource/user_spec.rb'
- 'qa/spec/runtime/api/client_spec.rb'
- 'qa/spec/runtime/api/request_spec.rb'
- 'qa/spec/runtime/application_settings_spec.rb'
- 'qa/spec/runtime/env_spec.rb'
- 'qa/spec/runtime/feature_spec.rb'
- 'qa/spec/runtime/key/ecdsa_spec.rb'
- 'qa/spec/runtime/key/ed25519_spec.rb'
- 'qa/spec/runtime/key/rsa_spec.rb'
- 'qa/spec/runtime/logger_spec.rb'
- 'qa/spec/runtime/namespace_spec.rb'
- 'qa/spec/runtime/release_spec.rb'
- 'qa/spec/runtime/scenario_spec.rb'
- 'qa/spec/runtime/script_extensions/interceptor_spec.rb'
- 'qa/spec/scenario/actable_spec.rb'
- 'qa/spec/scenario/bootable_spec.rb'
- 'qa/spec/scenario/template_spec.rb'
- 'qa/spec/scenario/test/instance/airgapped_spec.rb'
- 'qa/spec/scenario/test/instance/all_spec.rb'
- 'qa/spec/scenario/test/instance/reliable_spec.rb'
- 'qa/spec/scenario/test/instance/smoke_spec.rb'
- 'qa/spec/scenario/test/integration/instance_saml_spec.rb'
- 'qa/spec/scenario/test/integration/ldap_spec.rb'
- 'qa/spec/scenario/test/integration/mattermost_spec.rb'
- 'qa/spec/scenario/test/integration/service_ping_disabled_spec.rb'
- 'qa/spec/scenario/test/sanity/framework_spec.rb'
- 'qa/spec/scenario/test/sanity/selectors_spec.rb'
- 'qa/spec/service/docker_run/base_spec.rb'
- 'qa/spec/service/docker_run/gitlab_runner_spec.rb'
- 'qa/spec/service/docker_run/k3s_spec.rb'
- 'qa/spec/service/docker_run/mixins/third_party_docker_spec.rb'
- 'qa/spec/service/shellout_spec.rb'
- 'qa/spec/specs/allure_report_spec.rb'
- 'qa/spec/specs/helpers/context_selector_spec.rb'
- 'qa/spec/specs/helpers/feature_flag_spec.rb'
- 'qa/spec/specs/helpers/quarantine_spec.rb'
- 'qa/spec/specs/parallel_runner_spec.rb'
- 'qa/spec/specs/runner_spec.rb'
- 'qa/spec/support/formatters/allure_metadata_formatter_spec.rb'
- 'qa/spec/support/formatters/test_metrics_formatter_spec.rb'
- 'qa/spec/support/loglinking_spec.rb'
- 'qa/spec/support/page_error_checker_spec.rb'
- 'qa/spec/support/repeater_spec.rb'
- 'qa/spec/support/retrier_spec.rb'
- 'qa/spec/support/run_spec.rb'
- 'qa/spec/support/ssh_spec.rb'
- 'qa/spec/support/system_logs/kibana_spec.rb'
- 'qa/spec/support/system_logs/sentry_spec.rb'
- 'qa/spec/support/wait_for_requests_spec.rb'
- 'qa/spec/support/waiter_spec.rb'
- 'qa/spec/tools/ci/ff_changes_spec.rb'
- 'qa/spec/tools/ci/non_empty_suites_spec.rb'
- 'qa/spec/tools/ci/qa_changes_spec.rb'
- 'qa/spec/tools/ci/test_metrics_spec.rb'
- 'qa/spec/tools/long_running_spec_reporter_spec.rb'
- 'qa/spec/tools/reliable_report_spec.rb'
- 'qa/spec/tools/test_resources_data_processor_spec.rb'
- 'spec/benchmarks/banzai_benchmark.rb'
- 'spec/bin/audit_event_type_spec.rb'
- 'spec/bin/diagnostic_reports_uploader_spec.rb'

View File

@ -1 +1 @@
69e486270838efbbb78e6736ac6aecde5ccd8caa
6d228addeac5e159eaf89ce54aa523f71e336a29

View File

@ -35,13 +35,16 @@ export const removeSubscription = async (removePath) => {
});
};
export const fetchGroups = async (groupsPath, { page, perPage, search }) => {
export const fetchGroups = async (groupsPath, { page, perPage, search }, accessToken = null) => {
return axiosInstance.get(groupsPath, {
params: {
page,
per_page: perPage,
search,
},
headers: {
...(accessToken ? { Authorization: `Bearer ${accessToken}` } : {}),
},
});
};

View File

@ -1,4 +1,5 @@
<script>
import { mapState } from 'vuex';
import { GlLoadingIcon, GlPagination, GlAlert, GlSearchBoxByType } from '@gitlab/ui';
import { fetchGroups } from '~/jira_connect/subscriptions/api';
import {
@ -38,6 +39,7 @@ export default {
showPagination() {
return this.totalItems > this.$options.DEFAULT_GROUPS_PER_PAGE && this.groups.length > 0;
},
...mapState(['accessToken']),
},
mounted() {
return this.loadGroups().finally(() => {
@ -47,11 +49,15 @@ export default {
methods: {
loadGroups() {
this.isLoadingMore = true;
return fetchGroups(this.groupsPath, {
page: this.page,
perPage: this.$options.DEFAULT_GROUPS_PER_PAGE,
search: this.searchValue,
})
return fetchGroups(
this.groupsPath,
{
page: this.page,
perPage: this.$options.DEFAULT_GROUPS_PER_PAGE,
search: this.searchValue,
},
this.accessToken,
)
.then((response) => {
const { page, total } = parseIntPagination(normalizeHeaders(response.headers));
this.page = page;

View File

@ -133,7 +133,7 @@ export default function setupVueRepositoryList() {
},
on: {
input(selectedRef) {
visitUrl(generateRefDestinationPath(projectRootPath, selectedRef));
visitUrl(generateRefDestinationPath(projectRootPath, ref, selectedRef));
},
},
});

View File

@ -5,9 +5,9 @@ import { joinPaths } from '~/lib/utils/url_utility';
* Example: /root/Flight/-/blob/fix/main/test/spec/utils_spec.js
* Group 1: /-/blob
* Group 2: blob
* Group 3: main/test/spec/utils_spec.js
* Group 3: /test/spec/utils_spec.js
*/
const NAMESPACE_TARGET_REGEX = /(\/-\/(blob|tree))\/.*?\/(.*)/;
const getNamespaceTargetRegex = (ref) => new RegExp(`(/-/(blob|tree))/${ref}/(.*)`);
/**
* Generates a ref destination path based on the selected ref and current path.
@ -15,11 +15,12 @@ const NAMESPACE_TARGET_REGEX = /(\/-\/(blob|tree))\/.*?\/(.*)/;
* @param {string} projectRootPath - The root path for a project.
* @param {string} selectedRef - The selected ref from the ref dropdown.
*/
export function generateRefDestinationPath(projectRootPath, selectedRef) {
export function generateRefDestinationPath(projectRootPath, ref, selectedRef) {
const currentPath = window.location.pathname;
const encodedHash = '%23';
let namespace = '/-/tree';
let target;
const NAMESPACE_TARGET_REGEX = getNamespaceTargetRegex(ref);
const match = NAMESPACE_TARGET_REGEX.exec(currentPath);
if (match) {
[, namespace, , target] = match;

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true

module Resolvers
  module Ci
    class VariablesResolver < BaseResolver
      type Types::Ci::InstanceVariableType.connection_type, null: true

      argument :sort, ::Types::Ci::VariableSortEnum,
               required: false,
               description: 'Sort order of results.'

      def resolve(**args)
        if parent.is_a?(Group) || parent.is_a?(Project)
          parent.variables.order_by(args[:sort])
        elsif current_user&.can_admin_all_resources?
          ::Ci::InstanceVariable.order_by(args[:sort])
        end
      end

      private

      def parent
        object.respond_to?(:sync) ? object.sync : object
      end
    end
  end
end

View File

@ -0,0 +1,13 @@
# frozen_string_literal: true

module Types
  module Ci
    class VariableSortEnum < BaseEnum
      graphql_name 'CiVariableSort'
      description 'Values for sorting variables'

      value 'KEY_ASC', 'Sorted by key in ascending order.', value: :key_asc
      value 'KEY_DESC', 'Sorted by key in descending order.', value: :key_desc
    end
  end
end

View File

@ -200,7 +200,7 @@ module Types
null: true,
description: "List of the group's CI/CD variables.",
authorize: :admin_group,
method: :variables
resolver: Resolvers::Ci::VariablesResolver
field :runners, Types::Ci::RunnerType.connection_type,
null: true,

View File

@ -345,7 +345,7 @@ module Types
null: true,
description: "List of the project's CI/CD variables.",
authorize: :admin_build,
method: :variables
resolver: Resolvers::Ci::VariablesResolver
field :ci_cd_settings, Types::Ci::CiCdSettingType,
null: true,

View File

@ -17,7 +17,8 @@ module Types
field :ci_variables,
Types::Ci::InstanceVariableType.connection_type,
null: true,
description: "List of the instance's CI/CD variables."
description: "List of the instance's CI/CD variables.",
resolver: Resolvers::Ci::VariablesResolver
field :container_repository, Types::ContainerRepositoryDetailsType,
null: true,
description: 'Find a container repository.' do
@ -167,12 +168,6 @@ module Types
application_settings
end
def ci_variables
return unless current_user&.can_admin_all_resources?
::Ci::InstanceVariable.all
end
def application_settings
Gitlab::CurrentSettings.current_application_settings
end

View File

@ -18,6 +18,7 @@ module Ci
scope :by_key, -> (key) { where(key: key) }
scope :order_key_asc, -> { reorder(key: :asc) }
scope :order_key_desc, -> { reorder(key: :desc) }
attr_encrypted :value,
mode: :per_attribute_iv_and_salt,
@ -30,6 +31,16 @@ module Ci
end
end
class_methods do
def order_by(method)
case method.to_s
when 'key_asc' then order_key_asc
when 'key_desc' then order_key_desc
else all
end
end
end
def to_runner_variable
var_cache_key = to_runner_variable_cache_key

View File

@ -395,7 +395,7 @@ class ContainerRepository < ApplicationRecord
end
def migrated?
MIGRATION_PHASE_1_ENDED_AT < self.created_at || import_done?
(self.created_at && MIGRATION_PHASE_1_ENDED_AT < self.created_at) || import_done?
end
def last_import_step_done_at

View File

@ -12,7 +12,6 @@ class LfsObject < ApplicationRecord
scope :with_files_stored_locally, -> { where(file_store: LfsObjectUploader::Store::LOCAL) }
scope :with_files_stored_remotely, -> { where(file_store: LfsObjectUploader::Store::REMOTE) }
scope :for_oids, -> (oids) { where(oid: oids) }
scope :for_oid_and_size, -> (oid, size) { find_by(oid: oid, size: size) }
validates :oid, presence: true, uniqueness: true, format: { with: /\A\h{64}\z/ }
@ -20,6 +19,10 @@ class LfsObject < ApplicationRecord
BATCH_SIZE = 3000
def self.for_oid_and_size(oid, size)
find_by(oid: oid, size: size)
end
def self.not_linked_to_project(project)
where('NOT EXISTS (?)',
project.lfs_objects_projects.select(1).where('lfs_objects_projects.lfs_object_id = lfs_objects.id'))

View File

@ -10,12 +10,15 @@ module Projects
}.freeze
def execute(container_repository, disable_timeout: true)
return false unless can?(current_user, :update_container_image, project)
return error('Unauthorized access') unless can_destroy?
# Delete tags outside of the transaction to avoid hitting an idle-in-transaction timeout
unless delete_tags(container_repository, disable_timeout) &&
if delete_tags(container_repository, disable_timeout) &&
destroy_container_repository(container_repository)
success
else
container_repository.delete_failed!
error('Deletion failed for container repository')
end
end
@ -40,9 +43,19 @@ module Projects
false
end
def can_destroy?
return true if skip_permission_check?
can?(current_user, :destroy_container_image, project)
end
def error_message(container_repository, message)
"Container repository with ID: #{container_repository.id} and path: #{container_repository.path}" \
" failed with message: #{message}"
"Container repository with ID: #{container_repository.id} and path: #{container_repository.path} " \
"failed with message: #{message}"
end
def skip_permission_check?
!!params[:skip_permission_check]
end
end
end

View File

@ -257,12 +257,12 @@ module Projects
return true unless Gitlab.config.registry.enabled
return false unless remove_legacy_registry_tags
results = []
project.container_repositories.find_each do |container_repository|
service = Projects::ContainerRepository::DestroyService.new(project, current_user)
service.execute(container_repository)
results << destroy_repository(project, container_repository)
end
true
results.all?
end
##
@ -272,9 +272,14 @@ module Projects
def remove_legacy_registry_tags
return true unless Gitlab.config.registry.enabled
::ContainerRepository.build_root_repository(project).tap do |repository|
break repository.has_tags? ? repository.delete_tags! : true
end
root_repository = ::ContainerRepository.build_root_repository(project)
root_repository.has_tags? ? destroy_repository(project, root_repository) : true
end
def destroy_repository(project, repository)
service = ContainerRepository::DestroyService.new(project, current_user, { skip_permission_check: true })
response = service.execute(repository)
response[:status] == :success
end
def raise_error(message)

View File

@ -440,7 +440,7 @@ Settings.mattermost['enabled'] = false if Settings.mattermost['enabled'].nil?
Settings.mattermost['host'] = nil unless Settings.mattermost.enabled
#
# Jira Connect (GitLab.com for Jira Cloud App)
# Jira Connect (GitLab for Jira Cloud App)
#
Settings['jira_connect'] ||= Settingslogic.new({})

View File

@ -4,7 +4,7 @@ classes:
- JiraConnectInstallation
feature_categories:
- integrations
description: GitLab.com for Jira Cloud app installation data, formerly Jira Connect App.
description: GitLab for Jira Cloud app installation data, formerly Jira Connect App.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/9593
milestone: '11.9'
gitlab_schema: gitlab_main

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true

class RemoveFkToCiBuildsCiUnitTestFailureOnBuildId < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  SOURCE_TABLE_NAME = :ci_unit_test_failures
  TARGET_TABLE_NAME = :ci_builds
  COLUMN = :build_id
  TARGET_COLUMN = :id
  FK_NAME = :fk_0f09856e1f

  def up
    with_lock_retries do
      remove_foreign_key_if_exists(
        SOURCE_TABLE_NAME,
        TARGET_TABLE_NAME,
        name: FK_NAME,
        reverse_lock_order: true
      )
    end
  end

  def down
    add_concurrent_foreign_key(
      SOURCE_TABLE_NAME,
      TARGET_TABLE_NAME,
      column: COLUMN,
      target_column: TARGET_COLUMN,
      validate: true,
      reverse_lock_order: true,
      on_delete: :cascade,
      name: FK_NAME
    )
  end
end

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true

class RemoveFkToCiBuildsCiBuildPendingStateOnBuildId < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  SOURCE_TABLE_NAME = :ci_build_pending_states
  TARGET_TABLE_NAME = :ci_builds
  COLUMN = :build_id
  TARGET_COLUMN = :id
  FK_NAME = :fk_rails_0bbbfeaf9d

  def up
    with_lock_retries do
      remove_foreign_key_if_exists(
        SOURCE_TABLE_NAME,
        TARGET_TABLE_NAME,
        name: FK_NAME,
        reverse_lock_order: true
      )
    end
  end

  def down
    add_concurrent_foreign_key(
      SOURCE_TABLE_NAME,
      TARGET_TABLE_NAME,
      column: COLUMN,
      target_column: TARGET_COLUMN,
      validate: true,
      reverse_lock_order: true,
      on_delete: :cascade,
      name: FK_NAME
    )
  end
end

View File

@ -0,0 +1,35 @@
# frozen_string_literal: true

class RemoveFkToCiBuildsCiBuildTraceChunkOnBuildId < Gitlab::Database::Migration[2.1]
  disable_ddl_transaction!

  SOURCE_TABLE_NAME = :ci_build_trace_chunks
  TARGET_TABLE_NAME = :ci_builds
  COLUMN = :build_id
  TARGET_COLUMN = :id
  FK_NAME = :fk_rails_1013b761f2

  def up
    with_lock_retries do
      remove_foreign_key_if_exists(
        SOURCE_TABLE_NAME,
        TARGET_TABLE_NAME,
        name: FK_NAME,
        reverse_lock_order: true
      )
    end
  end

  def down
    add_concurrent_foreign_key(
      SOURCE_TABLE_NAME,
      TARGET_TABLE_NAME,
      column: COLUMN,
      target_column: TARGET_COLUMN,
      validate: true,
      reverse_lock_order: true,
      on_delete: :cascade,
      name: FK_NAME
    )
  end
end

View File

@ -0,0 +1 @@
c549badd648613b0f52c69719cd62c93c5f49c1d39190a500bd68ec63a90c045

View File

@ -0,0 +1 @@
0e71cb12dfdd1af07f23e218ad76fd081877f49a20e9122ffcbad01e65a25c6f

View File

@ -0,0 +1 @@
268e8ce7d9313f375843a3e8f23beb8028287ef2581f8120f78cbb2fe5b83f99

View File

@ -33376,9 +33376,6 @@ ALTER TABLE ONLY notification_settings
ALTER TABLE ONLY lists
ADD CONSTRAINT fk_0d3f677137 FOREIGN KEY (board_id) REFERENCES boards(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_unit_test_failures
ADD CONSTRAINT fk_0f09856e1f FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE;
ALTER TABLE ONLY deployment_approvals
ADD CONSTRAINT fk_0f58311058 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
@ -34336,9 +34333,6 @@ ALTER TABLE ONLY packages_conan_file_metadata
ALTER TABLE ONLY related_epic_links
ADD CONSTRAINT fk_rails_0b72027748 FOREIGN KEY (target_id) REFERENCES epics(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_build_pending_states
ADD CONSTRAINT fk_rails_0bbbfeaf9d FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE;
ALTER TABLE ONLY audit_events_external_audit_event_destinations
ADD CONSTRAINT fk_rails_0bc80a4edc FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
@ -34366,9 +34360,6 @@ ALTER TABLE ONLY issue_email_participants
ALTER TABLE ONLY merge_request_context_commits
ADD CONSTRAINT fk_rails_0fe0039f60 FOREIGN KEY (merge_request_id) REFERENCES merge_requests(id) ON DELETE CASCADE;
ALTER TABLE ONLY ci_build_trace_chunks
ADD CONSTRAINT fk_rails_1013b761f2 FOREIGN KEY (build_id) REFERENCES ci_builds(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_exports
ADD CONSTRAINT fk_rails_1019162882 FOREIGN KEY (author_id) REFERENCES users(id) ON DELETE CASCADE;

View File

@ -786,6 +786,7 @@ requeued
requeues
requeuing
resolver
resolver's
Restlet
resync
resynced
@ -833,6 +834,7 @@ SBOMs
sbt
SBT
scalers
scalar's
scatterplot
scatterplots
schedulable
@ -1095,6 +1097,7 @@ unrevoke
unsanitized
unschedule
unscoped
unsetting
unshare
unshared
unshares

View File

@ -66,7 +66,7 @@ Checking Geo ...
GitLab Geo is available ... yes
GitLab Geo is enabled ... yes
This machine's Geo node name matches a database record ... yes, found a secondary node named "Shanghai"
GitLab Geo secondary database is correctly configured ... yes
GitLab Geo tracking database is correctly configured ... yes
Database replication enabled? ... yes
Database replication working? ... yes
GitLab Geo HTTP(S) connectivity ...
@ -339,7 +339,7 @@ sudo gitlab-rake gitlab:geo:check
GitLab Geo is available ... yes
GitLab Geo is enabled ... yes
GitLab Geo secondary database is correctly configured ... not a secondary node
GitLab Geo tracking database is correctly configured ... not a secondary node
Database replication enabled? ... not a secondary node
...
Checking Geo ... Finished

View File

@ -97,6 +97,12 @@ This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
#### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="querycivariablessort"></a>`sort` | [`CiVariableSort`](#civariablesort) | Sort order of results. |
### `Query.containerRepository`
Find a container repository.
@ -13731,7 +13737,6 @@ GPG signature for a signed commit.
| <a id="groupallowstalerunnerpruning"></a>`allowStaleRunnerPruning` | [`Boolean!`](#boolean) | Indicates whether to regularly prune stale group runners. Defaults to false. |
| <a id="groupautodevopsenabled"></a>`autoDevopsEnabled` | [`Boolean`](#boolean) | Indicates whether Auto DevOps is enabled for all projects within this group. |
| <a id="groupavatarurl"></a>`avatarUrl` | [`String`](#string) | Avatar URL of the group. |
| <a id="groupcivariables"></a>`ciVariables` | [`CiGroupVariableConnection`](#cigroupvariableconnection) | List of the group's CI/CD variables. (see [Connections](#connections)) |
| <a id="groupcontainerrepositoriescount"></a>`containerRepositoriesCount` | [`Int!`](#int) | Number of container repositories in the group. |
| <a id="groupcontainslockedprojects"></a>`containsLockedProjects` | [`Boolean!`](#boolean) | Includes at least one project where the repository size exceeds the limit. |
| <a id="groupcrossprojectpipelineavailable"></a>`crossProjectPipelineAvailable` | [`Boolean!`](#boolean) | Indicates if the cross_project_pipeline feature is available for the namespace. |
@ -13825,6 +13830,22 @@ four standard [pagination arguments](#connection-pagination-arguments):
| ---- | ---- | ----------- |
| <a id="groupboardsid"></a>`id` | [`BoardID`](#boardid) | Find a board by its ID. |
##### `Group.ciVariables`
List of the group's CI/CD variables.
Returns [`CiGroupVariableConnection`](#cigroupvariableconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="groupcivariablessort"></a>`sort` | [`CiVariableSort`](#civariablesort) | Sort order of results. |
##### `Group.clusterAgents`
Cluster agents associated with projects in the group and its subgroups.
@ -17417,7 +17438,6 @@ Represents a product analytics dashboard widget.
| <a id="projectcicdsettings"></a>`ciCdSettings` | [`ProjectCiCdSetting`](#projectcicdsetting) | CI/CD settings for the project. |
| <a id="projectciconfigpathordefault"></a>`ciConfigPathOrDefault` | [`String!`](#string) | Path of the CI configuration file. |
| <a id="projectcijobtokenscope"></a>`ciJobTokenScope` | [`CiJobTokenScopeType`](#cijobtokenscopetype) | The CI Job Tokens scope of access. |
| <a id="projectcivariables"></a>`ciVariables` | [`CiProjectVariableConnection`](#ciprojectvariableconnection) | List of the project's CI/CD variables. (see [Connections](#connections)) |
| <a id="projectcodecoveragesummary"></a>`codeCoverageSummary` | [`CodeCoverageSummary`](#codecoveragesummary) | Code coverage summary associated with the project. |
| <a id="projectcomplianceframeworks"></a>`complianceFrameworks` | [`ComplianceFrameworkConnection`](#complianceframeworkconnection) | Compliance frameworks associated with the project. (see [Connections](#connections)) |
| <a id="projectcontainerexpirationpolicy"></a>`containerExpirationPolicy` | [`ContainerExpirationPolicy`](#containerexpirationpolicy) | Container expiration policy of the project. |
@ -17647,6 +17667,22 @@ Returns [`CiTemplate`](#citemplate).
| ---- | ---- | ----------- |
| <a id="projectcitemplatename"></a>`name` | [`String!`](#string) | Name of the CI/CD template to search for. Template must be formatted as `Name.gitlab-ci.yml`. |
##### `Project.ciVariables`
List of the project's CI/CD variables.
Returns [`CiProjectVariableConnection`](#ciprojectvariableconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#connection-pagination-arguments):
`before: String`, `after: String`, `first: Int`, `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="projectcivariablessort"></a>`sort` | [`CiVariableSort`](#civariablesort) | Sort order of results. |
##### `Project.clusterAgent`
Find a single cluster agent by name.
@ -21721,6 +21757,15 @@ Values for sorting runners.
| <a id="cirunnerupgradestatusnot_available"></a>`NOT_AVAILABLE` | Upgrade is not available for the runner. |
| <a id="cirunnerupgradestatusrecommended"></a>`RECOMMENDED` | Upgrade is available and recommended for the runner. |
### `CiVariableSort`
Values for sorting variables.
| Value | Description |
| ----- | ----------- |
| <a id="civariablesortkey_asc"></a>`KEY_ASC` | Sorted by key in ascending order. |
| <a id="civariablesortkey_desc"></a>`KEY_DESC` | Sorted by key in descending order. |
### `CiVariableType`
| Value | Description |

View File

@ -32,7 +32,7 @@ with anyone who may work in this part of the codebase in the future. You can fin
[Google Slides](https://docs.google.com/presentation/d/1qOTxpkTdHIp1CRjuTvO-aXg0_rUtzE3ETfLUdnBB5uQ/edit)
and in [PDF](https://gitlab.com/gitlab-org/create-stage/uploads/8e78ea7f326b2ef649e7d7d569c26d56/GraphQL_Deep_Dive__Create_.pdf).
Everything covered in this deep dive was accurate as of GitLab 11.9, and while specific
details may have changed since then, it should still serve as a good introduction.
details may have changed after that release, it should still serve as a good introduction.
## GraphiQL
@ -210,8 +210,8 @@ The `iid`, `title` and `description` are _scalar_ GraphQL types.
`iid` is a `GraphQL::Types::ID`, a special string type that signifies a unique ID.
`title` and `description` are regular `GraphQL::Types::String` types.
Note that the old scalar types `GraphQL:ID`, `GraphQL::INT_TYPE`, `GraphQL::STRING_TYPE`,
`GraphQL:BOOLEAN_TYPE`, and `GraphQL::FLOAT_TYPE` are no longer allowed. Please use `GraphQL::Types::ID`,
The old scalar types `GraphQL:ID`, `GraphQL::INT_TYPE`, `GraphQL::STRING_TYPE`,
`GraphQL:BOOLEAN_TYPE`, and `GraphQL::FLOAT_TYPE` are no longer allowed. Use `GraphQL::Types::ID`,
`GraphQL::Types::Int`, `GraphQL::Types::String`, `GraphQL::Types::Boolean`, and `GraphQL::Types::Float`.
When exposing a model through the GraphQL API, we do so by creating a
@ -250,7 +250,7 @@ the following reasons:
- Changing from a non-nullable field to a nullable field is difficult with a versionless schema
Non-nullable fields should only be used when a field is required, very unlikely
to become optional in the future, and very easy to calculate. An example would
to become optional in the future, and straightforward to calculate. An example would
be `id` fields.
A non-nullable GraphQL schema field is an object type followed by the exclamation point (bang) `!`. Here's an example from the `gitlab_schema.graphql` file:
@ -388,12 +388,12 @@ query($project_path: ID!) {
```
To ensure that we get consistent ordering, we append an ordering on the primary
key, in descending order. This is usually `id`, so we add `order(id: :desc)`
key, in descending order. The primary key is usually `id`, so we add `order(id: :desc)`
to the end of the relation. A primary key _must_ be available on the underlying table.
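For illustration, a hedged sketch of a resolver body that follows this guidance (the relation is hypothetical; the only point is the trailing `order(id: :desc)`):
```ruby
# Hypothetical resolver body: end the relation with a stable, descending
# primary-key order before the connection layer paginates it.
def resolve(**_args)
  project.merge_requests.order(id: :desc)
end
```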
#### Shortcut fields
Sometimes it can seem easy to implement a "shortcut field", having the resolver return the first of a collection if no parameters are passed.
Sometimes it can seem straightforward to implement a "shortcut field", having the resolver return the first of a collection if no parameters are passed.
These "shortcut fields" are discouraged because they create maintenance overhead.
They need to be kept in sync with their canonical field, and deprecated or modified if their canonical field changes.
Use the functionality the framework provides unless there is a compelling reason to do otherwise.
@ -692,7 +692,7 @@ Global IDs, so as such they are coupled to model names. When we rename a
model, its Global ID changes.
If the Global ID is used as an _argument_ type anywhere in the schema, then the Global ID
change would normally constitute a breaking change.
change would typically constitute a breaking change.
To continue to support clients using the old Global ID argument, we add a deprecation
to `Gitlab::GlobalId::Deprecations`.
@ -763,24 +763,24 @@ support for the former argument style, remove the `Deprecation`:
DEPRECATIONS = [].freeze
```
During the deprecation period the API will accept either of these formats for the argument value:
During the deprecation period, the API accepts either of these formats for the argument value:
- `"gid://gitlab/PrometheusService/1"`
- `"gid://gitlab/Integrations::Prometheus/1"`
The API will also accept these types in the query signature for the argument:
The API also accepts these types in the query signature for the argument:
- `PrometheusServiceID`
- `IntegrationsPrometheusID`
NOTE:
Although queries that use the old type (`PrometheusServiceID` in this example) will be
considered valid and executable by the API, validator tools will consider them to be invalid.
This is because we are deprecating using a bespoke method outside of the
Although queries that use the old type (`PrometheusServiceID` in this example) are
considered valid and executable by the API, validator tools consider them to be invalid.
They are considered invalid because we are deprecating using a bespoke method outside of the
[`@deprecated` directive](https://spec.graphql.org/June2018/#sec--deprecated), so validators are not
aware of the support.
The documentation will mention that the old Global ID style is now deprecated.
The documentation mentions that the old Global ID style is now deprecated.
## Mark schema items as Alpha
@ -897,7 +897,7 @@ An example of the use of a union for this purpose is
Field names can be mapped to hash data keys using the `hash_key:` keyword if needed.
For example, given the following simple JSON data:
For example, given the following JSON data:
```json
{
@ -1054,7 +1054,7 @@ the objects in question.
To find objects to display in a field, we can add resolvers to
`app/graphql/resolvers`.
Arguments can be defined within the resolver in the same way as in a mutation.
Arguments can be defined in the resolver in the same way as in a mutation.
See the [Mutation arguments](#object-identifier-arguments) section.
To limit the amount of queries performed, we can use [BatchLoader](graphql_guide/batchloader.md).
@ -1098,7 +1098,7 @@ application:
- Services in mutations to apply operations.
- Loaders (batch-aware finders) specific to queries.
Note that there is never any reason to use batching in a mutation. Mutations are
There is never any reason to use batching in a mutation. Mutations are
executed in series, so there are no batching opportunities. All values are
evaluated eagerly as soon as they are requested, so batching is unnecessary
overhead. If you are writing:
@ -1134,7 +1134,7 @@ the entire field should resolve to `null`.
### Deriving resolvers (`BaseResolver.single` and `BaseResolver.last`)
For some simple use cases, we can derive resolvers from others.
For some use cases, we can derive resolvers from others.
The main use case for this is one resolver to find all items, and another to
find one specific one. For this, we supply convenience methods:
@ -1179,7 +1179,7 @@ class JobsResolver < BaseResolver
end
```
Here we have a simple resolver for getting pipeline jobs. The `name` argument is
Here we have a resolver for getting pipeline jobs. The `name` argument is
optional when getting a list, but required when getting a single job.
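As a hedged sketch (the type and field names are illustrative), the derived resolver is wired onto fields like this:
```ruby
# The plural field uses the resolver as-is; the singular field uses the
# resolver returned by `.single`, which requires the `name` argument.
field :jobs, Types::Ci::JobType.connection_type, null: true,
      resolver: Resolvers::JobsResolver
field :job, Types::Ci::JobType, null: true,
      resolver: Resolvers::JobsResolver.single
```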
If there are multiple arguments, and neither can be made required, we can use
@ -1327,7 +1327,7 @@ class MyThingResolver < BaseResolver
end
```
For an example of real world use, please
For an example of real world use,
see [`ResolvesMergeRequests`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/graphql/resolvers/concerns/resolves_merge_requests.rb).
### Negated arguments
@ -1450,7 +1450,7 @@ It's acceptable to have both fine-grained mutations and coarse-grained mutations
that too many fine-grained mutations can lead to organizational challenges in maintainability, code
comprehensibility, and testing.
Each mutation requires a new class, which can lead to technical debt.
It also means the schema becomes very big, and we want users to easily navigate our schema.
It also means the schema becomes very big, which can make it difficult for users to navigate our schema.
As each new mutation also needs tests (including slower request integration tests), adding mutations
slows down the test suite.
@ -1730,7 +1730,7 @@ two fields: `errors: [String]`, and `thing: ThingType`. The specific nature of
the `thing` itself is irrelevant to these examples, as we are considering the
errors.
There are three states a mutation response can be in:
The three states a mutation response can be in are:
- [Success](#success)
- [Failure (relevant to the user)](#failure-relevant-to-the-user)
@ -1776,11 +1776,11 @@ Examples of this include:
- Model validation errors: the user may need to change the inputs.
- Permission errors: the user needs to know they cannot do this, they may need to request permission or sign in.
- Problems with application state that prevent the user's action, for example: merge conflicts, the resource was locked, and so on.
- Problems with the application state that prevent the user's action (for example, merge conflicts or a locked resource).
Ideally, we should prevent the user from getting this far, but if they do, they
need to be told what is wrong, so they understand the reason for the failure and
what they can do to achieve their intent, even if that is as simple as retrying the
what they can do to achieve their intent. For example, they might only need to retry the
request.
It is possible to return *recoverable* errors alongside mutation data. For example, if
@ -1803,7 +1803,7 @@ In this case there is no `data`:
}
```
This is the result of raising an error during the mutation. In our implementation,
This results from raising an error during the mutation. In our implementation,
the messages of argument errors and validation errors are returned to the client, and all other
`StandardError` instances are caught, logged and presented to the client with the message set to `"Internal server error"`.
See [`GraphqlController`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/controllers/graphql_controller.rb) for details.
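As a rough, non-authoritative sketch, raising inside `resolve` is what produces such a top-level error (the model and message are hypothetical):
```ruby
# Hypothetical `#resolve`: raising aborts the mutation, so the client receives
# a top-level error instead of data. Argument and validation error messages
# pass through; other StandardErrors are masked as "Internal server error".
def resolve(id:)
  thing = Thing.find_by_id(id) # hypothetical lookup
  raise Gitlab::Graphql::Errors::ResourceNotAvailable, 'Thing is not available' unless thing
  { thing: thing, errors: [] }
end
```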
@ -1832,7 +1832,7 @@ needs of the _user_ from the needs of the _client_.
> _Never catch an error unless the user needs to know about it._
If the user does need to know about it, communicate with frontend developers
to make sure the error information we are passing back is useful.
to make sure the error information we are passing back is relevant and serves a purpose.
See also the [frontend GraphQL guide](../development/fe_guide/graphql.md#handling-errors).
@ -1875,7 +1875,7 @@ process, read [merge request !42588](https://gitlab.com/gitlab-org/gitlab/-/merg
We use subscriptions to push updates to clients. We use the [Action Cable implementation](https://graphql-ruby.org/subscriptions/action_cable_implementation)
to deliver the messages over websockets.
When a client subscribes to a subscription, we store their query in-memory within Puma workers. Then when the subscription is triggered,
When a client subscribes to a subscription, we store their query in-memory in Puma workers. Then when the subscription is triggered,
the Puma workers execute the stored GraphQL queries and push the results to the clients.
NOTE:
@ -1897,7 +1897,7 @@ This class runs during the initial subscription request and subsequent updates.
You should implement the `#authorized?` method of the subscription class so that the initial subscription and subsequent updates are authorized.
When a user is not authorized, you should call the `unauthorized!` helper so that execution is halted and the user is unsubscribed. Returning `false`
results in redaction of the response but we leak information that some updates are happening. This is due to a
results in redaction of the response, but we leak information that some updates are happening. This leakage is due to a
[bug in the GraphQL gem](https://github.com/rmosolgo/graphql-ruby/issues/3390).
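A minimal sketch of that check, assuming a plain ID argument and the `:read_issue` ability purely for illustration:
```ruby
# Call `unauthorized!` rather than returning `false`, so execution halts and
# the user is unsubscribed instead of only having the payload redacted.
def authorized?(issue_id:)
  issue = Issue.find_by_id(issue_id)
  unauthorized! unless issue && Ability.allowed?(current_user, :read_issue, issue)
  true
end
```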
### Triggering subscriptions
@ -1913,7 +1913,7 @@ To learn more, visit [GraphQL pagination](graphql_guide/pagination.md).
For validations of single arguments, use the
[`prepare` option](https://github.com/rmosolgo/graphql-ruby/blob/master/guides/fields/arguments.md)
as normal.
as usual.
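For example, a hedged sketch of a single-argument validation (the argument name and message are illustrative):
```ruby
# `prepare` runs after coercion and can reject the value for this one argument.
argument :duration, GraphQL::Types::Int,
         required: false,
         description: 'Duration in seconds.',
         prepare: ->(duration, _ctx) do
           raise GraphQL::ExecutionError, 'duration must be positive' if duration&.negative?
           duration
         end
```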
Sometimes a mutation or resolver may accept a number of optional
arguments, but we still want to validate that at least one of the optional
@ -1969,8 +1969,8 @@ field :created_at, Types::TimeType, null: true, description: 'Timestamp of when
## Testing
For testing mutations and resolvers, consider the unit of
test a full GraphQL request, not a call to a resolver. The reasons for this are
that we want to avoid lots of coupling to the framework, since this makes
test a full GraphQL request, not a call to a resolver. This allows us to
avoid tight coupling to the framework because such coupling makes
upgrades to dependencies much more difficult.
You should:
@ -2034,7 +2034,7 @@ When adding a query, you can use the `a working graphql query` shared example to
renders valid results.
You can construct a query including all available fields using the `GraphqlHelpers#all_graphql_fields_for`
helper. This makes it easy to add a test rendering all possible fields for a query.
helper. This makes it more straightforward to add a test rendering all possible fields for a query.
If you're adding a field to a query that supports pagination and sorting,
visit [Testing](graphql_guide/pagination.md#testing) for details.
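A hedged sketch of how these helpers fit together in a request spec (the factory and field choices are illustrative):
```ruby
# Request-level spec: exercises the full GraphQL stack instead of calling a resolver.
RSpec.describe 'getting project fields', type: :request do
  include GraphqlHelpers
  let_it_be(:project) { create(:project) }
  let_it_be(:user) { project.first_owner }
  let(:query) do
    graphql_query_for(:project, { full_path: project.full_path },
                      all_graphql_fields_for('Project', max_depth: 1))
  end
  it_behaves_like 'a working graphql query' do
    before do
      post_graphql(query, current_user: user)
    end
  end
end
```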
@ -2176,11 +2176,11 @@ end
`spec/requests/api/graphql/ci/pipeline_spec.rb` regardless of the query being
used to fetch the pipeline data.
- There can be possible cyclic dependencies within our GraphQL types.
- There can be cyclic dependencies in our GraphQL types.
See [Adding field with resolver on a Type causes "Can't determine the return type " error on a different Type](https://github.com/rmosolgo/graphql-ruby/issues/3974#issuecomment-1084444214)
and [Fix unresolved name due to cyclic definition](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/84202/diffs#diff-content-32d14251082fd45412e1fdbf5820e62d157e70d2).
In particular, this can happen with `connection_type`. Normally we might use the following in a resolver:
In particular, this can happen with `connection_type`. Typically we might use the following in a resolver:
```ruby
type Types::IssueType.connection_type, null: true
@ -2226,8 +2226,8 @@ end
type "Types::IssueConnection", null: true
```
Only use this style if you are having spec failures. This is not intended to be a new
pattern that we use. This issue should disappear after we've upgraded to `2.x`.
Only use this style if you are having spec failures. We should not typically
use this pattern. This issue should disappear after we've upgraded to `2.x`.
- There can be instances where a spec fails because the class is not loaded correctly.
It relates to the
@ -2268,8 +2268,8 @@ end
See [this merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87295#note_946174036) for some discussion.
Only use this style if you are having spec failures. This is not intended to be a new
pattern that we use. This issue may disappear after we've upgraded to `2.x`.
Only use this style if you are having spec failures. We should not typically use this pattern.
This issue may disappear after we've upgraded to `2.x`.
- When testing resolvers using `GraphqlHelpers#resolve`, arguments for the resolver can be handled two ways.
@ -2299,8 +2299,8 @@ end
```
The use of `:internal_prepared` was added as a bridge for the
[GraphQL gem](https://graphql-ruby.org) upgrade. Testing resolvers directly will be
[removed eventually](https://gitlab.com/gitlab-org/gitlab/-/issues/363121),
[GraphQL gem](https://graphql-ruby.org) upgrade. Testing resolvers directly will
[eventually be removed](https://gitlab.com/gitlab-org/gitlab/-/issues/363121),
and writing unit tests for resolvers/mutations is
[already deprecated](#writing-unit-tests-deprecated)
@ -2341,7 +2341,7 @@ Queries and mutations are limited by depth, complexity, and recursion
to protect server resources from overly ambitious or malicious queries.
These values can be set as defaults and overridden in specific queries as needed.
The complexity values can be set per object as well, and the final query complexity is
evaluated based on how many objects are being returned. This is useful
evaluated based on how many objects are being returned. This can be used
for objects that are expensive (such as requiring Gitaly calls).
For example, a conditional complexity method in a resolver:

View File

@ -12,7 +12,7 @@ As [Werner Vogels](https://twitter.com/Werner), the CTO at Amazon Web Services,
<!-- vale gitlab.Spelling = NO -->
As a developer, it's as important to consider the failure modes in which your software may operate as much as normal operation. Doing so can mean the difference between a minor hiccup leading to a scattering of `500` errors experienced by a tiny fraction of users, and a full site outage that affects all users for an extended period.
As a developer, it's as important to consider the failure modes in which your software may operate as it is to consider typical operation. Doing so can mean the difference between a minor hiccup leading to a scattering of `500` errors experienced by a tiny fraction of users, and a full site outage that affects all users for an extended period.
To paraphrase [Tolstoy](https://en.wikipedia.org/wiki/Anna_Karenina_principle), _all happy servers are alike, but all failing servers are failing in their own way_. Luckily, there are ways we can attempt to simulate these failure modes, and the chaos endpoints are tools for assisting in this process.
@ -65,8 +65,8 @@ GET /-/chaos/leakmem?memory_mb=1024&duration_s=50&async=true
| Attribute | Type | Required | Description |
| ------------ | ------- | -------- | ------------------------------------------------------------------------------------ |
| `memory_mb` | integer | no | How much memory, in MB, should be leaked. Defaults to 100MB. |
| `duration_s` | integer | no | Minimum duration_s, in seconds, that the memory should be retained. Defaults to 30s. |
| `memory_mb` | integer | no | How much memory, in MB, should be leaked. Defaults to 100 MB. |
| `duration_s` | integer | no | Minimum duration_s, in seconds, that the memory should be retained. Defaults to 30 s. |
| `async` | boolean | no | Set to true to leak memory in a Sidekiq background worker process |
```shell
@ -79,7 +79,7 @@ curl "http://localhost:3000/-/chaos/leakmem?memory_mb=1024&duration_s=10&token=s
This endpoint attempts to fully use a single core, at 100%, for the given period.
Depending on your rack server setup, your request may timeout after a predetermined period (normally 60 seconds).
Depending on your rack server setup, your request may time out after a predetermined period (typically 60 seconds).
```plaintext
GET /-/chaos/cpu_spin
@ -89,7 +89,7 @@ GET /-/chaos/cpu_spin?duration_s=50&async=true
| Attribute | Type | Required | Description |
| ------------ | ------- | -------- | --------------------------------------------------------------------- |
| `duration_s` | integer | no | Duration, in seconds, that the core is used. Defaults to 30s |
| `duration_s` | integer | no | Duration, in seconds, that the core is used. Defaults to 30 s |
| `async` | boolean | no | Set to true to consume CPU in a Sidekiq background worker process |
```shell
@ -103,7 +103,7 @@ curl "http://localhost:3000/-/chaos/cpu_spin?duration_s=60&token=secret"
This endpoint attempts to fully use a single core, and interleave it with DB requests, for the given period.
It can be used to model yielding execution to other threads when running concurrently.
Depending on your rack server setup, your request may timeout after a predetermined period (normally 60 seconds).
Depending on your rack server setup, your request may time out after a predetermined period (typically 60 seconds).
```plaintext
GET /-/chaos/db_spin
@ -113,8 +113,8 @@ GET /-/chaos/db_spin?duration_s=50&async=true
| Attribute | Type | Required | Description |
| ------------ | ------- | -------- | --------------------------------------------------------------------------- |
| `interval_s` | float | no | Interval, in seconds, for every DB request. Defaults to 1s |
| `duration_s` | integer | no | Duration, in seconds, that the core is used. Defaults to 30s |
| `interval_s` | float | no | Interval, in seconds, for every DB request. Defaults to 1 s |
| `duration_s` | integer | no | Duration, in seconds, that the core is used. Defaults to 30 s |
| `async` | boolean | no | Set to true to perform the operation in a Sidekiq background worker process |
```shell
@ -137,7 +137,7 @@ GET /-/chaos/sleep?duration_s=50&async=true
| Attribute | Type | Required | Description |
| ------------ | ------- | -------- | ---------------------------------------------------------------------- |
| `duration_s` | integer | no | Duration, in seconds, that the request sleeps for. Defaults to 30s |
| `duration_s` | integer | no | Duration, in seconds, that the request sleeps for. Defaults to 30 s |
| `async` | boolean | no | Set to true to sleep in a Sidekiq background worker process |
```shell
@ -170,7 +170,7 @@ curl "http://localhost:3000/-/chaos/kill?token=secret"
## Quit
This endpoint simulates the unexpected death of a worker process using the `QUIT` signal.
Unlike `KILL`, the `QUIT` signal will also attempt to write a core dump.
Unlike `KILL`, the `QUIT` signal also attempts to write a core dump.
See [core(5)](https://man7.org/linux/man-pages/man5/core.5.html) for more information.
```plaintext
@ -191,7 +191,7 @@ curl "http://localhost:3000/-/chaos/quit?token=secret"
This endpoint triggers a GC run on the worker handling the request and returns its worker ID
plus GC stats as JSON. This is mostly useful when running Puma in standalone mode, since
otherwise the worker handling the request will not be known upfront.
otherwise the worker handling the request cannot be known upfront.
Endpoint:

View File

@ -514,29 +514,30 @@ of how the normalizations are specified.
Given all the constraints above, we can summarize the various goals related to the GLFM
specification and testing infrastructure:
1. A canonical `spec.txt` exists, and represents the official specification for
GLFM, which meets these requirements:
1. The spec is a strict superset of the GitHub Flavored Markdown
1. There is an official specification and single source of truth for how GLFM should render Markdown to HTML.
This source of truth is represented by three Markdown files:
1. [`ghfm_spec_v_?.??.md`](#github-flavored-markdown-specification) for the CommonMark + GFM examples.
1. [`glfm_official_specification.md`](#glfm_official_specificationmd) for the GLFM official examples.
1. [`glfm_internal_extensions.md`](#glfm_internal_extensionsmd) for the GLFM internal extensions.
1. This official specification meets these requirements:
1. The specification is a strict superset of the GitHub Flavored Markdown
(GFM) specification, just as
<abbr title="GitHub Flavored Markdown">GFM</abbr> is a strict superset
[of the CommonMark specification](https://github.github.com/gfm/#what-is-github-flavored-markdown-).
1. Therefore, it contains the superset of all [Markdown examples](#markdown-examples)
for CommonMark and GFM, as well as the GLFM
[official specification](#official-specifications) and [internal extensions](#internal-extensions).
1. It contains a prose introduction section which is specific to GitLab and GLFM.
1. It contains all other non-introduction sections verbatim from the
[GFM specification](#github-flavored-markdown-specification).
1. It contains new, extra sections for all the additional Markdown contained in the GLFM
1. It contains sections and examples for all the additional Markdown contained in the GLFM
[official specification](#official-specifications) and [internal extensions](#internal-extensions),
with [Markdown examples](#markdown-examples) and accompanying prose, just like the CommonMark and GFM examples.
1. All its headers and [Markdown examples](#markdown-examples) should be in the standard format which can be processed by the standard
with [Markdown examples](#markdown-examples) and any accompanying prose, just like the CommonMark and GFM examples.
1. All headers and [Markdown examples](#markdown-examples) should be in the standard format, which can be processed by the standard
CommonMark tool [`spec_tests.py`](https://github.com/github/cmark-gfm/blob/master/test/spec_tests.py) used to perform
[Markdown conformance testing](#markdown-conformance-testing)
against all examples contained in a `spec.txt`.
1. The GLFM parsers and HTML renderers for
both the static backend (Ruby) and WYSIWYG frontend (JavaScript) implementations
support _consistent_ rendering of all canonical Markdown + HTML examples in the
GLFM `spec.txt` specification, as verified by `spec_tests.py`.
specification, as verified by [`run-spec-tests.sh`](#run-spec-testssh-script).
NOTE:
Consistent does not mean that both of these implementations render
@ -616,26 +617,41 @@ them from the corresponding implementation class entry point files under
#### `update-specification.rb` script
The `scripts/glfm/update-specification.rb` script uses [input specification files](#input-specification-files) to
generate and update `spec.txt` (Markdown) and `spec.html` (HTML). The `spec.html` is
generated by passing the generated (or updated) `spec.txt` Markdown to the backend API
for rendering to static HTML:
generate and update Markdown and HTML output files for the
[`spec.txt`](#spectxt) and [`spec.html`](#spechtml)
[output specification files](#output-specification-files) as well as the
[`snapshot_spec.md`](#snapshot_specmd) and [`snapshot_spec.html`](#snapshot_spechtml)
[output example snapshot files](#output-example-snapshot-files).
The HTML files are created by passing the generated (or updated) Markdown to the backend API
for rendering to HTML.
```mermaid
graph LR
subgraph script:
A{update-specification.rb}
A --> B{Backend Markdown API}
S{update-specification.rb}
end
subgraph input:<br/>input specification files
C[ghfm_spec_v_0.29.md] --> A
D[glfm_intro.md] --> A
E[glfm_official_specification.md] --> A
F[glfm_internal_extensions.md] --> A
subgraph input - markdown files
I1[glfm_official_specification.md - GLFM official specification examples] --> S
end
subgraph output:<br/>GLFM specification files
A --> G[spec.txt]
G --> B
B --> H[spec.html]
subgraph output - specification files
S --> O1[spec.txt - GLFM official specification examples]
S --> O2[spec.html - GLFM official specification examples]
end
```
```mermaid
graph LR
subgraph script:
S{update-specification.rb}
end
subgraph input - markdown files
I1[ghfm_spec_v_0.29.md - CommonMark and GHFM specification examples] --> S
I2[glfm_internal_extensions.md - GLFM internal extension examples] --> S
end
subgraph output - example snapshot files
S --> O1[snapshot_spec.md - CommonMark, GHFM, GLFM internal extension examples]
S --> O2[snapshot_spec.html - CommonMark, GHFM, GLFM internal extension examples]
end
```
@ -655,7 +671,8 @@ script, which expects canonical HTML, against the GitLab renderer implementation
`scripts/glfm/run-spec-tests.sh` is a convenience shell script which runs
conformance specs via the CommonMark standard `spec_tests.py` script,
which uses the `glfm_specification/output_spec/spec.txt` file and `scripts/glfm/canonicalize-html.rb`
which uses the `ghfm_spec_v_0.29.md` and `glfm_specification/output_spec/spec.txt` files
with the `scripts/glfm/canonicalize-html.rb`
helper script to test the GLFM renderer implementations' support for rendering Markdown
specification examples to canonical HTML.
@ -669,7 +686,8 @@ subgraph scripts:
end
end
subgraph input
D[spec.txt GLFM specification] --> C
D1[ghfm_spec_v_0.29.md GLFM specification] --> C
D2[spec.txt GLFM specification] --> C
E((GLFM static<br/>renderer implementation)) --> B
F((GLFM WYSIWYG<br/>renderer implementation)) --> B
end
@ -680,22 +698,28 @@ end
#### `update-example-snapshots.rb` script
The `scripts/glfm/update-example-snapshots.rb` script uses the GLFM
`glfm_specification/output_spec/spec.txt` specification file and the
`glfm_specification/input/gitlab_flavored_markdown/glfm_example_status.yml`
file to create and update the [example snapshot](#output-example-snapshot-files)
YAML files:
The `scripts/glfm/update-example-snapshots.rb` script creates and updates the
[example snapshot](#output-example-snapshot-files) YAML files.
Its inputs are:
- The `glfm_specification/output_spec/snapshot_spec.md` file, which contains the
superset of all CommonMark, GFM, and GLFM official and internal examples.
- The `glfm_specification/input/gitlab_flavored_markdown/glfm_example_*.yml` YAML files, which
contain metadata to control how to generate the example snapshot files.
```mermaid
graph LR
subgraph script:
A{update-example-snapshots.rb}
end
subgraph input:<br/>input specification file
B[spec.txt] --> A
C[glfm_example_status.yml] --> A
subgraph input: markdown input specification files
B1[snapshot_spec.md] --> A
C1[glfm_example_status.yml] --> A
C2[glfm_example_normalizations.yml] --> A
C3[glfm_example_metadata.yml] --> A
end
subgraph output:<br/>example snapshot files
subgraph output: YAML example snapshot files
A --> E[examples_index.yml]
A --> F[markdown.yml]
A --> G[html.yml]
@ -736,15 +760,15 @@ end
subgraph script:
A{run-snapshot-tests.sh} -->|invokes| B
end
subgraph output:<br/>test results/output
B --> H[rspec+jest output]
end
subgraph input:<br/>YAML
C[examples_index.yml] --> B
D[markdown.yml] --> B
E[html.yml] --> B
F[prosemirror_json.yml] --> B
end
subgraph output:<br/>test results/output
B --> H[rspec+jest output]
end
```
#### `verify-all-generated-files-are-up-to-date.rb` script
@ -1111,8 +1135,12 @@ move or copy a hosted version of the rendered HTML `spec.html` version to anothe
is a Markdown specification file, in the standard format
with prose and Markdown + canonical HTML examples.
In the GLFM specification, `spec.txt` only contains the official specification examples from
[`glfm_official_specification.md`](#glfm_official_specificationmd). It does not contain
the internal extension examples from [`glfm_internal_extensions.md`](#glfm_internal_extensionsmd).
It also serves as input for other scripts such as
`run-spec-tests.sh`.
[`run-spec-tests.sh`](#run-spec-testssh-script).
It is generated or updated by the `update-specification.rb` script, using the
[input specification files](#input-specification-files) as input.

View File

@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Sidekiq guides
We use [Sidekiq](https://github.com/mperham/sidekiq) as our background
job processor. These guides are for writing jobs that will work well on
job processor. These guides are for writing jobs that work well on
GitLab.com and are consistent with our existing worker classes. For
information on administering GitLab, see [configuring Sidekiq](../../administration/sidekiq/index.md).
@ -74,7 +74,7 @@ A lower retry count may be applicable if any of the below apply:
1. The worker is not idempotent and running it multiple times could
leave the system in an inconsistent state. For example, a worker that
posts a system note and then performs an action: if the second step
fails and the worker retries, the system note will be posted again.
fails and the worker retries, the system note is posted again.
1. The worker is a cronjob that runs frequently. For example, if a cron
job runs every hour, then we don't need to retry beyond an hour
because we don't need two of the same job running at once.
@ -156,7 +156,7 @@ queues in a namespace (technically: all queues prefixed with the namespace name)
when a namespace is provided instead of a simple queue name in the `--queue`
(`-q`) option, or in the `:queues:` section in `config/sidekiq_queues.yml`.
Note that adding a worker to an existing namespace should be done with care, as
Adding a worker to an existing namespace should be done with care, as
the extra jobs take resources away from jobs from workers that were already
there, if the resources available to the Sidekiq process handling the namespace
are not adjusted appropriately.
@ -195,9 +195,9 @@ can read the number or type of provided arguments.
GitLab stores Sidekiq jobs and their arguments in Redis. To avoid
excessive memory usage, we compress the arguments of Sidekiq jobs
if their original size is bigger than 100KB.
if their original size is bigger than 100 KB.
After compression, if their size still exceeds 5MB, it raises an
After compression, if their size still exceeds 5 MB, it raises an
[`ExceedLimitError`](https://gitlab.com/gitlab-org/gitlab/-/blob/f3dd89e5e510ea04b43ffdcb58587d8f78a8d77c/lib/gitlab/sidekiq_middleware/size_limiter/exceed_limit_error.rb#L8)
error when scheduling the job.
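The flow is roughly as follows (a simplified sketch; the constants and method names are for illustration, not the actual middleware):
```ruby
# Simplified outline of the size check: compress large payloads, then reject
# anything still over the hard limit before the job is scheduled.
COMPRESSION_THRESHOLD_BYTES = 100 * 1024      # 100 KB
SIZE_LIMIT_BYTES = 5 * 1024 * 1024            # 5 MB
def validate!(job_args)
  payload = Sidekiq.dump_json(job_args)
  payload = compress(payload) if payload.bytesize > COMPRESSION_THRESHOLD_BYTES
  raise ExceedLimitError if payload.bytesize > SIZE_LIMIT_BYTES
end
```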
@ -227,6 +227,6 @@ tests should be placed in `spec/workers`.
## Interacting with Sidekiq Redis and APIs
The application should minimise interaction with of any `Sidekiq.redis` and Sidekiq [APIs](https://github.com/mperham/sidekiq/blob/main/lib/sidekiq/api.rb). Such interactions in generic application logic should be abstracted to a [Sidekiq middleware](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/gitlab/sidekiq_middleware) for re-use across teams. By decoupling application logic from Sidekiq's datastore, it allows for greater freedom when horizontally scaling the GitLab background processing setup.
The application should minimise interaction with `Sidekiq.redis` and the Sidekiq [APIs](https://github.com/mperham/sidekiq/blob/main/lib/sidekiq/api.rb). Such interactions in generic application logic should be abstracted to a [Sidekiq middleware](https://gitlab.com/gitlab-org/gitlab/-/tree/master/lib/gitlab/sidekiq_middleware) for re-use across teams. Decoupling application logic from the Sidekiq datastore allows for greater freedom when horizontally scaling the GitLab background processing setup.
Some exceptions to this rule would be migration-related logic or administration operations.

View File

@ -107,6 +107,23 @@ triggering the job.
The job token is secured by its short life-time and limited scope. It could possibly be leaked if multiple jobs run on the same machine ([like with the shell runner](https://docs.gitlab.com/runner/security/#usage-of-shell-executor)). On Docker Machine runners, configuring [`MaxBuilds=1`](https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runnersmachine-section) is recommended to make sure runner machines only ever run one build and are destroyed afterwards. This may impact performance, as provisioning machines takes some time.
## Other tokens
### Feed token
Each user has a long-lived feed token that does not expire. This token allows authentication for:
- RSS readers to load a personalized RSS feed.
- Calendar applications to load a personalized calendar.
This token is visible in those feed URLs. You cannot use this token to access any other data.
Anyone who has your token can read activity and issue RSS feeds or your calendar feed as if they were you, including confidential issues. If that happens, [reset the token](../user/profile/contributions_calendar.md#reset-the-user-activity-feed-token).
### Incoming email token
Each user has a long-lived incoming email token that does not expire. This token allows a user to [create a new issue by email](../user/project/issues/create_issues.md#by-sending-an-email), and is included in that user's personal project-specific email addresses. You cannot use this token to access any other data. Anyone who has your token can create issues and merge requests as if they were you. If that happens, reset the token.
## Available scopes
This table shows available scopes per token. Scopes can be limited further on token creation.

View File

@ -636,7 +636,7 @@ sudo touch /etc/gitlab/skip-auto-reconfigure
sudo SKIP_POST_DEPLOYMENT_MIGRATIONS=true gitlab-rake db:migrate
```
1. If this deploy node is normally used to serve requests or process jobs,
1. If this deploy node is used to serve requests or process jobs,
then you may return it to service at this point.
- To serve requests, add the deploy node to the load balancer.
@ -706,7 +706,7 @@ sudo touch /etc/gitlab/skip-auto-reconfigure
sudo SKIP_POST_DEPLOYMENT_MIGRATIONS=true gitlab-rake db:migrate:geo
```
1. If this deploy node is normally used to serve requests or perform
1. If this deploy node is used to serve requests or perform
background processing, then you may return it to service at this point.
- To serve requests, add the deploy node to the load balancer.

View File

@ -121,34 +121,39 @@ It can also help to compare the XML response from your provider with our [exampl
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/9152) in GitLab 13.11 with enforcing open SSO session to use Git if this setting is switched on.
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/339888) in GitLab 14.7 to not enforce SSO checks for Git activity originating from CI/CD jobs.
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/215155) in GitLab 15.5 [with a flag](../../../administration/feature_flags.md) named `transparent_sso_enforcement` to include transparent enforcement even when SSO enforcement is not enabled. Disabled on GitLab.com.
> - [Improved](https://gitlab.com/gitlab-org/gitlab/-/issues/375788) in GitLab 15.8 by enabling transparent SSO by default on GitLab.com.
FLAG:
On self-managed GitLab, transparent SSO enforcement is unavailable. On GitLab.com, see the [Transparent SSO rollout](https://gitlab.com/gitlab-org/gitlab/-/issues/375788) issue for the current status.
On self-managed GitLab, transparent SSO enforcement is unavailable. An
[issue exists](https://gitlab.com/gitlab-org/gitlab/-/issues/382917) to add
transparent SSO enforcement to self-managed GitLab.
On GitLab.com, transparent SSO enforcement is available by default. To turn off
transparent SSO, ask a support or production team to enable the
`transparent_sso_enforcement_override` feature flag for a specific customer
group.
SSO is enforced when users access groups and projects in the organization's group hierarchy. Users can view other groups and projects without SSO sign in.
#### Transparent SSO enforcement
SSO is enforced for each user with an existing SAML identity when the following is enabled:
By default, transparent SSO enforcement is enabled on GitLab.com. This means SSO is enforced:
- SAML SSO.
- The `:transparent_sso_enforcement` feature flag.
- When users access groups and projects in the organization's
group hierarchy. Users can view other groups and projects without SSO sign in.
- For each user with an existing SAML identity.
When transparent SSO enforcement is enabled, users:
- Are not prompted to sign in through SSO on each visit. GitLab checks
whether a user has authenticated through SSO. If the user last signed in more
than 24 hours ago, GitLab prompts the user to sign in again through SSO.
- Without SAML identities are not required to use SSO unless **Enforce
SSO-only authentication for web activity for this group** is enabled.
A user has a SAML identity if one or both of the following are true:
- They have signed in to GitLab by using their GitLab group's single sign-on URL.
- They were provisioned by SCIM.
Users without SAML identities are not required to use SSO unless explicit enforcement is enabled.
When the **Enforce SSO-only authentication for web activity for this group** option is enabled, all users must access GitLab by using their GitLab group's single sign-on URL to access group resources,
regardless of whether they have an existing SAML identity.
Users also cannot be added as new members manually.
Users with the Owner role can use the standard sign in process to make necessary changes to top-level group settings.
However, users are not prompted to sign in through SSO on each visit. GitLab checks whether a user
has authenticated through SSO. If it's been more than 1 day since the last sign-in, GitLab
prompts the user to sign in again through SSO.
When the transparent SSO enforcement feature flag is enabled, SSO is enforced as follows:
With transparent SSO enabled, SSO is enforced as follows:
| Project/Group visibility | Enforce SSO setting | Member with identity | Member without identity | Non-member or not signed in |
|--------------------------|---------------------|--------------------| ------ |------------------------------|
@ -157,36 +162,45 @@ When the transparent SSO enforcement feature flag is enabled, SSO is enforced as
| Public | Off | Enforced | Not enforced | Not enforced |
| Public | On | Enforced | Enforced | Not enforced |
An [issue exists](https://gitlab.com/gitlab-org/gitlab/-/issues/297389) to add a similar SSO requirement for API and GitLab Pages activities.
An [issue exists](https://gitlab.com/gitlab-org/gitlab/-/issues/297389) to add a similar SSO requirement for API activity.
SSO enforcement has the following effects when enabled:
#### SSO-only for web activity enforcement
- For groups, users can't share a project in the group outside the top-level group,
even if the project is forked.
- For Git activity over SSH and HTTPS, users must have at least one active session signed-in through SSO before they can push to or
When the **Enforce SSO-only authentication for web activity for this group** option is enabled:
- All users must access GitLab by using their GitLab group's single sign-on URL
to access group resources, regardless of whether they have an existing SAML
identity.
- SSO is enforced when users access groups and projects in the organization's
group hierarchy. Users can view other groups and projects without SSO sign in.
- Users cannot be added as new members manually.
- Users with the Owner role can use the standard sign in process to make
necessary changes to top-level group settings.
SSO enforcement for web activity has the following effects when enabled:
- For groups, users cannot share a project in the group outside the top-level
group, even if the project is forked.
- For Git activity over SSH and HTTPS, users must have at least one active
session signed-in through SSO before they can push to or
pull from a GitLab repository.
- Git activity originating from CI/CD jobs do not have the SSO check enforced.
- Credentials that are not tied to regular users (for example, project and group access tokens, and deploy keys) do not have the SSO check enforced.
- Users must be signed-in through SSO before they can pull images using the [Dependency Proxy](../../packages/dependency_proxy/index.md).
- When the **Enforce SSO-only authentication for Git and Dependency Proxy activity for this group** option is enabled, any API endpoint that involves Git activity is under SSO
enforcement. For example, creating or deleting a branch, commit, or tag.
- Credentials that are not tied to regular users (for example, project and group
access tokens, and deploy keys) do not have the SSO check enforced.
- Users must be signed-in through SSO before they can pull images using the
[Dependency Proxy](../../packages/dependency_proxy/index.md).
- When the **Enforce SSO-only authentication for Git and Dependency Proxy
activity for this group** option is enabled, any API endpoint that involves
Git activity is under SSO enforcement. For example, creating or deleting a
branch, commit, or tag.
When SSO is enforced, users are not immediately revoked. If the user:
When SSO for web activity is enforced, non-SSO group members do not lose access
immediately. If the user:
- Is signed out, they cannot access the group after being removed from the identity provider.
- Has an active session, they can continue accessing the group for up to 24 hours until the identity
provider session times out.
### Selectively enable and disable transparent SSO enforcement
There are two feature flags associated with this feature to allow precise control. If a customer has a problem with transparent SSO on GitLab.com, GitLab can help troubleshoot and override the feature flag as necessary.
**`transparent_sso_enforcement`:** This feature flag should only be enabled or disabled by the Authentication and Authorization group
or in the case of a serious and widespread issue affecting many groups or users. See [issue 375788](https://gitlab.com/gitlab-org/gitlab/-/issues/375788) for the current GitLab.com rollout status.
**`transparent_sso_enforcement_override`:** When the `transparent_sso_enforcement` feature flag is enabled, support or production teams can
turn off transparent SSO by enabling this feature flag for a specific customer group. **Enabling** this feature flag
disables transparent SSO enforcement.
- Has an active session, they can continue accessing the group for up to 24
hours until the identity provider session times out.
- Is signed out, they cannot access the group after being removed from the
identity provider.
## Providers

View File

@ -66,8 +66,8 @@ Depending on your role, to manage your transfer usage you can [reduce Container
## Project storage limit
Projects on GitLab SaaS have a 10GB storage limit on their Git repository and LFS storage.
After namespace-level storage limits are applied, the project limit will be removed. A namespace has either a namespace-level storage limit or a project-level storage limit, but not both.
Projects on GitLab SaaS have a 10 GB storage limit on their Git repository and LFS storage.
After namespace-level storage limits are applied, the project limit is removed. A namespace has either a namespace-level storage limit or a project-level storage limit, but not both.
When a project's repository and LFS reaches the quota, the project is locked.
You cannot push changes to a locked project. To monitor the size of each
@ -122,7 +122,7 @@ available decreases. All projects remain unlocked because 40 GB purchased storag
## Namespace storage limit
Namespaces on GitLab SaaS have a storage limit. For more information, see our [pricing page](https://about.gitlab.com/pricing/).
This limit is not visible on the **Usage quotas** page, but will be prior to the limit being [applied](#namespace-storage-limit-application-schedule). Self-managed deployments are not affected.
This limit is not visible on the **Usage quotas** page, but becomes visible before the limit is [applied](#namespace-storage-limit-application-schedule). Self-managed deployments are not affected.
Storage types that add to the total namespace storage are:
@ -142,10 +142,10 @@ To prevent exceeding the namespace storage quota, you can:
- Reduce storage consumption by following the suggestions in the [Manage Your Storage Usage](#manage-your-storage-usage) section of this page.
- Apply for [GitLab for Education](https://about.gitlab.com/solutions/education/join/), [GitLab for Open Source](https://about.gitlab.com/solutions/open-source/join/), or [GitLab for Startups](https://about.gitlab.com/solutions/startups/) if you meet the eligibility requirements.
- Consider using a [self-managed instance](../subscriptions/self_managed/index.md) of GitLab which does not have these limits on the free tier.
- [Purchase additional storage](../subscriptions/gitlab_com/index.md#purchase-more-storage-and-transfer) units at $60/year for 10GB of storage.
- [Purchase additional storage](../subscriptions/gitlab_com/index.md#purchase-more-storage-and-transfer) units at $60/year for 10 GB of storage.
- [Start a trial](https://about.gitlab.com/free-trial/) or [upgrade to GitLab Premium or Ultimate](https://about.gitlab.com/pricing/) which include higher limits and features that enable growing teams to ship faster without sacrificing on quality.
- [Talk to an expert](https://page.gitlab.com/usage_limits_help.html) to learn more about your options and ask questions.
### Namespace storage limit application schedule
Information on when namespace-level storage limits will be applied is available on these FAQ pages for the [Free](https://about.gitlab.com/pricing/faq-efficient-free-tier/#storage-limits-on-gitlab-saas-free-tier) and [Paid](https://about.gitlab.com/pricing/faq-paid-storage-transfer/) tier.
Information on when namespace-level storage limits are applied is available on these FAQ pages for the [Free](https://about.gitlab.com/pricing/faq-efficient-free-tier/#storage-limits-on-gitlab-saas-free-tier) and [Paid](https://about.gitlab.com/pricing/faq-paid-storage-transfer/) tiers.

View File

@ -191,8 +191,8 @@ module API
optional :group_runner_token_expiration_interval, type: Integer, desc: 'Token expiration interval for group runners, in seconds'
optional :project_runner_token_expiration_interval, type: Integer, desc: 'Token expiration interval for project runners, in seconds'
optional :pipeline_limit_per_project_user_sha, type: Integer, desc: "Maximum number of pipeline creation requests allowed per minute per user and commit. Set to 0 for unlimited requests per minute."
optional :jira_connect_application_key, type: String, desc: "Application ID of the OAuth application that should be used to authenticate with the GitLab.com for Jira Cloud app"
optional :jira_connect_proxy_url, type: String, desc: "URL of the GitLab instance that should be used as a proxy for the GitLab.com for Jira Cloud app"
optional :jira_connect_application_key, type: String, desc: "Application ID of the OAuth application that should be used to authenticate with the GitLab for Jira Cloud app"
optional :jira_connect_proxy_url, type: String, desc: "URL of the GitLab instance that should be used as a proxy for the GitLab for Jira Cloud app"
optional :bulk_import_enabled, type: Boolean, desc: 'Enable migrating GitLab groups and projects by direct transfer'
optional :allow_runner_registration_token, type: Boolean, desc: 'Allow registering runners using a registration token'

View File

@ -56,7 +56,7 @@
"@cubejs-client/vue": "^0.31.19",
"@gitlab/at.js": "1.5.7",
"@gitlab/favicon-overlay": "2.0.0",
"@gitlab/fonts": "^1.1.2",
"@gitlab/fonts": "^1.2.0",
"@gitlab/svgs": "3.18.0",
"@gitlab/ui": "54.1.1",
"@gitlab/visual-review-tools": "1.7.3",

View File

@ -98,7 +98,7 @@ function rspec_args() {
local rspec_opts="${1}"
local junit_report_file="${2:-${JUNIT_RESULT_FILE}}"
echo "-Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out ${junit_report_file} ${rspec_opts}"
echo "-Ispec -rspec_helper --color --failure-exit-code 1 --error-exit-code 2 --format documentation --format RspecJunitFormatter --out ${junit_report_file} ${rspec_opts}"
}
function rspec_simple_job() {
@ -110,10 +110,18 @@ function rspec_simple_job() {
eval "${rspec_cmd}"
}
function rspec_simple_job_with_retry () {
local rspec_run_status=0
rspec_simple_job "${1}" "${2}" || rspec_run_status=$?
handle_retry_rspec_in_new_process $rspec_run_status
}
function rspec_db_library_code() {
local db_files="spec/lib/gitlab/database/"
rspec_simple_job "-- ${db_files}"
rspec_simple_job_with_retry "-- ${db_files}"
}
function debug_rspec_variables() {
@ -131,11 +139,30 @@ function debug_rspec_variables() {
echoinfo "FLAKY_RSPEC_REPORT_PATH: ${FLAKY_RSPEC_REPORT_PATH}"
echoinfo "NEW_FLAKY_RSPEC_REPORT_PATH: ${NEW_FLAKY_RSPEC_REPORT_PATH}"
echoinfo "SKIPPED_FLAKY_TESTS_REPORT_PATH: ${SKIPPED_FLAKY_TESTS_REPORT_PATH}"
echoinfo "RETRIED_TESTS_REPORT_PATH: ${RETRIED_TESTS_REPORT_PATH}"
echoinfo "CRYSTALBALL: ${CRYSTALBALL}"
}
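# The RSpec exit status drives the retry behaviour: 2 means errors happened outside
# the examples (no retry), 1 means some examples failed (retried in a new process when
# RETRY_FAILED_TESTS_IN_NEW_PROCESS is "true"), and 0 means there is nothing to retry.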
function handle_retry_rspec_in_new_process() {
local rspec_run_status="${1}"
if [[ $rspec_run_status -eq 2 ]]; then
echoerr "Not retrying failing examples since there were errors happening outside of the RSpec examples!"
elif [[ $rspec_run_status -eq 1 ]]; then
# Experiment to retry failed examples in a new RSpec process: https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1148
if [[ "${RETRY_FAILED_TESTS_IN_NEW_PROCESS}" == "true" ]]; then
retry_failed_rspec_examples
rspec_run_status=$?
else
echoerr "Not retrying failing examples since \$RETRY_FAILED_TESTS_IN_NEW_PROCESS != 'true'!"
fi
else
echosuccess "No examples to retry, congrats!"
fi
exit $rspec_run_status
}
function rspec_paralellized_job() {
read -ra job_name <<< "${CI_JOB_NAME}"
local test_tool="${job_name[0]}"
@ -179,7 +206,6 @@ function rspec_paralellized_job() {
export FLAKY_RSPEC_REPORT_PATH="${rspec_flaky_folder_path}all_${report_name}_report.json"
export NEW_FLAKY_RSPEC_REPORT_PATH="${rspec_flaky_folder_path}new_${report_name}_report.json"
export SKIPPED_FLAKY_TESTS_REPORT_PATH="${rspec_flaky_folder_path}skipped_flaky_tests_${report_name}_report.txt"
export RETRIED_TESTS_REPORT_PATH="${rspec_flaky_folder_path}retried_tests_${report_name}_report.txt"
if [[ -d "ee/" ]]; then
export KNAPSACK_GENERATE_REPORT="true"
@ -204,17 +230,7 @@ function rspec_paralellized_job() {
echoinfo "RSpec exited with ${rspec_run_status}."
# Experiment to retry failed examples in a new RSpec process: https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1148
if [[ $rspec_run_status -ne 0 ]]; then
if [[ "${RETRY_FAILED_TESTS_IN_NEW_PROCESS}" == "true" ]]; then
retry_failed_rspec_examples
rspec_run_status=$?
fi
else
echosuccess "No examples to retry, congrats!"
fi
exit $rspec_run_status
handle_retry_rspec_in_new_process $rspec_run_status
}
function retry_failed_rspec_examples() {
@ -228,6 +244,12 @@ function retry_failed_rspec_examples() {
# Keep track of the tests that are retried, later consolidated in a single file by the `rspec:flaky-tests-report` job
local failed_examples=$(grep " failed" ${RSPEC_LAST_RUN_RESULTS_FILE})
local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg12 1/24' would become 'rspec_unit_pg12_1_24'
local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
export RETRIED_TESTS_REPORT_PATH="${rspec_flaky_folder_path}retried_tests_${report_name}_report.txt"
echoinfo "RETRIED_TESTS_REPORT_PATH: ${RETRIED_TESTS_REPORT_PATH}"
echo "${CI_JOB_URL}" > "${RETRIED_TESTS_REPORT_PATH}"
echo $failed_examples >> "${RETRIED_TESTS_REPORT_PATH}"
@ -241,8 +263,11 @@ function retry_failed_rspec_examples() {
# Disable simplecov so retried tests don't override test coverage report
export SIMPLECOV=0
local default_knapsack_pattern="{,ee/,jh/}spec/{,**/}*_spec.rb"
local knapsack_test_file_pattern="${KNAPSACK_TEST_FILE_PATTERN:-$default_knapsack_pattern}"
# Retry only the tests that failed on first try
rspec_simple_job "--only-failures --pattern \"${KNAPSACK_TEST_FILE_PATTERN}\"" "${JUNIT_RETRY_FILE}"
rspec_simple_job "--only-failures --pattern \"${knapsack_test_file_pattern}\"" "${JUNIT_RETRY_FILE}"
rspec_run_status=$?
# Merge the JUnit report from retry into the first-try report
@ -295,7 +320,7 @@ function rspec_rerun_previous_failed_tests() {
fi
if [[ -n $test_files ]]; then
rspec_simple_job "${test_files}"
rspec_simple_job_with_retry "${test_files}"
else
echo "No failed test files to rerun"
fi
@ -316,7 +341,7 @@ function rspec_fail_fast() {
fi
if [[ -n $test_files ]]; then
rspec_simple_job "${rspec_opts} ${test_files}"
rspec_simple_job_with_retry "${rspec_opts} ${test_files}"
else
echo "No rspec fail-fast tests to run"
fi
@ -351,7 +376,7 @@ function generate_frontend_fixtures_mapping() {
mkdir -p $(dirname "$FRONTEND_FIXTURES_MAPPING_PATH")
rspec_simple_job "--pattern \"${pattern}\""
rspec_simple_job_with_retry "--pattern \"${pattern}\""
}
function cleanup_individual_job_reports() {

View File

@ -33,9 +33,9 @@ RSpec.describe 'Database schema', feature_category: :database do
chat_names: %w[chat_id team_id user_id integration_id],
chat_teams: %w[team_id],
ci_build_needs: %w[partition_id],
ci_build_pending_states: %w[partition_id],
ci_build_pending_states: %w[partition_id build_id],
ci_build_report_results: %w[partition_id],
ci_build_trace_chunks: %w[partition_id],
ci_build_trace_chunks: %w[partition_id build_id],
ci_build_trace_metadata: %w[partition_id],
ci_builds: %w[erased_by_id trigger_request_id partition_id],
ci_builds_runner_session: %w[partition_id build_id],
@ -52,7 +52,7 @@ RSpec.describe 'Database schema', feature_category: :database do
ci_sources_pipelines: %w[partition_id source_partition_id],
ci_stages: %w[partition_id],
ci_trigger_requests: %w[commit_id],
ci_unit_test_failures: %w[partition_id],
ci_unit_test_failures: %w[partition_id build_id],
cluster_providers_aws: %w[security_group_id vpc_id access_key_id],
cluster_providers_gcp: %w[gcp_project_id operation_id],
compliance_management_frameworks: %w[group_id],

View File

@ -206,7 +206,7 @@ describe('Api', () => {
expires_at: undefined,
};
mock.onPost(expectedUrl).reply(200, {
mock.onPost(expectedUrl).reply(HTTP_STATUS_OK, {
status: 'success',
});
@ -478,7 +478,7 @@ describe('Api', () => {
jest.spyOn(axios, 'post');
mock.onPost(expectedUrl).reply(200, {
mock.onPost(expectedUrl).reply(HTTP_STATUS_OK, {
status: 'success',
});
@ -494,7 +494,7 @@ describe('Api', () => {
const projectId = 1;
const options = { state: 'active' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/1/milestones`;
mock.onGet(expectedUrl).reply(200, [
mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
id: 1,
title: 'milestone1',
@ -514,7 +514,7 @@ describe('Api', () => {
const projectId = 1;
const issueIid = 11;
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/1/issues/11/todo`;
mock.onPost(expectedUrl).reply(200, {
mock.onPost(expectedUrl).reply(HTTP_STATUS_OK, {
id: 112,
project: {
id: 1,
@ -541,7 +541,7 @@ describe('Api', () => {
expires_at: undefined,
};
mock.onPost(expectedUrl).reply(200, {
mock.onPost(expectedUrl).reply(HTTP_STATUS_OK, {
status: 'success',
});
@ -644,7 +644,7 @@ describe('Api', () => {
jest.spyOn(axios, 'post');
mock.onPost(expectedUrl).reply(200, {
mock.onPost(expectedUrl).reply(HTTP_STATUS_OK, {
status: 'success',
});
@ -958,7 +958,7 @@ describe('Api', () => {
jest.spyOn(axios, 'post');
mock.onPost(expectedUrl).replyOnce(200, [
mock.onPost(expectedUrl).replyOnce(HTTP_STATUS_OK, [
{
id: 'abcdefghijklmnop',
short_id: 'abcdefg',
@ -984,7 +984,9 @@ describe('Api', () => {
mock
.onGet(expectedUrl)
.replyOnce(200, [{ id: 'abcdef', short_id: 'abcdefghi', title: 'Dummy commit title' }]);
.replyOnce(HTTP_STATUS_OK, [
{ id: 'abcdef', short_id: 'abcdefghi', title: 'Dummy commit title' },
]);
return Api.allContextCommits(projectPath, mergeRequestId).then(({ data }) => {
expect(data[0].title).toBe('Dummy commit title');

View File

@ -104,9 +104,11 @@ describe('JiraConnect API', () => {
response = await makeRequest();
expect(axiosInstance.get).toHaveBeenCalledWith(mockGroupsPath, {
headers: {},
params: {
page: mockPage,
per_page: mockPerPage,
search: undefined,
},
});
expect(response.data).toEqual(mockResponse);

View File

@ -27,6 +27,7 @@ jest.mock('~/jira_connect/subscriptions/api', () => {
});
const mockGroupsPath = '/groups';
const mockAccessToken = '123';
describe('GroupsList', () => {
let wrapper;
@ -39,6 +40,9 @@ describe('GroupsList', () => {
provide: {
groupsPath: mockGroupsPath,
},
computed: {
accessToken: () => mockAccessToken,
},
...options,
}),
);
@ -148,11 +152,15 @@ describe('GroupsList', () => {
});
it('calls `fetchGroups` with search term', () => {
expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
page: 1,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: mockSearchTeam,
});
expect(fetchGroups).toHaveBeenLastCalledWith(
mockGroupsPath,
{
page: 1,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: mockSearchTeam,
},
mockAccessToken,
);
});
it('disables GroupListItems', () => {
@ -222,11 +230,15 @@ describe('GroupsList', () => {
findSearchBox().vm.$emit('input', newSearch);
if (shouldSearch) {
expect(fetchGroups).toHaveBeenCalledWith(mockGroupsPath, {
page: 1,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: expectedSearchValue,
});
expect(fetchGroups).toHaveBeenCalledWith(
mockGroupsPath,
{
page: 1,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: expectedSearchValue,
},
mockAccessToken,
);
} else {
expect(fetchGroups).not.toHaveBeenCalled();
}
@ -257,11 +269,15 @@ describe('GroupsList', () => {
});
it('should load results for page 2', () => {
expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
page: 2,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: '',
});
expect(fetchGroups).toHaveBeenLastCalledWith(
mockGroupsPath,
{
page: 2,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: '',
},
mockAccessToken,
);
});
it.each`
@ -274,11 +290,15 @@ describe('GroupsList', () => {
const searchBox = findSearchBox();
searchBox.vm.$emit('input', searchTerm);
expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
page: expectedPage,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: expectedSearchTerm,
});
expect(fetchGroups).toHaveBeenLastCalledWith(
mockGroupsPath,
{
page: expectedPage,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: expectedSearchTerm,
},
mockAccessToken,
);
},
);
});
@ -324,11 +344,15 @@ describe('GroupsList', () => {
const paginationEl = findPagination();
paginationEl.vm.$emit('input', 2);
expect(fetchGroups).toHaveBeenLastCalledWith(mockGroupsPath, {
page: 2,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: '',
});
expect(fetchGroups).toHaveBeenLastCalledWith(
mockGroupsPath,
{
page: 2,
perPage: DEFAULT_GROUPS_PER_PAGE,
search: '',
},
mockAccessToken,
);
});
});
});

View File

@ -18,12 +18,14 @@ describe('generateRefDestinationPath', () => {
${`${projectRootPath}/-/blob/${currentRef}/dir1/dir2/test.js#L123`} | ${`${projectRootPath}/-/blob/${selectedRef}/dir1/dir2/test.js#L123`}
`('generates the correct destination path for $currentPath', ({ currentPath, result }) => {
setWindowLocation(currentPath);
expect(generateRefDestinationPath(projectRootPath, selectedRef)).toBe(result);
expect(generateRefDestinationPath(projectRootPath, currentRef, selectedRef)).toBe(result);
});
it('encodes the selected ref', () => {
const result = `${projectRootPath}/-/tree/${encodedRefWithSpecialCharMock}`;
expect(generateRefDestinationPath(projectRootPath, refWithSpecialCharMock)).toBe(result);
expect(generateRefDestinationPath(projectRootPath, currentRef, refWithSpecialCharMock)).toBe(
result,
);
});
});

View File

@ -0,0 +1,70 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Resolvers::Ci::VariablesResolver, feature_category: :pipeline_authoring do
include GraphqlHelpers
describe '#resolve' do
let_it_be(:user) { create(:user) }
let_it_be(:args) { {} }
let_it_be(:obj) { nil }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project) }
let_it_be(:ci_instance_variables) do
[
create(:ci_instance_variable, key: 'a'),
create(:ci_instance_variable, key: 'b')
]
end
let_it_be(:ci_group_variables) do
[
create(:ci_group_variable, group: group, key: 'a'),
create(:ci_group_variable, group: group, key: 'b')
]
end
let_it_be(:ci_project_variables) do
[
create(:ci_variable, project: project, key: 'a'),
create(:ci_variable, project: project, key: 'b')
]
end
subject(:resolve_variables) { resolve(described_class, obj: obj, ctx: { current_user: user }, args: args) }
context 'when parent object is nil' do
context 'when user is authorized', :enable_admin_mode do
let_it_be(:user) { create(:admin) }
it "returns the instance's variables" do
expect(resolve_variables.items.to_a).to match_array(ci_instance_variables)
end
end
context 'when user is not authorized' do
it "returns nil" do
expect(resolve_variables).to be_nil
end
end
end
context 'when parent object is a Group' do
let_it_be(:obj) { group }
it "returns the group's variables" do
expect(resolve_variables.items.to_a).to match_array(ci_group_variables)
end
end
context 'when parent object is a Project' do
let_it_be(:obj) { project }
it "returns the project's variables" do
expect(resolve_variables.items.to_a).to match_array(ci_project_variables)
end
end
end
end
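The resolver itself is not part of this diff; a hedged sketch of the dispatch the spec above describes, returning instance-level variables only when there is no parent object and the user is an admin, and the parent's own variables otherwise (method and helper names are assumptions):

# Illustrative only, not the resolver's actual implementation.
def variables_for(obj, current_user)
  case obj
  when Group, Project
    obj.variables
  when nil
    Ci::InstanceVariable.all if current_user&.can_admin_all_resources?
  end
end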

View File

@ -0,0 +1,12 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Types::Ci::VariableSortEnum, feature_category: :pipeline_authoring do
it 'exposes the available order methods' do
expect(described_class.values).to match(
'KEY_ASC' => have_attributes(value: :key_asc),
'KEY_DESC' => have_attributes(value: :key_desc)
)
end
end
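The enum definition is outside this diff; a sketch consistent with the values asserted above, assuming GraphQL-Ruby's value DSL and a GitLab-style BaseEnum (descriptions are illustrative):

# Sketch only; the real type lives in app/graphql and is not shown here.
module Types
  module Ci
    class VariableSortEnum < BaseEnum
      value 'KEY_ASC', 'Sorted by key in ascending order.', value: :key_asc
      value 'KEY_DESC', 'Sorted by key in descending order.', value: :key_desc
    end
  end
end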

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Ci::HasVariable do
RSpec.describe Ci::HasVariable, feature_category: :continuous_integration do
subject { build(:ci_variable) }
it { is_expected.to validate_presence_of(:key) }
@ -113,4 +113,36 @@ RSpec.describe Ci::HasVariable do
end
end
end
describe '.order_by' do
let_it_be(:relation) { Ci::Variable.all }
it 'supports ordering by key ascending' do
expect(relation).to receive(:reorder).with({ key: :asc })
relation.order_by('key_asc')
end
it 'supports ordering by key descending' do
expect(relation).to receive(:reorder).with({ key: :desc })
relation.order_by('key_desc')
end
context 'when order method is unknown' do
it 'does not call reorder' do
expect(relation).not_to receive(:reorder)
relation.order_by('unknown')
end
end
context 'when order method is nil' do
it 'does not call reorder' do
expect(relation).not_to receive(:reorder)
relation.order_by(nil)
end
end
end
end
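A minimal sketch of an order_by class method consistent with these expectations (the concern's real code is not shown in this diff): only the two recognised sort keys trigger a reorder, anything else, including nil, falls through without reordering.

# Sketch only; mirrors the spec's behaviour, not necessarily Ci::HasVariable.
def self.order_by(method)
  case method.to_s
  when 'key_asc'  then reorder(key: :asc)
  when 'key_desc' then reorder(key: :desc)
  else all
  end
end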

View File

@ -72,4 +72,32 @@ RSpec.describe 'Query.group(fullPath).ciVariables', feature_category: :pipeline_authoring do
expect(graphql_data.dig('group', 'ciVariables')).to be_nil
end
end
describe 'sorting and pagination' do
let_it_be(:current_user) { user }
let_it_be(:data_path) { [:group, :ci_variables] }
let_it_be(:variables) do
[
create(:ci_group_variable, group: group, key: 'd'),
create(:ci_group_variable, group: group, key: 'a'),
create(:ci_group_variable, group: group, key: 'c'),
create(:ci_group_variable, group: group, key: 'e'),
create(:ci_group_variable, group: group, key: 'b')
]
end
def pagination_query(params)
graphql_query_for(
:group,
{ fullPath: group.full_path },
query_graphql_field('ciVariables', params, "#{page_info} nodes { id }")
)
end
before do
group.add_owner(current_user)
end
it_behaves_like 'sorted paginated variables'
end
end

View File

@ -69,4 +69,28 @@ RSpec.describe 'Query.ciVariables', feature_category: :pipeline_authoring do
expect(graphql_data.dig('ciVariables')).to be_nil
end
end
describe 'sorting and pagination' do
let_it_be(:current_user) { create(:admin) }
let_it_be(:data_path) { [:ci_variables] }
let_it_be(:variables) do
[
create(:ci_instance_variable, key: 'd'),
create(:ci_instance_variable, key: 'a'),
create(:ci_instance_variable, key: 'c'),
create(:ci_instance_variable, key: 'e'),
create(:ci_instance_variable, key: 'b')
]
end
def pagination_query(params)
graphql_query_for(
:ci_variables,
params,
"#{page_info} nodes { id }"
)
end
it_behaves_like 'sorted paginated variables'
end
end

View File

@ -66,4 +66,32 @@ RSpec.describe 'Query.project(fullPath).ciVariables', feature_category: :pipeline_authoring do
expect(graphql_data.dig('project', 'ciVariables')).to be_nil
end
end
describe 'sorting and pagination' do
let_it_be(:current_user) { user }
let_it_be(:data_path) { [:project, :ci_variables] }
let_it_be(:variables) do
[
create(:ci_variable, project: project, key: 'd'),
create(:ci_variable, project: project, key: 'a'),
create(:ci_variable, project: project, key: 'c'),
create(:ci_variable, project: project, key: 'e'),
create(:ci_variable, project: project, key: 'b')
]
end
def pagination_query(params)
graphql_query_for(
:project,
{ fullPath: project.full_path },
query_graphql_field('ciVariables', params, "#{page_info} nodes { id }")
)
end
before do
project.add_maintainer(current_user)
end
it_behaves_like 'sorted paginated variables'
end
end

View File

@ -5,18 +5,88 @@ require 'spec_helper'
RSpec.describe Projects::ContainerRepository::DestroyService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:params) { {} }
subject { described_class.new(project, user) }
subject { described_class.new(project, user, params) }
before do
stub_container_registry_config(enabled: true)
end
context 'when user does not have access to registry' do
let!(:repository) { create(:container_repository, :root, project: project) }
shared_examples 'returning an error status with message' do |error_message|
it 'returns an error status' do
response = subject.execute(repository)
it 'does not delete a repository' do
expect { subject.execute(repository) }.not_to change { ContainerRepository.count }
expect(response).to include(status: :error, message: error_message)
end
end
shared_examples 'executing with permissions' do
let_it_be_with_refind(:repository) { create(:container_repository, :root, project: project) }
before do
stub_container_registry_tags(repository: :any, tags: %w[latest stable])
end
it 'deletes the repository' do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :success)
expect { subject.execute(repository) }.to change { ContainerRepository.count }.by(-1)
end
it 'sends disable_timeout = true as part of the params as default' do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :success, disable_timeout: true)
expect { subject.execute(repository) }.to change { ContainerRepository.count }.by(-1)
end
it 'sends disable_timeout = false as part of the params if it is set to false' do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :success, disable_timeout: false)
expect { subject.execute(repository, disable_timeout: false) }.to change { ContainerRepository.count }.by(-1)
end
context 'when deleting the tags fails' do
before do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :error)
allow(Gitlab::AppLogger).to receive(:error).and_call_original
end
it 'sets status as deleted_failed' do
subject.execute(repository)
expect(repository).to be_delete_failed
end
it 'logs the error' do
subject.execute(repository)
expect(Gitlab::AppLogger).to have_received(:error)
.with("Container repository with ID: #{repository.id} and path: #{repository.path} failed with message: error in deleting tags")
end
it_behaves_like 'returning an error status with message', 'Deletion failed for container repository'
end
context 'when destroying the repository fails' do
before do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :success)
allow(repository).to receive(:destroy).and_return(false)
allow(repository.errors).to receive(:full_messages).and_return(['Error 1', 'Error 2'])
allow(Gitlab::AppLogger).to receive(:error).and_call_original
end
it 'sets status as deleted_failed' do
subject.execute(repository)
expect(repository).to be_delete_failed
end
it 'logs the error' do
subject.execute(repository)
expect(Gitlab::AppLogger).to have_received(:error)
.with("Container repository with ID: #{repository.id} and path: #{repository.path} failed with message: Error 1. Error 2")
end
it_behaves_like 'returning an error status with message', 'Deletion failed for container repository'
end
end
@ -25,66 +95,41 @@ RSpec.describe Projects::ContainerRepository::DestroyService do
project.add_developer(user)
end
context 'when root container repository exists' do
let!(:repository) { create(:container_repository, :root, project: project) }
it_behaves_like 'executing with permissions'
end
before do
stub_container_registry_tags(repository: :any, tags: %w[latest stable])
end
context 'when user does not have access to registry' do
let_it_be(:repository) { create(:container_repository, :root, project: project) }
it 'deletes the repository' do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :success)
expect { subject.execute(repository) }.to change { ContainerRepository.count }.by(-1)
end
it 'sends disable_timeout = true as part of the params as default' do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :success, disable_timeout: true)
expect { subject.execute(repository) }.to change { ContainerRepository.count }.by(-1)
end
it 'sends disable_timeout = false as part of the params if it is set to false' do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :success, disable_timeout: false)
expect { subject.execute(repository, disable_timeout: false) }.to change { ContainerRepository.count }.by(-1)
end
context 'when deleting the tags fails' do
it 'sets status as deleted_failed' do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :error)
allow(Gitlab::AppLogger).to receive(:error).and_call_original
subject.execute(repository)
expect(repository).to be_delete_failed
expect(Gitlab::AppLogger).to have_received(:error)
.with("Container repository with ID: #{repository.id} and path: #{repository.path} failed with message: error in deleting tags")
end
end
context 'when destroying the repository fails' do
it 'sets status as deleted_failed' do
expect_cleanup_tags_service_with(container_repository: repository, return_status: :success)
allow(repository).to receive(:destroy).and_return(false)
allow(repository.errors).to receive(:full_messages).and_return(['Error 1', 'Error 2'])
allow(Gitlab::AppLogger).to receive(:error).and_call_original
subject.execute(repository)
expect(repository).to be_delete_failed
expect(Gitlab::AppLogger).to have_received(:error)
.with("Container repository with ID: #{repository.id} and path: #{repository.path} failed with message: Error 1. Error 2")
end
end
def expect_cleanup_tags_service_with(container_repository:, return_status:, disable_timeout: true)
delete_tags_service = instance_double(Projects::ContainerRepository::CleanupTagsService)
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new).with(
container_repository: container_repository,
params: described_class::CLEANUP_TAGS_SERVICE_PARAMS.merge('disable_timeout' => disable_timeout)
).and_return(delete_tags_service)
expect(delete_tags_service).to receive(:execute).and_return(status: return_status)
end
it 'does not delete a repository' do
expect { subject.execute(repository) }.not_to change { ContainerRepository.count }
end
it_behaves_like 'returning an error status with message', 'Unauthorized access'
end
context 'when called during project deletion' do
let(:user) { nil }
let(:params) { { skip_permission_check: true } }
it_behaves_like 'executing with permissions'
end
context 'when there is no user' do
let(:user) { nil }
let(:repository) { create(:container_repository, :root, project: project) }
it_behaves_like 'returning an error status with message', 'Unauthorized access'
end
def expect_cleanup_tags_service_with(container_repository:, return_status:, disable_timeout: true)
delete_tags_service = instance_double(Projects::ContainerRepository::CleanupTagsService)
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new).with(
container_repository: container_repository,
params: described_class::CLEANUP_TAGS_SERVICE_PARAMS.merge('disable_timeout' => disable_timeout)
).and_return(delete_tags_service)
expect(delete_tags_service).to receive(:execute).and_return(status: return_status)
end
end
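The spec above drives a skip_permission_check param alongside the usual ability check; a hedged sketch of the gate it implies (the ability name and method name are assumptions, not the service's actual code):

# Illustrative only.
def authorized_to_destroy?(repository)
  return true if params[:skip_permission_check]

  can?(current_user, :destroy_container_image, repository.project)
end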

View File

@ -343,18 +343,30 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publisher do
end
context 'when image repository deletion succeeds' do
it 'removes tags' do
expect_any_instance_of(Projects::ContainerRepository::CleanupTagsService)
.to receive(:execute).and_return({ status: :success })
it 'returns true' do
expect_next_instance_of(Projects::ContainerRepository::CleanupTagsService) do |instance|
expect(instance).to receive(:execute).and_return(status: :success)
end
destroy_project(project, user)
expect(destroy_project(project, user)).to be true
end
end
context 'when image repository deletion raises an error' do
it 'returns false' do
expect_next_instance_of(Projects::ContainerRepository::CleanupTagsService) do |service|
expect(service).to receive(:execute).and_raise(RuntimeError)
end
expect(destroy_project(project, user)).to be false
end
end
context 'when image repository deletion fails' do
it 'raises an exception' do
expect_any_instance_of(Projects::ContainerRepository::CleanupTagsService)
.to receive(:execute).and_raise(RuntimeError)
it 'returns false' do
expect_next_instance_of(Projects::ContainerRepository::DestroyService) do |service|
expect(service).to receive(:execute).and_return({ status: :error })
end
expect(destroy_project(project, user)).to be false
end
@ -381,8 +393,9 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publisher do
context 'when image repository tags deletion succeeds' do
it 'removes tags' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(true)
expect_next_instance_of(Projects::ContainerRepository::DestroyService) do |service|
expect(service).to receive(:execute).and_return({ status: :success })
end
destroy_project(project, user)
end
@ -390,13 +403,27 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publisher do
context 'when image repository tags deletion fails' do
it 'raises an exception' do
expect_any_instance_of(ContainerRepository)
.to receive(:delete_tags!).and_return(false)
expect_next_instance_of(Projects::ContainerRepository::DestroyService) do |service|
expect(service).to receive(:execute).and_return({ status: :error })
end
expect(destroy_project(project, user)).to be false
end
end
end
context 'when there are no tags for legacy root repository' do
before do
stub_container_registry_tags(repository: project.full_path,
tags: [])
end
it 'does not try to destroy the repository' do
expect(Projects::ContainerRepository::DestroyService).not_to receive(:new)
destroy_project(project, user)
end
end
end
context 'for a forked project with LFS objects' do

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
# Requires `current_user`, `data_path`, `variables`, and `pagination_query(params)` bindings
RSpec.shared_examples 'sorted paginated variables' do
subject(:expected_ordered_variables) { ordered_variables.map { |var| var.to_global_id.to_s } }
context 'when sorted by key ascending' do
let(:ordered_variables) { variables.sort_by(&:key) }
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :KEY_ASC }
let(:first_param) { 2 }
let(:all_records) { expected_ordered_variables }
end
end
context 'when sorted by key descending' do
let(:ordered_variables) { variables.sort_by(&:key).reverse }
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :KEY_DESC }
let(:first_param) { 2 }
let(:all_records) { expected_ordered_variables }
end
end
end

View File

@ -1126,10 +1126,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/favicon-overlay/-/favicon-overlay-2.0.0.tgz#2f32d0b6a4d5b8ac44e2927083d9ab478a78c984"
integrity sha512-GNcORxXJ98LVGzOT9dDYKfbheqH6lNgPDD72lyXRnQIH7CjgGyos8i17aSBPq1f4s3zF3PyedFiAR4YEZbva2Q==
"@gitlab/fonts@^1.1.2":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@gitlab/fonts/-/fonts-1.1.2.tgz#b557dcc9b5f266934c024d39c500ffb58aea0feb"
integrity sha512-//wtklHUWO6AMctAGnuZq4MRx0khaoSjVgVsKBlTrZT5+iW2X7P+uXrfgfdQ1QW7bepjMVKFUBUDjaXTFLtrSw==
"@gitlab/fonts@^1.2.0":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@gitlab/fonts/-/fonts-1.2.0.tgz#140bea5316e71d9b20d084fb10030db4e4ae73bd"
integrity sha512-9wn+xnMgzvs9EAwMU0kbkcX5IKLXDOVUf6oBOi0zrFpwtM0wY4pfXkswSMl1kfMyNn6nW2n35g7sbv/Uy/QezQ==
"@gitlab/stylelint-config@4.1.0":
version "4.1.0"