diff --git a/.rubocop_todo/gitlab/avoid_gitlab_instance_checks.yml b/.rubocop_todo/gitlab/avoid_gitlab_instance_checks.yml index 77a880115da..8a10208663e 100644 --- a/.rubocop_todo/gitlab/avoid_gitlab_instance_checks.yml +++ b/.rubocop_todo/gitlab/avoid_gitlab_instance_checks.yml @@ -135,7 +135,6 @@ Gitlab/AvoidGitlabInstanceChecks: - 'lib/gitlab/gon_helper.rb' - 'lib/gitlab/monitor/demo_projects.rb' - 'lib/gitlab/qa.rb' - - 'lib/gitlab/tracking/destinations/database_events_snowplow.rb' - 'lib/gitlab/tracking/standard_context.rb' - 'lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric.rb' - 'lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric.rb' diff --git a/.rubocop_todo/rspec/feature_category.yml b/.rubocop_todo/rspec/feature_category.yml index 163eeb207ad..20903904144 100644 --- a/.rubocop_todo/rspec/feature_category.yml +++ b/.rubocop_todo/rspec/feature_category.yml @@ -4357,7 +4357,6 @@ RSpec/FeatureCategory: - 'spec/models/concerns/counter_attribute_spec.rb' - 'spec/models/concerns/cron_schedulable_spec.rb' - 'spec/models/concerns/cross_database_modification_spec.rb' - - 'spec/models/concerns/database_event_tracking_spec.rb' - 'spec/models/concerns/database_reflection_spec.rb' - 'spec/models/concerns/delete_with_limit_spec.rb' - 'spec/models/concerns/deployment_platform_spec.rb' diff --git a/.rubocop_todo/rspec/named_subject.yml b/.rubocop_todo/rspec/named_subject.yml index b1835f31c63..eda8e34b6f4 100644 --- a/.rubocop_todo/rspec/named_subject.yml +++ b/.rubocop_todo/rspec/named_subject.yml @@ -2425,7 +2425,6 @@ RSpec/NamedSubject: - 'spec/lib/gitlab/terraform_registry_token_spec.rb' - 'spec/lib/gitlab/throttle_spec.rb' - 'spec/lib/gitlab/time_tracking_formatter_spec.rb' - - 'spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb' - 'spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb' - 'spec/lib/gitlab/tracking/destinations/snowplow_spec.rb' - 
'spec/lib/gitlab/tracking_spec.rb' diff --git a/.rubocop_todo/style/inline_disable_annotation.yml b/.rubocop_todo/style/inline_disable_annotation.yml index 78a34c6cd57..98690775f3c 100644 --- a/.rubocop_todo/style/inline_disable_annotation.yml +++ b/.rubocop_todo/style/inline_disable_annotation.yml @@ -3052,7 +3052,6 @@ Style/InlineDisableAnnotation: - 'spec/models/ci/pipeline_spec.rb' - 'spec/models/ci/runner_manager_build_spec.rb' - 'spec/models/concerns/bulk_insertable_associations_spec.rb' - - 'spec/models/concerns/database_event_tracking_spec.rb' - 'spec/models/concerns/encrypted_user_password_spec.rb' - 'spec/models/concerns/legacy_bulk_insert_spec.rb' - 'spec/models/concerns/manual_inverse_association_spec.rb' diff --git a/app/assets/javascripts/ml/model_registry/components/candidate_list.vue b/app/assets/javascripts/ml/model_registry/components/candidate_list.vue index fc24a538293..fca4462d7d2 100644 --- a/app/assets/javascripts/ml/model_registry/components/candidate_list.vue +++ b/app/assets/javascripts/ml/model_registry/components/candidate_list.vue @@ -1,22 +1,17 @@ diff --git a/app/assets/javascripts/ml/model_registry/components/model_version_list.vue b/app/assets/javascripts/ml/model_registry/components/model_version_list.vue index 6b44cb2f613..5a649a9596a 100644 --- a/app/assets/javascripts/ml/model_registry/components/model_version_list.vue +++ b/app/assets/javascripts/ml/model_registry/components/model_version_list.vue @@ -1,23 +1,18 @@ diff --git a/app/assets/javascripts/ml/model_registry/components/searchable_list.vue b/app/assets/javascripts/ml/model_registry/components/searchable_list.vue new file mode 100644 index 00000000000..05062ae6fbf --- /dev/null +++ b/app/assets/javascripts/ml/model_registry/components/searchable_list.vue @@ -0,0 +1,79 @@ + + + diff --git a/app/models/analytics/cycle_analytics/stage.rb b/app/models/analytics/cycle_analytics/stage.rb index 6f152e7749e..4686dc3aedd 100644 --- a/app/models/analytics/cycle_analytics/stage.rb 
+++ b/app/models/analytics/cycle_analytics/stage.rb @@ -7,7 +7,6 @@ module Analytics self.table_name = :analytics_cycle_analytics_group_stages - include DatabaseEventTracking include Analytics::CycleAnalytics::Stageable include Analytics::CycleAnalytics::Parentable @@ -38,22 +37,6 @@ module Analytics .select("DISTINCT ON(stage_event_hash_id) #{quoted_table_name}.*") end - SNOWPLOW_ATTRIBUTES = %i[ - id - created_at - updated_at - relative_position - start_event_identifier - end_event_identifier - group_id - start_event_label_id - end_event_label_id - hidden - custom - name - group_value_stream_id - ].freeze - private def max_stages_count diff --git a/app/models/concerns/database_event_tracking.rb b/app/models/concerns/database_event_tracking.rb deleted file mode 100644 index 7e2f445189e..00000000000 --- a/app/models/concerns/database_event_tracking.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -module DatabaseEventTracking - extend ActiveSupport::Concern - - included do - after_create_commit :publish_database_create_event - after_destroy_commit :publish_database_destroy_event - after_update_commit :publish_database_update_event - end - - def publish_database_create_event - publish_database_event('create') - end - - def publish_database_destroy_event - publish_database_event('destroy') - end - - def publish_database_update_event - publish_database_event('update') - end - - def publish_database_event(name) - # Gitlab::Tracking#event is triggering Snowplow event - # Snowplow events are sent with usage of - # https://snowplow.github.io/snowplow-ruby-tracker/SnowplowTracker/AsyncEmitter.html - # that reports data asynchronously and does not impact performance nor carries a risk of - # rollback in case of error - - Gitlab::Tracking.database_event( - self.class.to_s, - "database_event_#{name}", - label: self.class.table_name, - project: try(:project), - namespace: (try(:group) || try(:namespace)) || try(:project)&.namespace, - property: name, - 
**filtered_record_attributes - ) - rescue StandardError => err - # this rescue should be a dead code due to utilization of AsyncEmitter, however - # since this concern is expected to be included in every model, it is better to - # prevent against any unexpected outcome - Gitlab::ErrorTracking.track_and_raise_for_dev_exception(err) - end - - def filtered_record_attributes - attributes - .with_indifferent_access - .slice(*self.class::SNOWPLOW_ATTRIBUTES) - end -end diff --git a/app/models/integrations/external_wiki.rb b/app/models/integrations/external_wiki.rb index 7408f86d231..e5360e58426 100644 --- a/app/models/integrations/external_wiki.rb +++ b/app/models/integrations/external_wiki.rb @@ -7,6 +7,7 @@ module Integrations field :external_wiki_url, section: SECTION_TYPE_CONNECTION, title: -> { s_('ExternalWikiService|External wiki URL') }, + description: -> { s_('ExternalWikiService|URL of the external wiki.') }, placeholder: -> { s_('ExternalWikiService|https://example.com/xxx/wiki/...') }, help: -> { s_('ExternalWikiService|Enter the URL to the external wiki.') }, required: true diff --git a/app/models/merge_request/metrics.rb b/app/models/merge_request/metrics.rb index 3c592c0008f..6d6c0ee07af 100644 --- a/app/models/merge_request/metrics.rb +++ b/app/models/merge_request/metrics.rb @@ -1,8 +1,6 @@ # frozen_string_literal: true class MergeRequest::Metrics < ApplicationRecord - include DatabaseEventTracking - belongs_to :merge_request, inverse_of: :metrics belongs_to :pipeline, class_name: 'Ci::Pipeline', foreign_key: :pipeline_id belongs_to :latest_closed_by, class_name: 'User' @@ -33,8 +31,7 @@ class MergeRequest::Metrics < ApplicationRecord RETURNING id, #{inserted_columns.join(', ')} SQL - result = connection.execute(sql).first - new(result).publish_database_create_event + connection.execute(sql) end end @@ -48,31 +45,6 @@ class MergeRequest::Metrics < ApplicationRecord with_valid_time_to_merge .pick(time_to_merge_expression) end - - SNOWPLOW_ATTRIBUTES = %i[ 
- id - merge_request_id - latest_build_started_at - latest_build_finished_at - first_deployed_to_production_at - merged_at - created_at - updated_at - pipeline_id - merged_by_id - latest_closed_by_id - latest_closed_at - first_comment_at - first_commit_at - last_commit_at - diff_size - modified_paths_size - commits_count - first_approved_at - first_reassigned_at - added_lines - removed_lines - ].freeze end MergeRequest::Metrics.prepend_mod_with('MergeRequest::Metrics') diff --git a/config/feature_flags/development/use_merge_approval_rules_when_merged.yml b/config/feature_flags/development/use_merge_approval_rules_when_merged.yml deleted file mode 100644 index 1b7007a96df..00000000000 --- a/config/feature_flags/development/use_merge_approval_rules_when_merged.yml +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: use_merge_approval_rules_when_merged -introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/129165 -rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/422380 -milestone: '16.4' -type: development -group: group::code review -default_enabled: false diff --git a/doc/administration/backup_restore/backup_gitlab.md b/doc/administration/backup_restore/backup_gitlab.md index 707b209faf3..40f93a2b7de 100644 --- a/doc/administration/backup_restore/backup_gitlab.md +++ b/doc/administration/backup_restore/backup_gitlab.md @@ -150,8 +150,8 @@ You may also want to back up any TLS keys and certificates (`/etc/gitlab/ssl`, ` [SSH host keys](https://superuser.com/questions/532040/copy-ssh-keys-from-one-server-to-another-server/532079#532079) to avoid man-in-the-middle attack warnings if you have to perform a full machine restore. -In the unlikely event that the secrets file is lost, see the -[troubleshooting section](#when-the-secrets-file-is-lost). +In the unlikely event that the secrets file is lost, see +[When the secrets file is lost](../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost). 
### Other data @@ -1452,618 +1452,3 @@ There are a few possible downsides to this: There is an **experimental** script that attempts to automate this process in [the Geo team Runbooks project](https://gitlab.com/gitlab-org/geo-team/runbooks/-/tree/main/experimental-online-backup-through-rsync). - -## Troubleshooting - -The following are possible problems you might encounter, along with potential -solutions. - -### When the secrets file is lost - -If you didn't [back up the secrets file](#storing-configuration-files), you -must complete several steps to get GitLab working properly again. - -The secrets file is responsible for storing the encryption key for the columns -that contain required, sensitive information. If the key is lost, GitLab can't -decrypt those columns, preventing access to the following items: - -- [CI/CD variables](../../ci/variables/index.md) -- [Kubernetes / GCP integration](../../user/infrastructure/clusters/index.md) -- [Custom Pages domains](../../user/project/pages/custom_domains_ssl_tls_certification/index.md) -- [Project error tracking](../../operations/error_tracking.md) -- [Runner authentication](../../ci/runners/index.md) -- [Project mirroring](../../user/project/repository/mirror/index.md) -- [Integrations](../../user/project/integrations/index.md) -- [Web hooks](../../user/project/integrations/webhooks.md) - -In cases like CI/CD variables and runner authentication, you can experience -unexpected behaviors, such as: - -- Stuck jobs. -- 500 errors. - -In this case, you must reset all the tokens for CI/CD variables and -runner authentication, which is described in more detail in the following -sections. After resetting the tokens, you should be able to visit your project -and the jobs begin running again. - -WARNING: -The steps in this section can potentially lead to **data loss** on the above listed items. -Consider opening a [Support Request](https://support.gitlab.com/hc/en-us/requests/new) if you're a Premium or Ultimate customer. 
- -#### Verify that all values can be decrypted - -You can determine if your database contains values that can't be decrypted by using a -[Rake task](../raketasks/check.md#verify-database-values-can-be-decrypted-using-the-current-secrets). - -#### Take a backup - -You must directly modify GitLab data to work around your lost secrets file. - -WARNING: -Be sure to create a full database backup before attempting any changes. - -#### Disable user two-factor authentication (2FA) - -Users with 2FA enabled can't sign in to GitLab. In that case, you must -[disable 2FA for everyone](../../security/two_factor_authentication.md#for-all-users), -after which users must reactivate 2FA. - -#### Reset CI/CD variables - -1. Enter the database console: - - For the Linux package (Omnibus) GitLab 14.1 and earlier: - - ```shell - sudo gitlab-rails dbconsole - ``` - - For the Linux package (Omnibus) GitLab 14.2 and later: - - ```shell - sudo gitlab-rails dbconsole --database main - ``` - - For self-compiled installations, GitLab 14.1 and earlier: - - ```shell - sudo -u git -H bundle exec rails dbconsole -e production - ``` - - For self-compiled installations, GitLab 14.2 and later: - - ```shell - sudo -u git -H bundle exec rails dbconsole -e production --database main - ``` - -1. Examine the `ci_group_variables` and `ci_variables` tables: - - ```sql - SELECT * FROM public."ci_group_variables"; - SELECT * FROM public."ci_variables"; - ``` - - These are the variables that you need to delete. - -1. Delete all variables: - - ```sql - DELETE FROM ci_group_variables; - DELETE FROM ci_variables; - ``` - -1. If you know the specific group or project from which you wish to delete variables, you can include a `WHERE` statement to specify that in your `DELETE`: - - ```sql - DELETE FROM ci_group_variables WHERE group_id = ; - DELETE FROM ci_variables WHERE project_id = ; - ``` - -You may need to reconfigure or restart GitLab for the changes to take effect. 
- -#### Reset runner registration tokens - -1. Enter the database console: - - For the Linux package (Omnibus) GitLab 14.1 and earlier: - - ```shell - sudo gitlab-rails dbconsole - ``` - - For the Linux package (Omnibus) GitLab 14.2 and later: - - ```shell - sudo gitlab-rails dbconsole --database main - ``` - - For self-compiled installations, GitLab 14.1 and earlier: - - ```shell - sudo -u git -H bundle exec rails dbconsole -e production - ``` - - For self-compiled installations, GitLab 14.2 and later: - - ```shell - sudo -u git -H bundle exec rails dbconsole -e production --database main - ``` - -1. Clear all tokens for projects, groups, and the entire instance: - - WARNING: - The final `UPDATE` operation stops the runners from being able to pick - up new jobs. You must register new runners. - - ```sql - -- Clear project tokens - UPDATE projects SET runners_token = null, runners_token_encrypted = null; - -- Clear group tokens - UPDATE namespaces SET runners_token = null, runners_token_encrypted = null; - -- Clear instance tokens - UPDATE application_settings SET runners_registration_token_encrypted = null; - -- Clear key used for JWT authentication - -- This may break the $CI_JWT_TOKEN job variable: - -- https://gitlab.com/gitlab-org/gitlab/-/issues/325965 - UPDATE application_settings SET encrypted_ci_jwt_signing_key = null; - -- Clear runner tokens - UPDATE ci_runners SET token = null, token_encrypted = null; - ``` - -#### Reset pending pipeline jobs - -1. 
Enter the database console: - - For the Linux package (Omnibus) GitLab 14.1 and earlier: - - ```shell - sudo gitlab-rails dbconsole - ``` - - For the Linux package (Omnibus) GitLab 14.2 and later: - - ```shell - sudo gitlab-rails dbconsole --database main - ``` - - For self-compiled installations, GitLab 14.1 and earlier: - - ```shell - sudo -u git -H bundle exec rails dbconsole -e production - ``` - - For self-compiled installations, GitLab 14.2 and later: - - ```shell - sudo -u git -H bundle exec rails dbconsole -e production --database main - ``` - -1. Clear all the tokens for pending jobs: - - For GitLab 15.3 and earlier: - - ```sql - -- Clear build tokens - UPDATE ci_builds SET token = null, token_encrypted = null; - ``` - - For GitLab 15.4 and later: - - ```sql - -- Clear build tokens - UPDATE ci_builds SET token_encrypted = null; - ``` - -A similar strategy can be employed for the remaining features. By removing the -data that can't be decrypted, GitLab can be returned to operation, and the -lost data can be manually replaced. - -#### Fix integrations and webhooks - -If you've lost your secrets, the [integrations settings](../../user/project/integrations/index.md) -and [webhooks settings](../../user/project/integrations/webhooks.md) pages might display `500` error messages. Lost secrets might also produce `500` errors when you try to access a repository in a project with a previously configured integration or webhook. - -The fix is to truncate the affected tables (those containing encrypted columns). -This deletes all your configured integrations, webhooks, and related metadata. -You should verify that the secrets are the root cause before deleting any data. - -1. 
Enter the database console: - - For the Linux package (Omnibus) GitLab 14.1 and earlier: - - ```shell - sudo gitlab-rails dbconsole - ``` - - For the Linux package (Omnibus) GitLab 14.2 and later: - - ```shell - sudo gitlab-rails dbconsole --database main - ``` - - For self-compiled installations, GitLab 14.1 and earlier: - - ```shell - sudo -u git -H bundle exec rails dbconsole -e production - ``` - - For self-compiled installations, GitLab 14.2 and later: - - ```shell - sudo -u git -H bundle exec rails dbconsole -e production --database main - ``` - -1. Truncate the following tables: - - ```sql - -- truncate web_hooks table - TRUNCATE integrations, chat_names, issue_tracker_data, jira_tracker_data, slack_integrations, web_hooks, zentao_tracker_data, web_hook_logs CASCADE; - ``` - -### Container registry push failures after restoring from a backup - -If you use the [container registry](../../user/packages/container_registry/index.md), -pushes to the registry may fail after restoring your backup on a Linux package (Omnibus) -instance after restoring the registry data. - -These failures mention permission issues in the registry logs, similar to: - -```plaintext -level=error -msg="response completed with error" -err.code=unknown -err.detail="filesystem: mkdir /var/opt/gitlab/gitlab-rails/shared/registry/docker/registry/v2/repositories/...: permission denied" -err.message="unknown error" -``` - -This issue is caused by the restore running as the unprivileged user `git`, -which is unable to assign the correct ownership to the registry files during -the restore process ([issue #62759](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/62759 "Incorrect permissions on registry filesystem after restore")). 
- -To get your registry working again: - -```shell -sudo chown -R registry:registry /var/opt/gitlab/gitlab-rails/shared/registry/docker -``` - -If you changed the default file system location for the registry, run `chown` -against your custom location, instead of `/var/opt/gitlab/gitlab-rails/shared/registry/docker`. - -### Backup fails to complete with Gzip error - -When running the backup, you may receive a Gzip error message: - -```shell -sudo /opt/gitlab/bin/gitlab-backup create -... -Dumping ... -... -gzip: stdout: Input/output error - -Backup failed -``` - -If this happens, examine the following: - -- Confirm there is sufficient disk space for the Gzip operation. It's not uncommon for backups that - use the [default strategy](#backup-strategy-option) to require half the instance size - in free disk space during backup creation. -- If NFS is being used, check if the mount option `timeout` is set. The - default is `600`, and changing this to smaller values results in this error. - -### Backup fails with `File name too long` error - -During backup, you can get the `File name too long` error ([issue #354984](https://gitlab.com/gitlab-org/gitlab/-/issues/354984)). 
For example: - -```plaintext -Problem: |\r\n]+$'))[1]) > 246; - - CREATE INDEX ON uploads_with_long_filenames(row_id); - - SELECT - u.id, - u.path, - -- Current file name - (regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] AS current_filename, - -- New file name - CONCAT( - LEFT(SPLIT_PART((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242), - COALESCE(SUBSTRING((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$')) - ) AS new_filename, - -- New path - CONCAT( - COALESCE((regexp_match(u.path, '(.*\/).*'))[1], ''), - CONCAT( - LEFT(SPLIT_PART((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242), - COALESCE(SUBSTRING((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$')) - ) - ) AS new_path - FROM uploads_with_long_filenames AS u - WHERE u.row_id > 0 AND u.row_id <= 10000; - ``` - - Output example: - - ```postgresql - -[ RECORD 1 ]----+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - id | 34 - path | public/@hashed/loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisit.txt - current_filename | loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisit.txt - new_filename | 
loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelits.txt - new_path | public/@hashed/loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelits.txt - ``` - - Where: - - - `current_filename`: a file name that is currently more than 246 characters long. - - `new_filename`: a file name that has been truncated to 246 characters maximum. - - `new_path`: new path considering the `new_filename` (truncated). - - After you validate the batch results, you must change the batch size (`row_id`) using the following sequence of numbers (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table. - -1. Rename the files found in the `uploads` table from long file names to new truncated file names. 
The following query rolls back the update so you can check the results safely in a transaction wrapper: - - ```sql - CREATE TEMP TABLE uploads_with_long_filenames AS - SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, path, id - FROM uploads AS u - WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246; - - CREATE INDEX ON uploads_with_long_filenames(row_id); - - BEGIN; - WITH updated_uploads AS ( - UPDATE uploads - SET - path = - CONCAT( - COALESCE((regexp_match(updatable_uploads.path, '(.*\/).*'))[1], ''), - CONCAT( - LEFT(SPLIT_PART((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242), - COALESCE(SUBSTRING((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$')) - ) - ) - FROM - uploads_with_long_filenames AS updatable_uploads - WHERE - uploads.id = updatable_uploads.id - AND updatable_uploads.row_id > 0 AND updatable_uploads.row_id <= 10000 - RETURNING uploads.* - ) - SELECT id, path FROM updated_uploads; - ROLLBACK; - ``` - - After you validate the batch update results, you must change the batch size (`row_id`) using the following sequence of numbers (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table. - -1. Validate that the new file names from the previous query are the expected ones. If you are sure you want to truncate the records found in the previous step to 246 characters, run the following: - - WARNING: - The following action is **irreversible**. 
- - ```sql - CREATE TEMP TABLE uploads_with_long_filenames AS - SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, path, id - FROM uploads AS u - WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246; - - CREATE INDEX ON uploads_with_long_filenames(row_id); - - UPDATE uploads - SET - path = - CONCAT( - COALESCE((regexp_match(updatable_uploads.path, '(.*\/).*'))[1], ''), - CONCAT( - LEFT(SPLIT_PART((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242), - COALESCE(SUBSTRING((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$')) - ) - ) - FROM - uploads_with_long_filenames AS updatable_uploads - WHERE - uploads.id = updatable_uploads.id - AND updatable_uploads.row_id > 0 AND updatable_uploads.row_id <= 10000; - ``` - - After you finish the batch update, you must change the batch size (`updatable_uploads.row_id`) using the following sequence of numbers (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table. - -Truncate the file names in the references found: - -1. Check if those records are referenced somewhere. One way to do this is to dump the database and search for the parent directory name and file name: - - 1. To dump your database, you can use the following command as an example: - - ```shell - pg_dump -h /var/opt/gitlab/postgresql/ -d gitlabhq_production > gitlab-dump.tmp - ``` - - 1. Then you can search for the references using the `grep` command. Combining the parent directory and the file name can be a good idea. For example: - - ```shell - grep public/alongfilenamehere.txt gitlab-dump.tmp - ``` - -1. Replace those long file names using the new file names obtained from querying the `uploads` table. - -Truncate the file names on the file system. You must manually rename the files in your file system to the new file names obtained from querying the `uploads` table. 
- -#### Re-run the backup task - -After following all the previous steps, re-run the backup task. - -### Restoring database backup fails when `pg_stat_statements` was previously enabled - -The GitLab backup of the PostgreSQL database includes all SQL statements required to enable extensions that were -previously enabled in the database. - -The `pg_stat_statements` extension can only be enabled or disabled by a PostgreSQL user with `superuser` role. -As the restore process uses a database user with limited permissions, it can't execute the following SQL statements: - -```sql -DROP EXTENSION IF EXISTS pg_stat_statements; -CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public; -``` - -When trying to restore the backup in a PostgreSQL instance that doesn't have the `pg_stats_statements` extension, -the following error message is displayed: - -```plaintext -ERROR: permission denied to create extension "pg_stat_statements" -HINT: Must be superuser to create this extension. -ERROR: extension "pg_stat_statements" does not exist -``` - -When trying to restore in an instance that has the `pg_stats_statements` extension enabled, the cleaning up step -fails with an error message similar to the following: - -```plaintext -rake aborted! -ActiveRecord::StatementInvalid: PG::InsufficientPrivilege: ERROR: must be owner of view pg_stat_statements -/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:42:in `block (4 levels) in ' -/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `each' -/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `block (3 levels) in ' -/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/backup.rake:71:in `block (3 levels) in ' -/opt/gitlab/embedded/bin/bundle:23:in `load' -/opt/gitlab/embedded/bin/bundle:23:in `
' -Caused by: -PG::InsufficientPrivilege: ERROR: must be owner of view pg_stat_statements -/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:42:in `block (4 levels) in ' -/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `each' -/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `block (3 levels) in ' -/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/backup.rake:71:in `block (3 levels) in ' -/opt/gitlab/embedded/bin/bundle:23:in `load' -/opt/gitlab/embedded/bin/bundle:23:in `
' -Tasks: TOP => gitlab:db:drop_tables -(See full trace by running task with --trace) -``` - -#### Prevent the dump file to include `pg_stat_statements` - -To prevent the inclusion of the extension in the PostgreSQL dump file that is part of the backup bundle, -enable the extension in any schema except the `public` schema: - -```sql -CREATE SCHEMA adm; -CREATE EXTENSION pg_stat_statements SCHEMA adm; -``` - -If the extension was previously enabled in the `public` schema, move it to a new one: - -```sql -CREATE SCHEMA adm; -ALTER EXTENSION pg_stat_statements SET SCHEMA adm; -``` - -To query the `pg_stat_statements` data after changing the schema, prefix the view name with the new schema: - -```sql -SELECT * FROM adm.pg_stat_statements limit 0; -``` - -To make it compatible with third-party monitoring solutions that expect it to be enabled in the `public` schema, -you need to include it in the `search_path`: - -```sql -set search_path to public,adm; -``` - -#### Fix an existing dump file to remove references to `pg_stat_statements` - -To fix an existing backup file, do the following changes: - -1. Extract from the backup the following file: `db/database.sql.gz`. -1. Decompress the file or use an editor that is capable of handling it compressed. -1. Remove the following lines, or similar ones: - - ```sql - CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public; - ``` - - ```sql - COMMENT ON EXTENSION pg_stat_statements IS 'track planning and execution statistics of all SQL statements executed'; - ``` - -1. Save the changes and recompress the file. -1. Update the backup file with the modified `db/database.sql.gz`. diff --git a/doc/administration/backup_restore/restore_gitlab.md b/doc/administration/backup_restore/restore_gitlab.md index 51ada659acd..0b5bf3cc0ff 100644 --- a/doc/administration/backup_restore/restore_gitlab.md +++ b/doc/administration/backup_restore/restore_gitlab.md @@ -38,8 +38,7 @@ before restoring the backup. 
To restore a backup, **you must also restore the GitLab secrets**. These include the database encryption key, [CI/CD variables](../../ci/variables/index.md), and variables used for [two-factor authentication](../../user/profile/account/two_factor_authentication.md). -Without the keys, [multiple issues occur](backup_gitlab.md#when-the-secrets-file-is-lost), -including loss of access by users with [two-factor authentication enabled](../../user/profile/account/two_factor_authentication.md), +Without the keys, [multiple issues occur](../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost), including loss of access by users with [two-factor authentication enabled](../../user/profile/account/two_factor_authentication.md), and GitLab Runners cannot log in. Restore: diff --git a/doc/administration/backup_restore/troubleshooting_backup_gitlab.md b/doc/administration/backup_restore/troubleshooting_backup_gitlab.md new file mode 100644 index 00000000000..83a02b52741 --- /dev/null +++ b/doc/administration/backup_restore/troubleshooting_backup_gitlab.md @@ -0,0 +1,619 @@ +--- +stage: Systems +group: Geo +info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments +--- + +# Troubleshooting GitLab backups + +When you back up GitLab, you might encounter the following issues. + +## When the secrets file is lost + +If you didn't [back up the secrets file](../../administration/backup_restore/backup_gitlab.md#storing-configuration-files), you +must complete several steps to get GitLab working properly again. + +The secrets file is responsible for storing the encryption key for the columns +that contain required, sensitive information. 
If the key is lost, GitLab can't +decrypt those columns, preventing access to the following items: + +- [CI/CD variables](../../ci/variables/index.md) +- [Kubernetes / GCP integration](../../user/infrastructure/clusters/index.md) +- [Custom Pages domains](../../user/project/pages/custom_domains_ssl_tls_certification/index.md) +- [Project error tracking](../../operations/error_tracking.md) +- [Runner authentication](../../ci/runners/index.md) +- [Project mirroring](../../user/project/repository/mirror/index.md) +- [Integrations](../../user/project/integrations/index.md) +- [Web hooks](../../user/project/integrations/webhooks.md) + +In cases like CI/CD variables and runner authentication, you can experience +unexpected behaviors, such as: + +- Stuck jobs. +- 500 errors. + +In this case, you must reset all the tokens for CI/CD variables and +runner authentication, which is described in more detail in the following +sections. After resetting the tokens, you should be able to visit your project +and the jobs begin running again. + +WARNING: +The steps in this section can potentially lead to **data loss** on the above listed items. +Consider opening a [Support Request](https://support.gitlab.com/hc/en-us/requests/new) if you're a Premium or Ultimate customer. + +### Verify that all values can be decrypted + +You can determine if your database contains values that can't be decrypted by using a +[Rake task](../raketasks/check.md#verify-database-values-can-be-decrypted-using-the-current-secrets). + +### Take a backup + +You must directly modify GitLab data to work around your lost secrets file. + +WARNING: +Be sure to create a full database backup before attempting any changes. + +### Disable user two-factor authentication (2FA) + +Users with 2FA enabled can't sign in to GitLab. In that case, you must +[disable 2FA for everyone](../../security/two_factor_authentication.md#for-all-users), +after which users must reactivate 2FA. + +### Reset CI/CD variables + +1. 
Enter the database console: + + For the Linux package (Omnibus) GitLab 14.1 and earlier: + + ```shell + sudo gitlab-rails dbconsole + ``` + + For the Linux package (Omnibus) GitLab 14.2 and later: + + ```shell + sudo gitlab-rails dbconsole --database main + ``` + + For self-compiled installations, GitLab 14.1 and earlier: + + ```shell + sudo -u git -H bundle exec rails dbconsole -e production + ``` + + For self-compiled installations, GitLab 14.2 and later: + + ```shell + sudo -u git -H bundle exec rails dbconsole -e production --database main + ``` + +1. Examine the `ci_group_variables` and `ci_variables` tables: + + ```sql + SELECT * FROM public."ci_group_variables"; + SELECT * FROM public."ci_variables"; + ``` + + These are the variables that you need to delete. + +1. Delete all variables: + + ```sql + DELETE FROM ci_group_variables; + DELETE FROM ci_variables; + ``` + +1. If you know the specific group or project from which you wish to delete variables, you can include a `WHERE` statement to specify that in your `DELETE`: + + ```sql + DELETE FROM ci_group_variables WHERE group_id = ; + DELETE FROM ci_variables WHERE project_id = ; + ``` + +You may need to reconfigure or restart GitLab for the changes to take effect. + +### Reset runner registration tokens + +1. Enter the database console: + + For the Linux package (Omnibus) GitLab 14.1 and earlier: + + ```shell + sudo gitlab-rails dbconsole + ``` + + For the Linux package (Omnibus) GitLab 14.2 and later: + + ```shell + sudo gitlab-rails dbconsole --database main + ``` + + For self-compiled installations, GitLab 14.1 and earlier: + + ```shell + sudo -u git -H bundle exec rails dbconsole -e production + ``` + + For self-compiled installations, GitLab 14.2 and later: + + ```shell + sudo -u git -H bundle exec rails dbconsole -e production --database main + ``` + +1. 
Clear all tokens for projects, groups, and the entire instance: + + WARNING: + The final `UPDATE` operation stops the runners from being able to pick + up new jobs. You must register new runners. + + ```sql + -- Clear project tokens + UPDATE projects SET runners_token = null, runners_token_encrypted = null; + -- Clear group tokens + UPDATE namespaces SET runners_token = null, runners_token_encrypted = null; + -- Clear instance tokens + UPDATE application_settings SET runners_registration_token_encrypted = null; + -- Clear key used for JWT authentication + -- This may break the $CI_JWT_TOKEN job variable: + -- https://gitlab.com/gitlab-org/gitlab/-/issues/325965 + UPDATE application_settings SET encrypted_ci_jwt_signing_key = null; + -- Clear runner tokens + UPDATE ci_runners SET token = null, token_encrypted = null; + ``` + +### Reset pending pipeline jobs + +1. Enter the database console: + + For the Linux package (Omnibus) GitLab 14.1 and earlier: + + ```shell + sudo gitlab-rails dbconsole + ``` + + For the Linux package (Omnibus) GitLab 14.2 and later: + + ```shell + sudo gitlab-rails dbconsole --database main + ``` + + For self-compiled installations, GitLab 14.1 and earlier: + + ```shell + sudo -u git -H bundle exec rails dbconsole -e production + ``` + + For self-compiled installations, GitLab 14.2 and later: + + ```shell + sudo -u git -H bundle exec rails dbconsole -e production --database main + ``` + +1. Clear all the tokens for pending jobs: + + For GitLab 15.3 and earlier: + + ```sql + -- Clear build tokens + UPDATE ci_builds SET token = null, token_encrypted = null; + ``` + + For GitLab 15.4 and later: + + ```sql + -- Clear build tokens + UPDATE ci_builds SET token_encrypted = null; + ``` + +A similar strategy can be employed for the remaining features. By removing the +data that can't be decrypted, GitLab can be returned to operation, and the +lost data can be manually replaced. 
+ +### Fix integrations and webhooks + +If you've lost your secrets, the [integrations settings](../../user/project/integrations/index.md) +and [webhooks settings](../../user/project/integrations/webhooks.md) pages might display `500` error messages. Lost secrets might also produce `500` errors when you try to access a repository in a project with a previously configured integration or webhook. + +The fix is to truncate the affected tables (those containing encrypted columns). +This deletes all your configured integrations, webhooks, and related metadata. +You should verify that the secrets are the root cause before deleting any data. + +1. Enter the database console: + + For the Linux package (Omnibus) GitLab 14.1 and earlier: + + ```shell + sudo gitlab-rails dbconsole + ``` + + For the Linux package (Omnibus) GitLab 14.2 and later: + + ```shell + sudo gitlab-rails dbconsole --database main + ``` + + For self-compiled installations, GitLab 14.1 and earlier: + + ```shell + sudo -u git -H bundle exec rails dbconsole -e production + ``` + + For self-compiled installations, GitLab 14.2 and later: + + ```shell + sudo -u git -H bundle exec rails dbconsole -e production --database main + ``` + +1. Truncate the following tables: + + ```sql + -- truncate web_hooks table + TRUNCATE integrations, chat_names, issue_tracker_data, jira_tracker_data, slack_integrations, web_hooks, zentao_tracker_data, web_hook_logs CASCADE; + ``` + +## Container registry push failures after restoring from a backup + +If you use the [container registry](../../user/packages/container_registry/index.md), +pushes to the registry may fail on a Linux package (Omnibus) instance +after you restore the registry data from a backup.
+ +These failures mention permission issues in the registry logs, similar to: + +```plaintext +level=error +msg="response completed with error" +err.code=unknown +err.detail="filesystem: mkdir /var/opt/gitlab/gitlab-rails/shared/registry/docker/registry/v2/repositories/...: permission denied" +err.message="unknown error" +``` + +This issue is caused by the restore running as the unprivileged user `git`, +which is unable to assign the correct ownership to the registry files during +the restore process ([issue #62759](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/62759 "Incorrect permissions on registry filesystem after restore")). + +To get your registry working again: + +```shell +sudo chown -R registry:registry /var/opt/gitlab/gitlab-rails/shared/registry/docker +``` + +If you changed the default file system location for the registry, run `chown` +against your custom location, instead of `/var/opt/gitlab/gitlab-rails/shared/registry/docker`. + +## Backup fails to complete with Gzip error + +When running the backup, you may receive a Gzip error message: + +```shell +sudo /opt/gitlab/bin/gitlab-backup create +... +Dumping ... +... +gzip: stdout: Input/output error + +Backup failed +``` + +If this happens, examine the following: + +- Confirm there is sufficient disk space for the Gzip operation. It's not uncommon for backups that + use the [default strategy](../../administration/backup_restore/backup_gitlab.md#backup-strategy-option) to require half the instance size + in free disk space during backup creation. +- If NFS is being used, check if the mount option `timeout` is set. The + default is `600`, and changing this to smaller values results in this error. + +## Backup fails with `File name too long` error + +During backup, you can get the `File name too long` error ([issue #354984](https://gitlab.com/gitlab-org/gitlab/-/issues/354984)). 
For example: + +```plaintext +Problem: |\r\n]+$'))[1]) > 246; + + CREATE INDEX ON uploads_with_long_filenames(row_id); + + SELECT + u.id, + u.path, + -- Current file name + (regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] AS current_filename, + -- New file name + CONCAT( + LEFT(SPLIT_PART((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242), + COALESCE(SUBSTRING((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$')) + ) AS new_filename, + -- New path + CONCAT( + COALESCE((regexp_match(u.path, '(.*\/).*'))[1], ''), + CONCAT( + LEFT(SPLIT_PART((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242), + COALESCE(SUBSTRING((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$')) + ) + ) AS new_path + FROM uploads_with_long_filenames AS u + WHERE u.row_id > 0 AND u.row_id <= 10000; + ``` + + Output example: + + ```postgresql + -[ RECORD 1 ]----+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + id | 34 + path | public/@hashed/loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisit.txt + current_filename | loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisit.txt + new_filename | 
loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelits.txt + new_path | public/@hashed/loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelits.txt + ``` + + Where: + + - `current_filename`: a file name that is currently more than 246 characters long. + - `new_filename`: a file name that has been truncated to 246 characters maximum. + - `new_path`: new path considering the `new_filename` (truncated). + + After you validate the batch results, you must change the batch size (`row_id`) using the following sequence of numbers (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table. + +1. Rename the files found in the `uploads` table from long file names to new truncated file names. 
The following query rolls back the update so you can check the results safely in a transaction wrapper: + + ```sql + CREATE TEMP TABLE uploads_with_long_filenames AS + SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, path, id + FROM uploads AS u + WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246; + + CREATE INDEX ON uploads_with_long_filenames(row_id); + + BEGIN; + WITH updated_uploads AS ( + UPDATE uploads + SET + path = + CONCAT( + COALESCE((regexp_match(updatable_uploads.path, '(.*\/).*'))[1], ''), + CONCAT( + LEFT(SPLIT_PART((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242), + COALESCE(SUBSTRING((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$')) + ) + ) + FROM + uploads_with_long_filenames AS updatable_uploads + WHERE + uploads.id = updatable_uploads.id + AND updatable_uploads.row_id > 0 AND updatable_uploads.row_id <= 10000 + RETURNING uploads.* + ) + SELECT id, path FROM updated_uploads; + ROLLBACK; + ``` + + After you validate the batch update results, you must change the batch size (`row_id`) using the following sequence of numbers (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table. + +1. Validate that the new file names from the previous query are the expected ones. If you are sure you want to truncate the records found in the previous step to 246 characters, run the following: + + WARNING: + The following action is **irreversible**. 
+ + ```sql + CREATE TEMP TABLE uploads_with_long_filenames AS + SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, path, id + FROM uploads AS u + WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246; + + CREATE INDEX ON uploads_with_long_filenames(row_id); + + UPDATE uploads + SET + path = + CONCAT( + COALESCE((regexp_match(updatable_uploads.path, '(.*\/).*'))[1], ''), + CONCAT( + LEFT(SPLIT_PART((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242), + COALESCE(SUBSTRING((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$')) + ) + ) + FROM + uploads_with_long_filenames AS updatable_uploads + WHERE + uploads.id = updatable_uploads.id + AND updatable_uploads.row_id > 0 AND updatable_uploads.row_id <= 10000; + ``` + + After you finish the batch update, you must change the batch size (`updatable_uploads.row_id`) using the following sequence of numbers (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table. + +Truncate the file names in the references found: + +1. Check if those records are referenced somewhere. One way to do this is to dump the database and search for the parent directory name and file name: + + 1. To dump your database, you can use the following command as an example: + + ```shell + pg_dump -h /var/opt/gitlab/postgresql/ -d gitlabhq_production > gitlab-dump.tmp + ``` + + 1. Then you can search for the references using the `grep` command. Combining the parent directory and the file name can be a good idea. For example: + + ```shell + grep public/alongfilenamehere.txt gitlab-dump.tmp + ``` + +1. Replace those long file names using the new file names obtained from querying the `uploads` table. + +Truncate the file names on the file system. You must manually rename the files in your file system to the new file names obtained from querying the `uploads` table. 
+ +### Re-run the backup task + +After following all the previous steps, re-run the backup task. + +## Restoring database backup fails when `pg_stat_statements` was previously enabled + +The GitLab backup of the PostgreSQL database includes all SQL statements required to enable extensions that were +previously enabled in the database. + +The `pg_stat_statements` extension can only be enabled or disabled by a PostgreSQL user with `superuser` role. +As the restore process uses a database user with limited permissions, it can't execute the following SQL statements: + +```sql +DROP EXTENSION IF EXISTS pg_stat_statements; +CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public; +``` + +When trying to restore the backup in a PostgreSQL instance that doesn't have the `pg_stat_statements` extension, +the following error message is displayed: + +```plaintext +ERROR: permission denied to create extension "pg_stat_statements" +HINT: Must be superuser to create this extension. +ERROR: extension "pg_stat_statements" does not exist +``` + +When trying to restore in an instance that has the `pg_stat_statements` extension enabled, the cleaning up step +fails with an error message similar to the following: + +```plaintext +rake aborted! +ActiveRecord::StatementInvalid: PG::InsufficientPrivilege: ERROR: must be owner of view pg_stat_statements +/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:42:in `block (4 levels) in ' +/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `each' +/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `block (3 levels) in ' +/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/backup.rake:71:in `block (3 levels) in ' +/opt/gitlab/embedded/bin/bundle:23:in `load' +/opt/gitlab/embedded/bin/bundle:23:in `
' +Caused by: +PG::InsufficientPrivilege: ERROR: must be owner of view pg_stat_statements +/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:42:in `block (4 levels) in ' +/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `each' +/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `block (3 levels) in ' +/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/backup.rake:71:in `block (3 levels) in ' +/opt/gitlab/embedded/bin/bundle:23:in `load' +/opt/gitlab/embedded/bin/bundle:23:in `
' +Tasks: TOP => gitlab:db:drop_tables +(See full trace by running task with --trace) +``` + +### Prevent the dump file to include `pg_stat_statements` + +To prevent the inclusion of the extension in the PostgreSQL dump file that is part of the backup bundle, +enable the extension in any schema except the `public` schema: + +```sql +CREATE SCHEMA adm; +CREATE EXTENSION pg_stat_statements SCHEMA adm; +``` + +If the extension was previously enabled in the `public` schema, move it to a new one: + +```sql +CREATE SCHEMA adm; +ALTER EXTENSION pg_stat_statements SET SCHEMA adm; +``` + +To query the `pg_stat_statements` data after changing the schema, prefix the view name with the new schema: + +```sql +SELECT * FROM adm.pg_stat_statements limit 0; +``` + +To make it compatible with third-party monitoring solutions that expect it to be enabled in the `public` schema, +you need to include it in the `search_path`: + +```sql +set search_path to public,adm; +``` + +### Fix an existing dump file to remove references to `pg_stat_statements` + +To fix an existing backup file, do the following changes: + +1. Extract from the backup the following file: `db/database.sql.gz`. +1. Decompress the file or use an editor that is capable of handling it compressed. +1. Remove the following lines, or similar ones: + + ```sql + CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public; + ``` + + ```sql + COMMENT ON EXTENSION pg_stat_statements IS 'track planning and execution statistics of all SQL statements executed'; + ``` + +1. Save the changes and recompress the file. +1. Update the backup file with the modified `db/database.sql.gz`. diff --git a/doc/administration/raketasks/check.md b/doc/administration/raketasks/check.md index 207dbc7b509..39d7cae5dde 100644 --- a/doc/administration/raketasks/check.md +++ b/doc/administration/raketasks/check.md @@ -215,7 +215,7 @@ secrets file (`gitlab-secrets.json`). Automatic resolution is not yet implemented. 
If you have values that cannot be decrypted, you can follow steps to reset them, see our -documentation on what to do [when the secrets file is lost](../../administration/backup_restore/backup_gitlab.md#when-the-secrets-file-is-lost). +documentation on what to do [when the secrets file is lost](../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost). This can take a very long time, depending on the size of your database, as it checks all rows in all tables. diff --git a/doc/api/integrations.md b/doc/api/integrations.md index 62c433f9b54..bc6e93199f2 100644 --- a/doc/api/integrations.md +++ b/doc/api/integrations.md @@ -644,7 +644,7 @@ Parameters: | Parameter | Type | Required | Description | | --------- | ---- | -------- | ----------- | -| `external_wiki_url` | string | true | The URL of the external wiki. | +| `external_wiki_url` | string | true | URL of the external wiki. | ### Disable an external wiki diff --git a/doc/api/vulnerability_exports.md b/doc/api/vulnerability_exports.md index 3be1dccea29..f2e0784cda7 100644 --- a/doc/api/vulnerability_exports.md +++ b/doc/api/vulnerability_exports.md @@ -187,11 +187,12 @@ The response is `404 Not Found` if the vulnerability export is not finished yet Example response: ```csv -Group Name,Project Name,Tool,Scanner Name,Status,Vulnerability,Details,Additional Info,Severity,CVE,CWE,Other Identifiers,Detected At,Location,Activity,Comments,Full Path -Gitlab.org,Defend,container_scanning,Trivy,resolved,CVE-2019-14697 in musl-utils-1.1.20-r4,"musl libc through 1.1.23 has an x87 floating-point stack adjustment imbalance, related to the math/i386/ directory. 
In some cases, use of this library could introduce out-of-bounds writes that are not present in an application's source code.",CVE-2019-14697 in musl-utils-1.1.20-r4,critical,CVE-2019-14697,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""musl-utils""}, ""version""=>""1.1.20-r4""}, ""operating_system""=>""alpine 3.9.2""}",true,"2022-10-07 13:41:08 UTC|root|resolved|changed vulnerability status to resolved",group/project/1 -Gitlab.org,Defend,container_scanning,Trivy,detected,CVE-2019-19242 in sqlite-libs-3.26.0-r3,"SQLite 3.30.1 mishandles pExpr->y.pTab, as demonstrated by the TK_COLUMN case in sqlite3ExprCodeTarget in expr.c.",CVE-2019-19242 in sqlite-libs-3.26.0-r3,medium,CVE-2019-19242,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""sqlite-libs""}, ""version""=>""3.26.0-r3""}, ""operating_system""=>""alpine 3.9.2""}",true,"",group/project/2 -Gitlab.org,Defend,container_scanning,Trivy,detected,CVE-2020-28928 in musl-1.1.20-r4,"In musl libc through 1.2.1, wcsnrtombs mishandles particular combinations of destination buffer size and source character limit, as demonstrated by an invalid write access (buffer overflow).",CVE-2020-28928 in musl-1.1.20-r4,medium,CVE-2020-28928,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""musl""}, ""version""=>""1.1.20-r4""}, ""operating_system""=>""alpine 3.9.2""}",true,"",group/project/3 -Gitlab.org,Defend,dependency_scanning,Gemnasium,detected,Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection') in rack,Carefully crafted requests can cause shell escape sequences to be written to the terminal via Rack's Lint middleware and CommonLogger middleware. 
These escape sequences can be leveraged to possibly execute commands in the victim's terminal.,Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection') in rack,unknown,Gemfile.lock:rack:gemnasium:60b5a27f-4e4d-4ab4-8ae7-74b4b212e177,,Gemnasium-60b5a27f-4e4d-4ab4-8ae7-74b4b212e177; GHSA-wq4h-7r42-5hrr,2022-10-14 13:16:00 UTC,"{""file""=>""Gemfile.lock"", ""dependency""=>{""package""=>{""name""=>""rack""}, ""version""=>""2.2.3""}}",false,"",group/project/4 -Gitlab.org,Defend,dependency_scanning,Gemnasium,detected,Denial of Service Vulnerability in Rack Multipart Parsing in rack,"Carefully crafted multipart POST requests can cause Rack's multipart parser to take much longer than expected, leading to a possible denial of service vulnerability. Impacted code will use Rack's multipart parser to parse multipart posts.",Denial of Service Vulnerability in Rack Multipart Parsing in rack,unknown,Gemfile.lock:rack:gemnasium:20daa17a-47b5-4f79-80c2-cd8f2db9805c,,Gemnasium-20daa17a-47b5-4f79-80c2-cd8f2db9805c; GHSA-hxqx-xwvh-44m2,2022-10-14 13:16:00 UTC,"{""file""=>""Gemfile.lock"", ""dependency""=>{""package""=>{""name""=>""rack""}, ""version""=>""2.2.3""}}",false,"",group/project/5 -Gitlab.org,Defend,sast,Brakeman,detected,Possible SQL injection,,Possible SQL injection,medium,e52f23a259cd489168b4313317ac94a3f13bffde57b9635171c1a44a9f329e9a,,"""Brakeman Warning Code 0""",2022-10-13 15:16:36 UTC,"{""file""=>""main.rb"", ""class""=>""User"", ""method""=>""index"", ""start_line""=>3}",false,"",group/project/6 +Group Name,Project Name,Tool,Scanner Name,Status,Vulnerability,Details,Additional Info,Severity,CVE,CWE,Other Identifiers,Detected At,Location,Activity,Comments,Full Path,CVSS Vectors,Dismissal Reason +Gitlab.org,Defend,container_scanning,Trivy,resolved,CVE-2019-14697 in musl-utils-1.1.20-r4,"musl libc through 1.1.23 has an x87 floating-point stack adjustment imbalance, related to the math/i386/ directory. 
In some cases, use of this library could introduce out-of-bounds writes that are not present in an application's source code.",CVE-2019-14697 in musl-utils-1.1.20-r4,critical,CVE-2019-14697,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""musl-utils""}, ""version""=>""1.1.20-r4""}, ""operating_system""=>""alpine 3.9.2""}",true,"2022-10-07 13:41:08 UTC|root|resolved|changed vulnerability status to resolved",group/project/1,,, +Gitlab.org,Defend,container_scanning,Trivy,detected,CVE-2019-19242 in sqlite-libs-3.26.0-r3,"SQLite 3.30.1 mishandles pExpr->y.pTab, as demonstrated by the TK_COLUMN case in sqlite3ExprCodeTarget in expr.c.",CVE-2019-19242 in sqlite-libs-3.26.0-r3,medium,CVE-2019-19242,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""sqlite-libs""}, ""version""=>""3.26.0-r3""}, ""operating_system""=>""alpine 3.9.2""}",true,"",group/project/2,,, +Gitlab.org,Defend,container_scanning,Trivy,detected,CVE-2020-28928 in musl-1.1.20-r4,"In musl libc through 1.2.1, wcsnrtombs mishandles particular combinations of destination buffer size and source character limit, as demonstrated by an invalid write access (buffer overflow).",CVE-2020-28928 in musl-1.1.20-r4,medium,CVE-2020-28928,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""musl""}, ""version""=>""1.1.20-r4""}, ""operating_system""=>""alpine 3.9.2""}",true,"",group/project/3,,, +Gitlab.org,Defend,dependency_scanning,Gemnasium,detected,Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection') in rack,Carefully crafted requests can cause shell escape sequences to be written to the terminal via Rack's Lint middleware and CommonLogger middleware. 
These escape sequences can be leveraged to possibly execute commands in the victim's terminal.,Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection') in rack,unknown,Gemfile.lock:rack:gemnasium:60b5a27f-4e4d-4ab4-8ae7-74b4b212e177,,Gemnasium-60b5a27f-4e4d-4ab4-8ae7-74b4b212e177; GHSA-wq4h-7r42-5hrr,2022-10-14 13:16:00 UTC,"{""file""=>""Gemfile.lock"", ""dependency""=>{""package""=>{""name""=>""rack""}, ""version""=>""2.2.3""}}",false,group/project/4,,, +Gitlab.org,Defend,dependency_scanning,Gemnasium,detected,Denial of Service Vulnerability in Rack Multipart Parsing in rack,"Carefully crafted multipart POST requests can cause Rack's multipart parser to take much longer than expected, leading to a possible denial of service vulnerability. Impacted code will use Rack's multipart parser to parse multipart posts.",Denial of Service Vulnerability in Rack Multipart Parsing in rack,unknown,Gemfile.lock:rack:gemnasium:20daa17a-47b5-4f79-80c2-cd8f2db9805c,,Gemnasium-20daa17a-47b5-4f79-80c2-cd8f2db9805c; GHSA-hxqx-xwvh-44m2,2022-10-14 13:16:00 UTC,"{""file""=>""Gemfile.lock"", ""dependency""=>{""package""=>{""name""=>""rack""}, ""version""=>""2.2.3""}}",false,group/project/5,,, +Gitlab.org,Defend,sast,Brakeman,detected,Possible SQL injection,,Possible SQL injection,medium,e52f23a259cd489168b4313317ac94a3f13bffde57b9635171c1a44a9f329e9a,,"""Brakeman Warning Code 0""",2022-10-13 15:16:36 UTC,"{""file""=>""main.rb"", ""class""=>""User"", ""method""=>""index"", ""start_line""=>3}",false,"",group/project/6,,, +Gitlab.org,Defend,sast,Semgrep,dismissed,Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection'),"SQL Injection is a critical vulnerability that can lead to data or system compromise...",,critical,,CWE-89,SCS0002,2023-12-28 10:48:34 UTC,"{""file""=>""WebGoat/App_Code/DB/SqliteDbProvider.cs"", ""start_line""=>274}",false,"2023-12-28 10:51:32 UTC|root|Dismissed|""changed vulnerability status to Dismissed: Not 
Applicable and the following comment: ""dismiss 5""",gitlab-org/defend/579,,Not applicable, ``` diff --git a/doc/ci/index.md b/doc/ci/index.md index b74b2e1735b..0dcae04c47c 100644 --- a/doc/ci/index.md +++ b/doc/ci/index.md @@ -2,7 +2,6 @@ stage: Verify group: Pipeline Execution info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments -description: "Learn how to use GitLab CI/CD, the GitLab built-in Continuous Integration, Continuous Deployment, and Continuous Delivery toolset to build, test, and deploy your application." --- # Get started with GitLab CI/CD **(FREE ALL)** diff --git a/doc/ci/variables/index.md b/doc/ci/variables/index.md index 86fc813b0cc..f42ffc0020d 100644 --- a/doc/ci/variables/index.md +++ b/doc/ci/variables/index.md @@ -244,7 +244,7 @@ malicious code can compromise both masked and protected variables. Variable values are encrypted using [`aes-256-cbc`](https://en.wikipedia.org/wiki/Advanced_Encryption_Standard) and stored in the database. This data can only be read and decrypted with a -valid [secrets file](../../administration/backup_restore/backup_gitlab.md#when-the-secrets-file-is-lost). +valid [secrets file](../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost). ### Mask a CI/CD variable diff --git a/doc/install/docker.md b/doc/install/docker.md index 252f34f7120..51b3aa28396 100644 --- a/doc/install/docker.md +++ b/doc/install/docker.md @@ -621,7 +621,7 @@ to back up the `gitlab.rb` file. 
WARNING: [Backing up the GitLab secrets file](../administration/backup_restore/backup_gitlab.md#storing-configuration-files) is required -to avoid [complicated steps](../administration/backup_restore/backup_gitlab.md#when-the-secrets-file-is-lost) when recovering +to avoid [complicated steps](../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost) when recovering GitLab from backup. The secrets file is stored at `/etc/gitlab/gitlab-secrets.json` inside the container, or `$GITLAB_HOME/config/gitlab-secrets.json` [on the container host](#set-up-the-volumes-location). diff --git a/doc/operations/index.md b/doc/operations/index.md index 5690e16600f..e14d371e75d 100644 --- a/doc/operations/index.md +++ b/doc/operations/index.md @@ -1,6 +1,7 @@ --- stage: Service Management group: Respond +description: Error tracking, incident management. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/policy/experiment-beta-support.md b/doc/policy/experiment-beta-support.md index 0c58380d304..a563ce7919d 100644 --- a/doc/policy/experiment-beta-support.md +++ b/doc/policy/experiment-beta-support.md @@ -1,6 +1,7 @@ --- stage: Systems group: Distribution +description: Support details. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/topics/build_your_application.md b/doc/topics/build_your_application.md index 50d94cc5b9f..787e056a1c7 100644 --- a/doc/topics/build_your_application.md +++ b/doc/topics/build_your_application.md @@ -1,13 +1,13 @@ --- stage: none group: unassigned +description: Runners, jobs, pipelines, variables. 
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- # Use CI/CD to build your application **(FREE ALL)** -Add your source code to a repository, create merge requests to check in -code, and use CI/CD to generate your application. Include packages in your app and output it to a variety of environments. +Use CI/CD to generate your application. - [Getting started](../ci/index.md) - [CI/CD YAML syntax reference](../ci/yaml/index.md) diff --git a/doc/topics/git/index.md b/doc/topics/git/index.md index 5a6a1aecded..b8740414faa 100644 --- a/doc/topics/git/index.md +++ b/doc/topics/git/index.md @@ -1,6 +1,7 @@ --- stage: Create group: Source Code +description: Common commands and workflows. info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments" --- diff --git a/doc/topics/manage_code.md b/doc/topics/manage_code.md index 5fbdbee7017..4cbd97f8898 100644 --- a/doc/topics/manage_code.md +++ b/doc/topics/manage_code.md @@ -1,12 +1,13 @@ --- stage: none group: unassigned +description: Repositories, merge requests, remote development. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- # Manage your code **(FREE ALL)** -Store your source files in a repository and create merge requests. Write, debug, and compile code hosted on GitLab. +Store your source files in a repository and create merge requests. Write, debug, and collaborate on code. 
- [Repositories](../user/project/repository/index.md) - [Merge requests](../user/project/merge_requests/index.md) diff --git a/doc/topics/plan_and_track.md b/doc/topics/plan_and_track.md index 3712d73929c..61c359e63ba 100644 --- a/doc/topics/plan_and_track.md +++ b/doc/topics/plan_and_track.md @@ -1,6 +1,7 @@ --- stage: Plan group: Project Management +description: Epics, issues, milestones, labels. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/topics/release_your_application.md b/doc/topics/release_your_application.md index 27c5cc50e5f..d46ae98d47c 100644 --- a/doc/topics/release_your_application.md +++ b/doc/topics/release_your_application.md @@ -1,6 +1,7 @@ --- stage: none group: unassigned +description: Environments, packages, review apps, GitLab Pages. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/topics/set_up_organization.md b/doc/topics/set_up_organization.md index 84d7bb1add0..22a594b6117 100644 --- a/doc/topics/set_up_organization.md +++ b/doc/topics/set_up_organization.md @@ -1,6 +1,7 @@ --- stage: none group: unassigned +description: Users, groups, namespaces, SSH keys. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/user/analytics/index.md b/doc/user/analytics/index.md index d58426bd76b..eef34214c23 100644 --- a/doc/user/analytics/index.md +++ b/doc/user/analytics/index.md @@ -1,6 +1,7 @@ --- stage: Plan group: Optimize +description: Instance, group, and project analytics. 
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/user/application_security/secure_your_application.md b/doc/user/application_security/secure_your_application.md index b35de7827e8..095796f3dc4 100644 --- a/doc/user/application_security/secure_your_application.md +++ b/doc/user/application_security/secure_your_application.md @@ -1,6 +1,7 @@ --- stage: Secure group: Static Analysis +description: Container, dependency, and vulnerability scans. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/user/application_security/vulnerability_report/index.md b/doc/user/application_security/vulnerability_report/index.md index 8defee7ae51..620d8c75e52 100644 --- a/doc/user/application_security/vulnerability_report/index.md +++ b/doc/user/application_security/vulnerability_report/index.md @@ -205,6 +205,8 @@ To sort vulnerabilities by the date each vulnerability was detected, select the ## Export vulnerability details +> Added "Dismissal Reason" as a column in the CSV export [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/434076) in GitLab 16.8. + You can export details of the vulnerabilities listed in the Vulnerability Report. The export format is CSV (comma separated values). All vulnerabilities are included because filters do not apply to the export. 
@@ -229,6 +231,7 @@ Fields included are: - Comments - Full Path - CVSS Vectors +- [Dismissal Reason](../vulnerabilities/index.md#vulnerability-dismissal-reasons) NOTE: Full details are available through our diff --git a/doc/user/infrastructure/index.md b/doc/user/infrastructure/index.md index 04d6caff0ba..327b6743d01 100644 --- a/doc/user/infrastructure/index.md +++ b/doc/user/infrastructure/index.md @@ -1,6 +1,7 @@ --- stage: Deploy group: Environments +description: Terraform and Kubernetes deployments. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/user/project/organize_work_with_projects.md b/doc/user/project/organize_work_with_projects.md index d41825af613..1371f5e77d0 100644 --- a/doc/user/project/organize_work_with_projects.md +++ b/doc/user/project/organize_work_with_projects.md @@ -1,6 +1,7 @@ --- stage: Data Stores group: Tenant Scale +description: Project visibility, search, badges, layout. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/doc/user/project/repository/mirror/index.md b/doc/user/project/repository/mirror/index.md index 34a2757bb67..9d5048a4fed 100644 --- a/doc/user/project/repository/mirror/index.md +++ b/doc/user/project/repository/mirror/index.md @@ -208,4 +208,4 @@ Older versions of SSH may require you to remove `-E md5` from the command. - [Troubleshooting](troubleshooting.md) for repository mirroring. 
- Configure a [Pull Mirroring Interval](../../../../administration/instance_limits.md#pull-mirroring-interval) - [Disable mirrors for a project](../../../../administration/settings/visibility_and_access_controls.md#enable-project-mirroring) -- [Secrets file and mirroring](../../../../administration/backup_restore/backup_gitlab.md#when-the-secrets-file-is-lost) +- [Secrets file and mirroring](../../../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost) diff --git a/doc/user/version.md b/doc/user/version.md index d39c0394610..2dcaf16d53b 100644 --- a/doc/user/version.md +++ b/doc/user/version.md @@ -1,6 +1,7 @@ --- stage: none group: none +description: Version information. info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments --- diff --git a/lib/api/helpers/integrations_helpers.rb b/lib/api/helpers/integrations_helpers.rb index 6025ffddc33..d9696a33e84 100644 --- a/lib/api/helpers/integrations_helpers.rb +++ b/lib/api/helpers/integrations_helpers.rb @@ -268,14 +268,7 @@ module API desc: 'Branches for which notifications are to be sent' } ], - 'external-wiki' => [ - { - required: true, - name: :external_wiki_url, - type: String, - desc: 'The URL of the external wiki' - } - ], + 'external-wiki' => ::Integrations::ExternalWiki.api_fields, 'google-play' => ::Integrations::GooglePlay.api_fields, 'hangouts-chat' => [ { diff --git a/lib/gitlab/tracking.rb b/lib/gitlab/tracking.rb index 0b606b712c7..df10555f006 100644 --- a/lib/gitlab/tracking.rb +++ b/lib/gitlab/tracking.rb @@ -33,21 +33,6 @@ module Gitlab track_struct_event(tracker, category, action, label: label, property: property, value: value, contexts: contexts) end - def database_event(category, action, label: nil, property: nil, value: nil, context: [], project: nil, user: nil, namespace: nil, **extra) # rubocop:disable Metrics/ParameterLists - 
action = action.to_s - destination = Gitlab::Tracking::Destinations::DatabaseEventsSnowplow.new - contexts = [ - Tracking::StandardContext.new( - namespace_id: namespace&.id, - plan_name: namespace&.actual_plan_name, - project_id: project&.id, - user_id: user&.id, - **extra).to_context, *context - ] - - track_struct_event(destination, category, action, label: label, property: property, value: value, contexts: contexts) - end - def definition(basename, category: nil, action: nil, label: nil, property: nil, value: nil, context: [], project: nil, user: nil, namespace: nil, **extra) # rubocop:disable Metrics/ParameterLists definition = YAML.load_file(Rails.root.join("config/events/#{basename}.yml")) diff --git a/lib/gitlab/tracking/destinations/database_events_snowplow.rb b/lib/gitlab/tracking/destinations/database_events_snowplow.rb deleted file mode 100644 index 458d7f0c129..00000000000 --- a/lib/gitlab/tracking/destinations/database_events_snowplow.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Tracking - module Destinations - class DatabaseEventsSnowplow < Snowplow - extend ::Gitlab::Utils::Override - - HOSTNAME = 'db-snowplow.trx.gitlab.net' - - override :enabled? - # database events are only collected for SaaS instance - def enabled? - ::Gitlab.dev_or_test_env? || ::Gitlab.com? - end - - override :hostname - def hostname - return Gitlab::CurrentSettings.snowplow_database_collector_hostname || HOSTNAME if ::Gitlab.com? 
- - 'localhost:9091' - end - - private - - override :increment_failed_events_emissions - def increment_failed_events_emissions(value) - Gitlab::Metrics.counter( - :gitlab_db_events_snowplow_failed_events_total, - 'Number of failed Snowplow events emissions' - ).increment({}, value.to_i) - end - - override :increment_successful_events_emissions - def increment_successful_events_emissions(value) - Gitlab::Metrics.counter( - :gitlab_db_events_snowplow_successful_events_total, - 'Number of successful Snowplow events emissions' - ).increment({}, value.to_i) - end - - override :increment_total_events_counter - def increment_total_events_counter - Gitlab::Metrics.counter( - :gitlab_db_events_snowplow_events_total, - 'Number of Snowplow events' - ).increment - end - end - end - end -end diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 3a0a7680f41..f66d21bc1ad 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -172,11 +172,6 @@ msgid_plural "%d authors" msgstr[0] "" msgstr[1] "" -msgid "%d candidate" -msgid_plural "%d candidates" -msgstr[0] "" -msgstr[1] "" - msgid "%d changed file" msgid_plural "%d changed files" msgstr[0] "" @@ -20343,6 +20338,9 @@ msgstr "" msgid "ExternalWikiService|Link to an external wiki from the sidebar." msgstr "" +msgid "ExternalWikiService|URL of the external wiki." +msgstr "" + msgid "ExternalWikiService|https://example.com/xxx/wiki/..." 
msgstr "" @@ -54013,6 +54011,9 @@ msgstr "" msgid "VulnerabilityExport|Detected At" msgstr "" +msgid "VulnerabilityExport|Dismissal Reason" +msgstr "" + msgid "VulnerabilityExport|Full Path" msgstr "" diff --git a/qa/Gemfile b/qa/Gemfile index ca919fd7c00..3c0ddf98008 100644 --- a/qa/Gemfile +++ b/qa/Gemfile @@ -3,7 +3,7 @@ source 'https://rubygems.org' gem 'gitlab-qa', '~> 13', '>= 13.1.0', require: 'gitlab/qa' -gem 'gitlab_quality-test_tooling', '~> 1.9.0', require: false +gem 'gitlab_quality-test_tooling', '~> 1.10.0', require: false gem 'gitlab-utils', path: '../gems/gitlab-utils' gem 'activesupport', '~> 7.0.8' # This should stay in sync with the root's Gemfile gem 'allure-rspec', '~> 2.23.0' diff --git a/qa/Gemfile.lock b/qa/Gemfile.lock index a1372ed4292..5db23d82db7 100644 --- a/qa/Gemfile.lock +++ b/qa/Gemfile.lock @@ -129,7 +129,7 @@ GEM rainbow (>= 3, < 4) table_print (= 1.5.7) zeitwerk (>= 2, < 3) - gitlab_quality-test_tooling (1.9.0) + gitlab_quality-test_tooling (1.10.0) activesupport (>= 6.1, < 7.2) amatch (~> 0.4.1) gitlab (~> 4.19) @@ -361,7 +361,7 @@ DEPENDENCIES fog-google (~> 1.19) gitlab-qa (~> 13, >= 13.1.0) gitlab-utils! 
- gitlab_quality-test_tooling (~> 1.9.0) + gitlab_quality-test_tooling (~> 1.10.0) influxdb-client (~> 3.0) knapsack (~> 4.0) nokogiri (~> 1.15, >= 1.15.5) diff --git a/spec/controllers/admin/projects_controller_spec.rb b/spec/controllers/admin/projects_controller_spec.rb index d81b067ffb6..95986b5c034 100644 --- a/spec/controllers/admin/projects_controller_spec.rb +++ b/spec/controllers/admin/projects_controller_spec.rb @@ -49,11 +49,11 @@ RSpec.describe Admin::ProjectsController do it 'does not have N+1 queries', :use_clean_rails_memory_store_caching, :request_store do get :index - control_count = ActiveRecord::QueryRecorder.new { get :index }.count + control = ActiveRecord::QueryRecorder.new { get :index } create(:project) - expect { get :index }.not_to exceed_query_limit(control_count) + expect { get :index }.not_to exceed_query_limit(control) end end diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb index d88fe41a869..186e1b13856 100644 --- a/spec/controllers/admin/runners_controller_spec.rb +++ b/spec/controllers/admin/runners_controller_spec.rb @@ -89,11 +89,11 @@ RSpec.describe Admin::RunnersController, feature_category: :fleet_visibility do it 'avoids N+1 queries', :request_store do get :edit, params: { id: runner.id } - control_count = ActiveRecord::QueryRecorder.new { get :edit, params: { id: runner.id } }.count + control = ActiveRecord::QueryRecorder.new { get :edit, params: { id: runner.id } } # There is one additional query looking up subject.group in ProjectPolicy for the # needs_new_sso_session permission - expect { get :edit, params: { id: runner.id } }.not_to exceed_query_limit(control_count + 1) + expect { get :edit, params: { id: runner.id } }.not_to exceed_query_limit(control).with_threshold(1) expect(response).to have_gitlab_http_status(:ok) end diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb index 
f4384dbaa69..d920fbc3c32 100644 --- a/spec/controllers/application_controller_spec.rb +++ b/spec/controllers/application_controller_spec.rb @@ -470,7 +470,7 @@ RSpec.describe ApplicationController, feature_category: :shared do enforce_terms - expect { get :index }.not_to exceed_query_limit(control.count).with_threshold(1) + expect { get :index }.not_to exceed_query_limit(control).with_threshold(1) end context 'when terms are enforced' do diff --git a/spec/controllers/concerns/renders_commits_spec.rb b/spec/controllers/concerns/renders_commits_spec.rb index 45f194b63e7..754107efee8 100644 --- a/spec/controllers/concerns/renders_commits_spec.rb +++ b/spec/controllers/concerns/renders_commits_spec.rb @@ -46,15 +46,15 @@ RSpec.describe RendersCommits do it 'avoids N + 1', :request_store do stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 5) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do go - end.count + end stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 15) expect do go - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end @@ -73,7 +73,7 @@ RSpec.describe RendersCommits do expect do subject.prepare_commits_for_rendering(merge_request.commits) merge_request.commits.each(&:latest_pipeline) - end.not_to exceed_all_query_limit(control.count) + end.not_to exceed_all_query_limit(control) end end end diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb index 3dcf41941bb..38e39da2733 100644 --- a/spec/controllers/groups/labels_controller_spec.rb +++ b/spec/controllers/groups/labels_controller_spec.rb @@ -62,7 +62,9 @@ RSpec.describe Groups::LabelsController, feature_category: :team_planning do create_list(:group_label, 3, group: group) # some n+1 queries still exist - expect { get :index, params: { group_id: group.to_param } }.not_to exceed_all_query_limit(control.count).with_threshold(10) + expect do + get 
:index, params: { group_id: group.to_param } + end.not_to exceed_all_query_limit(control).with_threshold(10) expect(assigns(:labels).count).to eq(4) end end diff --git a/spec/controllers/groups/releases_controller_spec.rb b/spec/controllers/groups/releases_controller_spec.rb index 4b4333dea0e..1ca540ebb99 100644 --- a/spec/controllers/groups/releases_controller_spec.rb +++ b/spec/controllers/groups/releases_controller_spec.rb @@ -62,12 +62,12 @@ RSpec.describe Groups::ReleasesController do context 'N+1 queries' do it 'avoids N+1 database queries' do - control_count = ActiveRecord::QueryRecorder.new { subject }.count + control = ActiveRecord::QueryRecorder.new { subject } create_list(:release, 5, project: project) create_list(:release, 5, project: private_project) - expect { subject }.not_to exceed_query_limit(control_count) + expect { subject }.not_to exceed_query_limit(control) end end end diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb index b29a172f5b1..721125749a5 100644 --- a/spec/controllers/projects/issues_controller_spec.rb +++ b/spec/controllers/projects/issues_controller_spec.rb @@ -987,11 +987,11 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te labels = create_list(:label, 10, project: project).map(&:to_reference) issue = create(:issue, project: project, description: 'Test issue') - control_count = ActiveRecord::QueryRecorder.new { issue.update!(description: [issue.description, label].join(' ')) }.count + control = ActiveRecord::QueryRecorder.new { issue.update!(description: [issue.description, label].join(' ')) } # Follow-up to get rid of this `2 * label.count` requirement: https://gitlab.com/gitlab-org/gitlab-foss/issues/52230 expect { issue.update!(description: [issue.description, labels].join(' ')) } - .not_to exceed_query_limit(control_count + 2 * labels.count) + .not_to exceed_query_limit(control).with_threshold(2 * labels.count) end it 'logs 
the view with Gitlab::Search::RecentIssues' do @@ -1849,15 +1849,17 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te RequestStore.clear! - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid } - end.count + end RequestStore.clear! create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference) - expect { get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid } }.not_to exceed_query_limit(control_count) + expect do + get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid } + end.not_to exceed_query_limit(control) end end diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb index db8cac8bb4a..2333ff0a937 100644 --- a/spec/controllers/projects/labels_controller_spec.rb +++ b/spec/controllers/projects/labels_controller_spec.rb @@ -108,7 +108,7 @@ RSpec.describe Projects::LabelsController, feature_category: :team_planning do # some n+1 queries still exist # calls to get max project authorization access level - expect { list_labels }.not_to exceed_all_query_limit(control.count).with_threshold(25) + expect { list_labels }.not_to exceed_all_query_limit(control).with_threshold(25) expect(assigns(:labels).count).to eq(10) end end diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb index 678991b91a5..6b440b90f37 100644 --- a/spec/controllers/projects/notes_controller_spec.rb +++ b/spec/controllers/projects/notes_controller_spec.rb @@ -249,15 +249,15 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: : RequestStore.clear! 
- control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do get :index, params: request_params - end.count + end RequestStore.clear! create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference) - expect { get :index, params: request_params }.not_to exceed_query_limit(control_count) + expect { get :index, params: request_params }.not_to exceed_query_limit(control) end end end diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb index 7cd4f43d4da..9fe2e4c23e0 100644 --- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb +++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb @@ -108,11 +108,11 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu end it 'avoids N + 1 queries', :request_store do - control_count = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules }.count + control = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules } create_list(:ci_pipeline_schedule, 2, project: project) - expect { visit_pipelines_schedules }.not_to exceed_query_limit(control_count) + expect { visit_pipelines_schedules }.not_to exceed_query_limit(control) end context 'when the scope is set to active' do diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb index deaed8e1162..82c1aa3e18c 100644 --- a/spec/controllers/projects/pipelines_controller_spec.rb +++ b/spec/controllers/projects/pipelines_controller_spec.rb @@ -381,7 +381,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte # Set up all required variables get_pipeline_json - control_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count + control = ActiveRecord::QueryRecorder.new { get_pipeline_json } first_build = pipeline.builds.first 
first_build.tag_list << [:hello, :world] @@ -391,9 +391,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte second_build.tag_list << [:docker, :ruby] create(:deployment, deployable: second_build) - new_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count - - expect(new_count).to be_within(1).of(control_count) + expect { get_pipeline_json }.not_to exceed_query_limit(control).with_threshold(1) end end @@ -1074,7 +1072,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte clear_controller_memoization - control_count = ActiveRecord::QueryRecorder.new { get_test_report_json }.count + control = ActiveRecord::QueryRecorder.new { get_test_report_json } create(:ci_build, name: 'karma', pipeline: pipeline).tap do |build| create(:ci_job_artifact, :junit, job: build) @@ -1082,7 +1080,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte clear_controller_memoization - expect { get_test_report_json }.not_to exceed_query_limit(control_count) + expect { get_test_report_json }.not_to exceed_query_limit(control) end end diff --git a/spec/controllers/projects/starrers_controller_spec.rb b/spec/controllers/projects/starrers_controller_spec.rb index 2148f495c31..236bb408d32 100644 --- a/spec/controllers/projects/starrers_controller_spec.rb +++ b/spec/controllers/projects/starrers_controller_spec.rb @@ -40,11 +40,11 @@ RSpec.describe Projects::StarrersController do it 'avoids N+1s loading users', :request_store do get_starrers - control_count = ActiveRecord::QueryRecorder.new { get_starrers }.count + control = ActiveRecord::QueryRecorder.new { get_starrers } create_list(:user, 5).each { |user| user.toggle_star(project) } - expect { get_starrers }.not_to exceed_query_limit(control_count) + expect { get_starrers }.not_to exceed_query_limit(control) end end diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb index 
1020ea341ce..fee0f8a8f32 100644 --- a/spec/features/issuables/issuable_list_spec.rb +++ b/spec/features/issuables/issuable_list_spec.rb @@ -16,11 +16,11 @@ RSpec.describe 'issuable list', :js, feature_category: :team_planning do issuable_types.each do |issuable_type| it "avoids N+1 database queries for #{issuable_type.to_s.humanize.pluralize}", quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/231426' } do - control_count = ActiveRecord::QueryRecorder.new { visit_issuable_list(issuable_type) }.count + control = ActiveRecord::QueryRecorder.new { visit_issuable_list(issuable_type) } create_issuables(issuable_type) - expect { visit_issuable_list(issuable_type) }.not_to exceed_query_limit(control_count) + expect { visit_issuable_list(issuable_type) }.not_to exceed_query_limit(control) end it "counts upvotes, downvotes and notes count for each #{issuable_type.to_s.humanize}" do diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb index a29d643b15b..41b8ad7825c 100644 --- a/spec/features/projects/branches_spec.rb +++ b/spec/features/projects/branches_spec.rb @@ -171,12 +171,12 @@ RSpec.describe 'Branches', feature_category: :source_code_management do new_branches_count = 20 sql_queries_count_threshold = 10 - control_count = ActiveRecord::QueryRecorder.new { visit project_branches_path(project) }.count + control = ActiveRecord::QueryRecorder.new { visit project_branches_path(project) } (1..new_branches_count).each { |number| repository.add_branch(user, "new-branch-#{number}", 'master') } expect { visit project_branches_filtered_path(project, state: 'all') } - .not_to exceed_query_limit(control_count).with_threshold(sql_queries_count_threshold) + .not_to exceed_query_limit(control).with_threshold(sql_queries_count_threshold) end end diff --git a/spec/features/tags/developer_views_tags_spec.rb b/spec/features/tags/developer_views_tags_spec.rb index 154311853f8..bc2d33b3a02 100644 --- 
a/spec/features/tags/developer_views_tags_spec.rb +++ b/spec/features/tags/developer_views_tags_spec.rb @@ -41,11 +41,11 @@ RSpec.describe 'Developer views tags', feature_category: :source_code_management end it 'avoids a N+1 query in branches index' do - control_count = ActiveRecord::QueryRecorder.new { visit project_tags_path(project) }.count + control = ActiveRecord::QueryRecorder.new { visit project_tags_path(project) } %w[one two three four five].each { |tag| repository.add_tag(user, tag, 'master', 'foo') } - expect { visit project_tags_path(project) }.not_to exceed_query_limit(control_count) + expect { visit project_tags_path(project) }.not_to exceed_query_limit(control) end it 'views the tags list page' do diff --git a/spec/finders/deployments_finder_spec.rb b/spec/finders/deployments_finder_spec.rb index 807a7ca8e26..f45042d9c36 100644 --- a/spec/finders/deployments_finder_spec.rb +++ b/spec/finders/deployments_finder_spec.rb @@ -343,14 +343,14 @@ RSpec.describe DeploymentsFinder, feature_category: :deployment_management do it 'avoids N+1 queries' do execute_queries = -> { described_class.new({ group: group }).execute.first } - control_count = ActiveRecord::QueryRecorder.new { execute_queries }.count + control = ActiveRecord::QueryRecorder.new { execute_queries } new_project = create(:project, :repository, group: group) new_env = create(:environment, project: new_project, name: "production") create_list(:deployment, 2, status: :success, project: new_project, environment: new_env) group.reload - expect { execute_queries }.not_to exceed_query_limit(control_count) + expect { execute_queries }.not_to exceed_query_limit(control) end end end diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb index e0fc494d033..9c8b8658538 100644 --- a/spec/finders/members_finder_spec.rb +++ b/spec/finders/members_finder_spec.rb @@ -166,12 +166,12 @@ RSpec.describe MembersFinder, feature_category: :groups_and_projects do # warm up # We need this 
warm up because there is 1 query being fired in one of the policies, - # and policy results are cached. Without a warm up, the control_count will be X queries + # and policy results are cached. Without a warm up, the control.count will be X queries # but the test phase will only fire X-1 queries, due the fact that the # result of the policy is already available in the cache. described_class.new(project, user2).execute.map(&:user) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do described_class.new(project, user2).execute.map(&:user) end @@ -179,7 +179,7 @@ RSpec.describe MembersFinder, feature_category: :groups_and_projects do expect do described_class.new(project, user2).execute.map(&:user) - end.to issue_same_number_of_queries_as(control_count) + end.to issue_same_number_of_queries_as(control) end context 'with :shared_into_ancestors' do diff --git a/spec/finders/releases/group_releases_finder_spec.rb b/spec/finders/releases/group_releases_finder_spec.rb index daefc94828b..3430fe834d1 100644 --- a/spec/finders/releases/group_releases_finder_spec.rb +++ b/spec/finders/releases/group_releases_finder_spec.rb @@ -168,9 +168,9 @@ RSpec.describe Releases::GroupReleasesFinder, feature_category: :groups_and_proj let(:params) { query_params } it 'subgroups avoids N+1 queries' do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do releases - end.count + end subgroups = create_list(:group, 10, parent: group) projects = create_list(:project, 10, namespace: subgroups[0]) @@ -178,7 +178,7 @@ RSpec.describe Releases::GroupReleasesFinder, feature_category: :groups_and_proj expect do releases - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end end diff --git a/spec/finders/resource_milestone_event_finder_spec.rb b/spec/finders/resource_milestone_event_finder_spec.rb index 
27e124afe2e..a05059328e3 100644 --- a/spec/finders/resource_milestone_event_finder_spec.rb +++ b/spec/finders/resource_milestone_event_finder_spec.rb @@ -49,8 +49,8 @@ RSpec.describe ResourceMilestoneEventFinder do milestone1 = create(:milestone, project: issue_project) milestone2 = create(:milestone, project: issue_project) - control_count = ActiveRecord::QueryRecorder.new { described_class.new(user, issue).execute }.count - expect(control_count).to eq(1) # 1 events query + control = ActiveRecord::QueryRecorder.new { described_class.new(user, issue).execute } + expect(control.count).to eq(1) # 1 events query create_event(milestone1, :add) create_event(milestone1, :remove) @@ -60,7 +60,7 @@ RSpec.describe ResourceMilestoneEventFinder do create_event(milestone2, :remove) # 1 milestones + 1 project + 1 user + 4 ability - expect { described_class.new(user, issue).execute }.not_to exceed_query_limit(control_count + 6) + expect { described_class.new(user, issue).execute }.not_to exceed_query_limit(control).with_threshold(6) end end diff --git a/spec/frontend/ml/model_registry/components/candidate_list_spec.js b/spec/frontend/ml/model_registry/components/candidate_list_spec.js index c10222a99fd..8491c7be16f 100644 --- a/spec/frontend/ml/model_registry/components/candidate_list_spec.js +++ b/spec/frontend/ml/model_registry/components/candidate_list_spec.js @@ -1,13 +1,11 @@ import Vue from 'vue'; import VueApollo from 'vue-apollo'; -import { GlAlert } from '@gitlab/ui'; -import { shallowMount } from '@vue/test-utils'; +import { mount } from '@vue/test-utils'; import * as Sentry from '~/sentry/sentry_browser_wrapper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import CandidateList from '~/ml/model_registry/components/candidate_list.vue'; -import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue'; -import RegistryList from 
'~/packages_and_registries/shared/components/registry_list.vue'; +import SearchableList from '~/ml/model_registry/components/searchable_list.vue'; import CandidateListRow from '~/ml/model_registry/components/candidate_list_row.vue'; import getModelCandidatesQuery from '~/ml/model_registry/graphql/queries/get_model_candidates.query.graphql'; import { GRAPHQL_PAGE_SIZE } from '~/ml/model_registry/constants'; @@ -24,10 +22,7 @@ describe('ml/model_registry/components/candidate_list.vue', () => { let wrapper; let apolloProvider; - const findAlert = () => wrapper.findComponent(GlAlert); - const findLoader = () => wrapper.findComponent(PackagesListLoader); - const findRegistryList = () => wrapper.findComponent(RegistryList); - const findListRow = () => wrapper.findComponent(CandidateListRow); + const findSearchableList = () => wrapper.findComponent(SearchableList); const findAllRows = () => wrapper.findAllComponents(CandidateListRow); const mountComponent = ({ @@ -37,15 +32,12 @@ describe('ml/model_registry/components/candidate_list.vue', () => { const requestHandlers = [[getModelCandidatesQuery, resolver]]; apolloProvider = createMockApollo(requestHandlers); - wrapper = shallowMount(CandidateList, { + wrapper = mount(CandidateList, { apolloProvider, propsData: { modelId: 2, ...props, }, - stubs: { - RegistryList, - }, }); }; @@ -60,25 +52,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => { await waitForPromises(); }); - it('displays empty slot message', () => { + it('shows empty state', () => { expect(wrapper.text()).toContain('This model has no candidates'); }); - - it('does not display loader', () => { - expect(findLoader().exists()).toBe(false); - }); - - it('does not display rows', () => { - expect(findListRow().exists()).toBe(false); - }); - - it('does not display registry list', () => { - expect(findRegistryList().exists()).toBe(false); - }); - - it('does not display alert', () => { - expect(findAlert().exists()).toBe(false); - }); }); 
describe('if load fails, alert', () => { @@ -90,19 +66,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => { }); it('is displayed', () => { - expect(findAlert().exists()).toBe(true); - }); - - it('shows error message', () => { - expect(findAlert().text()).toContain('Failed to load model candidates with error: Failure!'); - }); - - it('is not dismissible', () => { - expect(findAlert().props('dismissible')).toBe(false); - }); - - it('is of variant danger', () => { - expect(findAlert().attributes('variant')).toBe('danger'); + expect(findSearchableList().props('errorMessage')).toBe( + 'Failed to load model candidates with error: Failure!', + ); }); it('error is logged in sentry', () => { @@ -116,21 +82,11 @@ describe('ml/model_registry/components/candidate_list.vue', () => { await waitForPromises(); }); - it('displays package registry list', () => { - expect(findRegistryList().exists()).toEqual(true); + it('Passes items to list', () => { + expect(findSearchableList().props('items')).toEqual(graphqlCandidates); }); - it('binds the right props', () => { - expect(findRegistryList().props()).toMatchObject({ - items: graphqlCandidates, - pagination: {}, - isLoading: false, - hiddenDelete: true, - }); - }); - - it('displays candidate rows', () => { - expect(findAllRows().exists()).toEqual(true); + it('displays package version rows', () => { expect(findAllRows()).toHaveLength(graphqlCandidates.length); }); @@ -143,17 +99,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => { candidate: expect.objectContaining(graphqlCandidates[1]), }); }); - - it('does not display loader', () => { - expect(findLoader().exists()).toBe(false); - }); - - it('does not display empty message', () => { - expect(findAlert().exists()).toBe(false); - }); }); - describe('when user interacts with pagination', () => { + describe('when list requests update', () => { const resolver = jest.fn().mockResolvedValue(modelCandidatesQuery()); beforeEach(async () => { @@ -161,22 
+109,18 @@ describe('ml/model_registry/components/candidate_list.vue', () => { await waitForPromises(); }); - it('when list emits next-page fetches the next set of records', async () => { - findRegistryList().vm.$emit('next-page'); + it('when list emits fetch-page fetches the next set of records', async () => { + findSearchableList().vm.$emit('fetch-page', { + after: 'eyJpZCI6IjIifQ', + first: 30, + id: 'gid://gitlab/Ml::Model/2', + }); + await waitForPromises(); expect(resolver).toHaveBeenLastCalledWith( expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }), ); }); - - it('when list emits prev-page fetches the prev set of records', async () => { - findRegistryList().vm.$emit('prev-page'); - await waitForPromises(); - - expect(resolver).toHaveBeenLastCalledWith( - expect.objectContaining({ before: graphqlPageInfo.startCursor, last: GRAPHQL_PAGE_SIZE }), - ); - }); }); }); diff --git a/spec/frontend/ml/model_registry/components/model_version_list_spec.js b/spec/frontend/ml/model_registry/components/model_version_list_spec.js index 41f7e71c543..f5d6acf3bae 100644 --- a/spec/frontend/ml/model_registry/components/model_version_list_spec.js +++ b/spec/frontend/ml/model_registry/components/model_version_list_spec.js @@ -1,13 +1,11 @@ import Vue from 'vue'; import VueApollo from 'vue-apollo'; -import { GlAlert } from '@gitlab/ui'; import * as Sentry from '~/sentry/sentry_browser_wrapper'; -import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import { mountExtended } from 'helpers/vue_test_utils_helper'; import createMockApollo from 'helpers/mock_apollo_helper'; import waitForPromises from 'helpers/wait_for_promises'; import ModelVersionList from '~/ml/model_registry/components/model_version_list.vue'; -import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue'; -import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue'; +import SearchableList from 
'~/ml/model_registry/components/searchable_list.vue'; import ModelVersionRow from '~/ml/model_registry/components/model_version_row.vue'; import getModelVersionsQuery from '~/ml/model_registry/graphql/queries/get_model_versions.query.graphql'; import EmptyState from '~/ml/model_registry/components/empty_state.vue'; @@ -25,11 +23,8 @@ describe('ModelVersionList', () => { let wrapper; let apolloProvider; - const findAlert = () => wrapper.findComponent(GlAlert); - const findLoader = () => wrapper.findComponent(PackagesListLoader); - const findRegistryList = () => wrapper.findComponent(RegistryList); + const findSearchableList = () => wrapper.findComponent(SearchableList); const findEmptyState = () => wrapper.findComponent(EmptyState); - const findListRow = () => wrapper.findComponent(ModelVersionRow); const findAllRows = () => wrapper.findAllComponents(ModelVersionRow); const mountComponent = ({ @@ -39,15 +34,12 @@ describe('ModelVersionList', () => { const requestHandlers = [[getModelVersionsQuery, resolver]]; apolloProvider = createMockApollo(requestHandlers); - wrapper = shallowMountExtended(ModelVersionList, { + wrapper = mountExtended(ModelVersionList, { apolloProvider, propsData: { modelId: 2, ...props, }, - stubs: { - RegistryList, - }, }); }; @@ -65,22 +57,6 @@ describe('ModelVersionList', () => { it('shows empty state', () => { expect(findEmptyState().props('entityType')).toBe(MODEL_ENTITIES.modelVersion); }); - - it('does not display loader', () => { - expect(findLoader().exists()).toBe(false); - }); - - it('does not display rows', () => { - expect(findListRow().exists()).toBe(false); - }); - - it('does not display registry list', () => { - expect(findRegistryList().exists()).toBe(false); - }); - - it('does not display alert', () => { - expect(findAlert().exists()).toBe(false); - }); }); describe('if load fails, alert', () => { @@ -92,19 +68,9 @@ describe('ModelVersionList', () => { }); it('is displayed', () => { - expect(findAlert().exists()).toBe(true); - 
}); - - it('shows error message', () => { - expect(findAlert().text()).toContain('Failed to load model versions with error: Failure!'); - }); - - it('is not dismissible', () => { - expect(findAlert().props('dismissible')).toBe(false); - }); - - it('is of variant danger', () => { - expect(findAlert().attributes('variant')).toBe('danger'); + expect(findSearchableList().props('errorMessage')).toBe( + 'Failed to load model versions with error: Failure!', + ); }); it('error is logged in sentry', () => { @@ -118,21 +84,11 @@ describe('ModelVersionList', () => { await waitForPromises(); }); - it('displays package registry list', () => { - expect(findRegistryList().exists()).toEqual(true); - }); - - it('binds the right props', () => { - expect(findRegistryList().props()).toMatchObject({ - items: graphqlModelVersions, - pagination: {}, - isLoading: false, - hiddenDelete: true, - }); + it('Passes items to list', () => { + expect(findSearchableList().props('items')).toEqual(graphqlModelVersions); }); it('displays package version rows', () => { - expect(findAllRows().exists()).toEqual(true); expect(findAllRows()).toHaveLength(graphqlModelVersions.length); }); @@ -145,17 +101,9 @@ describe('ModelVersionList', () => { modelVersion: expect.objectContaining(graphqlModelVersions[1]), }); }); - - it('does not display loader', () => { - expect(findLoader().exists()).toBe(false); - }); - - it('does not display empty state', () => { - expect(findEmptyState().exists()).toBe(false); - }); }); - describe('when user interacts with pagination', () => { + describe('when list requests update', () => { const resolver = jest.fn().mockResolvedValue(modelVersionsQuery()); beforeEach(async () => { @@ -163,22 +111,18 @@ describe('ModelVersionList', () => { await waitForPromises(); }); - it('when list emits next-page fetches the next set of records', async () => { - findRegistryList().vm.$emit('next-page'); + it('when list emits fetch-page fetches the next set of records', async () => { + 
findSearchableList().vm.$emit('fetch-page', { + after: 'eyJpZCI6IjIifQ', + first: 30, + id: 'gid://gitlab/Ml::Model/2', + }); + await waitForPromises(); expect(resolver).toHaveBeenLastCalledWith( expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }), ); }); - - it('when list emits prev-page fetches the prev set of records', async () => { - findRegistryList().vm.$emit('prev-page'); - await waitForPromises(); - - expect(resolver).toHaveBeenLastCalledWith( - expect.objectContaining({ before: graphqlPageInfo.startCursor, last: GRAPHQL_PAGE_SIZE }), - ); - }); }); }); diff --git a/spec/frontend/ml/model_registry/components/searchable_list_spec.js b/spec/frontend/ml/model_registry/components/searchable_list_spec.js new file mode 100644 index 00000000000..ea58a9a830a --- /dev/null +++ b/spec/frontend/ml/model_registry/components/searchable_list_spec.js @@ -0,0 +1,170 @@ +import { GlAlert } from '@gitlab/ui'; +import { shallowMountExtended } from 'helpers/vue_test_utils_helper'; +import SearchableList from '~/ml/model_registry/components/searchable_list.vue'; +import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue'; +import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue'; +import { defaultPageInfo } from '../mock_data'; + +describe('ml/model_registry/components/searchable_list.vue', () => { + let wrapper; + + const findAlert = () => wrapper.findComponent(GlAlert); + const findLoader = () => wrapper.findComponent(PackagesListLoader); + const findRegistryList = () => wrapper.findComponent(RegistryList); + const findEmptyState = () => wrapper.findByTestId('empty-state-slot'); + const findFirstRow = () => wrapper.findByTestId('element'); + const findRows = () => wrapper.findAllByTestId('element'); + + const defaultProps = { + items: ['a', 'b', 'c'], + pageInfo: defaultPageInfo, + isLoading: false, + errorMessage: '', + }; + + const mountComponent = (props = {}) => 
{ + wrapper = shallowMountExtended(SearchableList, { + propsData: { + ...defaultProps, + ...props, + }, + stubs: { + RegistryList, + }, + slots: { + 'empty-state': '
This is empty
', + item: '
', + }, + }); + }; + + describe('when list is loaded and has no data', () => { + beforeEach(() => mountComponent({ items: [] })); + + it('shows empty state', () => { + expect(findEmptyState().text()).toBe('This is empty'); + }); + + it('does not display loader', () => { + expect(findLoader().exists()).toBe(false); + }); + + it('does not display rows', () => { + expect(findFirstRow().exists()).toBe(false); + }); + + it('does not display registry list', () => { + expect(findRegistryList().exists()).toBe(false); + }); + + it('does not display alert', () => { + expect(findAlert().exists()).toBe(false); + }); + }); + + describe('if errorMessage', () => { + beforeEach(() => mountComponent({ errorMessage: 'Failure!' })); + + it('shows error message', () => { + expect(findAlert().text()).toContain('Failure!'); + }); + + it('is not dismissible', () => { + expect(findAlert().props('dismissible')).toBe(false); + }); + + it('is of variant danger', () => { + expect(findAlert().attributes('variant')).toBe('danger'); + }); + + it('hides loader', () => { + expect(findLoader().exists()).toBe(false); + }); + + it('hides registry list', () => { + expect(findRegistryList().exists()).toBe(false); + }); + + it('hides empty state', () => { + expect(findEmptyState().exists()).toBe(false); + }); + }); + + describe('if loading', () => { + beforeEach(() => mountComponent({ isLoading: true })); + + it('shows loader', () => { + expect(findLoader().exists()).toBe(true); + }); + + it('hides error message', () => { + expect(findAlert().exists()).toBe(false); + }); + + it('hides registry list', () => { + expect(findRegistryList().exists()).toBe(false); + }); + + it('hides empty state', () => { + expect(findEmptyState().exists()).toBe(false); + }); + }); + + describe('when list is loaded with data', () => { + beforeEach(() => mountComponent()); + + it('displays package registry list', () => { + expect(findRegistryList().exists()).toEqual(true); + }); + + it('binds the right props', () => { + 
expect(findRegistryList().props()).toMatchObject({ + items: ['a', 'b', 'c'], + isLoading: false, + pagination: defaultPageInfo, + hiddenDelete: true, + }); + }); + + it('displays package version rows', () => { + expect(findRows().exists()).toEqual(true); + expect(findRows()).toHaveLength(3); + }); + + it('does not display loader', () => { + expect(findLoader().exists()).toBe(false); + }); + + it('does not display empty state', () => { + expect(findEmptyState().exists()).toBe(false); + }); + }); + + describe('when user interacts with pagination', () => { + beforeEach(() => mountComponent()); + + it('when list emits next-page emits fetchPage with correct pageInfo', () => { + findRegistryList().vm.$emit('next-page'); + + const expectedNewPageInfo = { + after: 'eyJpZCI6IjIifQ', + first: 30, + last: null, + }; + + expect(wrapper.emitted('fetch-page')).toEqual([[expectedNewPageInfo]]); + }); + + it('when list emits prev-page emits fetchPage with correct pageInfo', () => { + findRegistryList().vm.$emit('prev-page'); + + const expectedNewPageInfo = { + before: 'eyJpZCI6IjE2In0', + first: null, + last: 30, + }; + + expect(wrapper.emitted('fetch-page')).toEqual([[expectedNewPageInfo]]); + }); + }); +}); diff --git a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb index eb39e5bafc5..744e7f35891 100644 --- a/spec/graphql/resolvers/design_management/versions_resolver_spec.rb +++ b/spec/graphql/resolvers/design_management/versions_resolver_spec.rb @@ -43,15 +43,15 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do context 'loading associations' do it 'prevents N+1 queries when loading author' do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do resolve_versions(object).items.map(&:author) - end.count + end create_list(:design_version, 3, issue: issue) expect do 
resolve_versions(object).items.map(&:author) - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end end diff --git a/spec/graphql/resolvers/group_labels_resolver_spec.rb b/spec/graphql/resolvers/group_labels_resolver_spec.rb index 08e17cedfcc..2e583a1703d 100644 --- a/spec/graphql/resolvers/group_labels_resolver_spec.rb +++ b/spec/graphql/resolvers/group_labels_resolver_spec.rb @@ -78,7 +78,7 @@ RSpec.describe Resolvers::GroupLabelsResolver do Gitlab::SafeRequestStore.ensure_request_store do resolve_labels(group, params).to_a end - end.not_to exceed_query_limit(control.count) + end.not_to exceed_query_limit(control) end end diff --git a/spec/graphql/resolvers/labels_resolver_spec.rb b/spec/graphql/resolvers/labels_resolver_spec.rb index 16cf2e73736..fd55c3131b4 100644 --- a/spec/graphql/resolvers/labels_resolver_spec.rb +++ b/spec/graphql/resolvers/labels_resolver_spec.rb @@ -78,7 +78,7 @@ RSpec.describe Resolvers::LabelsResolver do Gitlab::SafeRequestStore.ensure_request_store do resolve_labels(project, params).to_a end - end.not_to exceed_query_limit(control.count) + end.not_to exceed_query_limit(control) end end diff --git a/spec/graphql/types/current_user_todos_type_spec.rb b/spec/graphql/types/current_user_todos_type_spec.rb index febbe868f33..2b33a705ae2 100644 --- a/spec/graphql/types/current_user_todos_type_spec.rb +++ b/spec/graphql/types/current_user_todos_type_spec.rb @@ -159,17 +159,17 @@ RSpec.describe GitlabSchema.types['CurrentUserTodos'] do # AND ("todos"."state" IN ('done','pending')) # AND "todos"."target_id" = merge_request # AND "todos"."target_type" = 'MergeRequest' ORDER BY "todos"."id" DESC - baseline = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do execute_query(query_type, graphql: base_query) end expect do execute_query(query_type, graphql: query_without_state_arguments) - end.not_to exceed_query_limit(baseline) # at present this is 3 + end.not_to 
exceed_query_limit(control) # at present this is 3 expect do execute_query(query_type, graphql: with_state_arguments) - end.not_to exceed_query_limit(baseline.count + 1) + end.not_to exceed_query_limit(control).with_threshold(1) end it 'returns correct data' do diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb index 8aee337f51c..5f192701b33 100644 --- a/spec/helpers/groups_helper_spec.rb +++ b/spec/helpers/groups_helper_spec.rb @@ -114,13 +114,13 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do end it 'avoids N+1 queries' do - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do helper.group_title(nested_group) end expect do helper.group_title(very_deep_nested_group) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end end diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb index e1c0aafc3c3..bad30b5033d 100644 --- a/spec/helpers/search_helper_spec.rb +++ b/spec/helpers/search_helper_spec.rb @@ -656,12 +656,12 @@ RSpec.describe SearchHelper, feature_category: :global_search do @project = create(:project) description = FFaker::Lorem.characters(210) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { search_md_sanitize(description) }.count + control = ActiveRecord::QueryRecorder.new(skip_cached: false) { search_md_sanitize(description) } issues = create_list(:issue, 4, project: @project) description_with_issues = description + ' ' + issues.map { |issue| "##{issue.iid}" }.join(' ') - expect { search_md_sanitize(description_with_issues) }.not_to exceed_all_query_limit(control_count) + expect { search_md_sanitize(description_with_issues) }.not_to exceed_all_query_limit(control) end end diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb index a692d76da77..e1159b9fab2 100644 --- 
a/spec/lib/atlassian/jira_connect/client_spec.rb +++ b/spec/lib/atlassian/jira_connect/client_spec.rb @@ -433,16 +433,16 @@ RSpec.describe Atlassian::JiraConnect::Client, feature_category: :integrations d end it 'avoids N+1 database queries' do - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do subject.send(:store_dev_info, project: project, merge_requests: merge_requests) - end.count + end merge_requests << create(:merge_request, :unique_branches, source_project: project) expect do subject.send(:store_dev_info, project: project, merge_requests: merge_requests) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end end diff --git a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb index 0ed320e863c..15cb4994d46 100644 --- a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb +++ b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb @@ -24,9 +24,9 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c subject { described_class.represent(merge_requests, user_notes_count: user_notes_count).as_json } it 'avoids N+1 database queries' do - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do described_class.represent(merge_requests, user_notes_count: user_notes_count) - end.count + end merge_requests << create(:merge_request, :unique_branches) @@ -35,7 +35,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c records: merge_requests, associations: { merge_request_reviewers: :reviewer } ).call - expect { subject }.not_to exceed_query_limit(control_count) + expect { subject }.not_to exceed_query_limit(control) end it 'uses counts from user_notes_count' do diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/repositories_spec.rb index 
024f6c5db96..679be62393e 100644 --- a/spec/lib/backup/repositories_spec.rb +++ b/spec/lib/backup/repositories_spec.rb @@ -68,20 +68,20 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do end it 'avoids N+1 database queries' do - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do subject.dump(destination, backup_id) - end.count + end create_list(:project, 2, :repository) create_list(:snippet, 2, :repository) - # Number of expected queries are 2 more than control_count + # Number of expected queries are 2 more than control.count # to account for the queries for project.design_management_repository # for each project. # We are using 2 projects here. expect do subject.dump(destination, backup_id) - end.not_to exceed_query_limit(control_count + 2) + end.not_to exceed_query_limit(control).with_threshold(2) end describe 'storages' do diff --git a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb index 4fc9d9dd4f6..701a45aa54d 100644 --- a/spec/lib/banzai/filter/custom_emoji_filter_spec.rb +++ b/spec/lib/banzai/filter/custom_emoji_filter_spec.rb @@ -47,13 +47,13 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter, feature_category: :team_planni it 'does not do N+1 query' do create(:custom_emoji, name: 'party-parrot', group: group) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do filter('

:tanuki:

') end expect do filter('

:tanuki:

:party-parrot:

') - end.not_to exceed_all_query_limit(control_count.count) + end.not_to exceed_all_query_limit(control) end it 'uses custom emoji from ancestor group' do diff --git a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb index 06bb0edc92c..d14f218763f 100644 --- a/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb +++ b/spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb @@ -259,15 +259,15 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor # warm up filter(link, context) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do filter(link, context) - end.count + end - expect(control_count).to eq 12 + expect(control.count).to eq 12 expect do filter("#{link} #{link2}", context) - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end end @@ -419,15 +419,15 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor # warm up filter(link, context) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do filter(link, context) - end.count + end - expect(control_count).to eq 10 + expect(control.count).to eq 10 expect do filter("#{link} #{link2}", context) - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end end diff --git a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb index 9a2e68aaae0..0bdd64c360d 100644 --- a/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb @@ -230,11 +230,11 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor it 'does not 
have N+1 per multiple references per project', :use_sql_query_cache do markdown = alert_reference.to_s - max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do reference_filter(markdown) - end.count + end - expect(max_count).to eq 1 + expect(control.count).to eq 1 markdown = "#{alert_reference} ^alert#2 ^alert#3 ^alert#4 #{alert2_reference}" @@ -248,11 +248,9 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor # 1x2 for alerts in each project # Total == 7 # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359 - max_count += 6 - expect do reference_filter(markdown) - end.not_to exceed_all_query_limit(max_count) + end.not_to exceed_all_query_limit(control).with_threshold(6) end end end diff --git a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb index 35a3f20f7b7..730554857df 100644 --- a/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb @@ -283,11 +283,11 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego it 'does not have N+1 per multiple references per project', :use_sql_query_cache do markdown = commit_reference.to_s - max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do reference_filter(markdown) - end.count + end - expect(max_count).to eq 0 + expect(control.count).to eq 0 markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}" @@ -298,11 +298,9 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego # 1 for loading the namespaces associated to the project # 1 for loading the routes associated with the namespace # Total = 5 - max_count += 5 - expect do reference_filter(markdown) - end.not_to 
exceed_all_query_limit(max_count) + end.not_to exceed_all_query_limit(control).with_threshold(5) end end end diff --git a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb index fd03d7c0d27..678d6619101 100644 --- a/spec/lib/banzai/filter/references/design_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb @@ -240,7 +240,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego * #1[not a valid reference.gif] MD - baseline = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) } + control = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) } # each project mentioned requires 2 queries: # @@ -253,7 +253,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego # In addition there is a 1 query overhead for all the projects at the # start. Currently, the baseline for 2 projects is `2 * 2 + 1 = 5` queries # - expect { process(multiple_references) }.not_to exceed_query_limit(baseline.count) + expect { process(multiple_references) }.not_to exceed_query_limit(control) end end diff --git a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb index 823f006c98a..acc59c85cbf 100644 --- a/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb @@ -338,9 +338,9 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter, feature single_reference = "External Issue #{issue1.to_reference}" multiple_references = "External Issues #{issue1.to_reference} and #{issue2.to_reference}" - control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count + control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html } - 
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count) + expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control) end end end diff --git a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb index d16188e99a3..fd947e3e9cb 100644 --- a/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb @@ -41,9 +41,9 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor single_reference = "Issue #{issue.to_reference}" multiple_references = "Issues #{issue.to_reference} and #{another_issue.to_reference}" - control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count + control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html } - expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count) + expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control) end end diff --git a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb index 81b08a4c516..bcc256813c9 100644 --- a/spec/lib/banzai/filter/references/label_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb @@ -35,13 +35,13 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor # Run this once to establish a baseline reference_filter("Label #{reference}") - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do reference_filter("Label #{reference}") end labels_markdown = Array.new(10, "Label #{reference}").join('\n') - expect { reference_filter(labels_markdown) }.not_to 
exceed_all_query_limit(control_count.count) + expect { reference_filter(labels_markdown) }.not_to exceed_all_query_limit(control) end it 'includes a data-project attribute' do diff --git a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb index ccc8478c7d8..e3036993f7b 100644 --- a/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb @@ -26,9 +26,9 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter, feature_ single_reference = "Merge request #{merge.to_reference}" multiple_references = "Merge requests #{merge.to_reference} and #{another_merge.to_reference}" - control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count + control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html } - expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count) + expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control) end end diff --git a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb index c55fff78756..12af94507b6 100644 --- a/spec/lib/banzai/filter/references/project_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb @@ -115,17 +115,17 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter, feature_categ # warm up first reference_filter(markdown) - max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do reference_filter(markdown) - end.count + end - expect(max_count).to eq 2 + expect(control.count).to eq 2 markdown = "#{normal_project_reference} #{invalidate_reference(normal_project_reference)} 
#{group_project_reference} #{nested_project_reference}" expect do reference_filter(markdown) - end.not_to exceed_all_query_limit(max_count) + end.not_to exceed_all_query_limit(control) end end end diff --git a/spec/lib/banzai/filter/references/reference_cache_spec.rb b/spec/lib/banzai/filter/references/reference_cache_spec.rb index 04877931610..b4d9a08e4c6 100644 --- a/spec/lib/banzai/filter/references/reference_cache_spec.rb +++ b/spec/lib/banzai/filter/references/reference_cache_spec.rb @@ -70,13 +70,13 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te filter_single = filter_class.new(doc_single, project: project) cache_single = described_class.new(filter_single, { project: project }, {}) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do cache_single.load_references_per_parent(filter_single.nodes) cache_single.load_parent_per_reference cache_single.load_records_per_parent - end.count + end - expect(control_count).to eq 3 + expect(control.count).to eq 3 # Since this is an issue filter that is not batching issue queries # across projects, we have to account for that. 
# 1 for for routes to find routes.source_id of projects matching paths @@ -88,13 +88,11 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te # 1x2 for groups # 1x2 for work_item_types # Total = 11 - max_count = control_count + 8 - expect do cache.load_references_per_parent(filter.nodes) cache.load_parent_per_reference cache.load_records_per_parent - end.not_to exceed_query_limit(max_count) + end.not_to exceed_query_limit(control).with_threshold(8) end end diff --git a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb index 00eac7262f4..51c5551dda8 100644 --- a/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb @@ -229,11 +229,11 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ it 'does not have N+1 per multiple references per project', :use_sql_query_cache do markdown = "#{reference} $9999990" - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do reference_filter(markdown) - end.count + end - expect(control_count).to eq 1 + expect(control.count).to eq 1 markdown = "#{reference} $9999990 $9999991 $9999992 $9999993 #{reference2} something/cool$12" @@ -247,11 +247,9 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ # 1x2 for snippets in each project == 2 # Total = 7 # TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359 - max_count = control_count + 6 - expect do reference_filter(markdown) - end.not_to exceed_all_query_limit(max_count) + end.not_to exceed_all_query_limit(control).with_threshold(6) end end end diff --git a/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb b/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb index e59e53891bf..cf245ccc72a 100644 --- 
a/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb +++ b/spec/lib/banzai/filter/references/work_item_reference_filter_spec.rb @@ -306,9 +306,9 @@ RSpec.describe Banzai::Filter::References::WorkItemReferenceFilter, feature_cate single_reference = "Work item #{work_item.to_reference}" multiple_references = "Work items #{work_item.to_reference} and #{another_work_item.to_reference}" - control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count + control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html } - expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count) + expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control) end end end diff --git a/spec/lib/banzai/issuable_extractor_spec.rb b/spec/lib/banzai/issuable_extractor_spec.rb index 5bbd98592e7..fe1a2bd9a2e 100644 --- a/spec/lib/banzai/issuable_extractor_spec.rb +++ b/spec/lib/banzai/issuable_extractor_spec.rb @@ -45,9 +45,9 @@ RSpec.describe Banzai::IssuableExtractor, feature_category: :team_planning do second_call_queries = ActiveRecord::QueryRecorder.new do extractor.extract([issue_link, work_item_link, merge_request_link]) - end.count + end - expect(second_call_queries).to eq 0 + expect(second_call_queries.count).to eq 0 end end end diff --git a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb index 8f4148be2dc..0f3834c2dc8 100644 --- a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb +++ b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb @@ -37,11 +37,11 @@ RSpec.describe Banzai::ReferenceParser::SnippetParser, feature_category: :team_p # Run this once to establish a baseline visible_references(:public) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do 
subject.nodes_visible_to_user(user, [link]) end - expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control_count.count) + expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control) end it 'creates a reference for guest for a public snippet' do diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb index ad7cd2dc736..5fa61b1680d 100644 --- a/spec/lib/gitlab/data_builder/pipeline_spec.rb +++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb @@ -184,14 +184,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_inte create(:ci_build, :deploy_to_production, :with_deployment, user: user, project: project, pipeline: pipeline) # We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy` - control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json }.count + control = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json } # Adding more builds to the pipeline and serializing the data again create_list(:ci_build, 3, user: user, project: project, pipeline: pipeline) create(:ci_build, :start_review_app, :with_deployment, user: user, project: project, pipeline: pipeline) create(:ci_build, :stop_review_app, :with_deployment, user: user, project: project, pipeline: pipeline) - expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control_count) + expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control) end it "with multiple retried builds" do @@ -201,14 +201,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_inte create(:ci_build, :deploy_to_production, :retried, :with_deployment, user: user, project: project, pipeline: pipeline) # We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy` - control_count = 
ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json }.count + control = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json } # Adding more builds to the pipeline and serializing the data again create_list(:ci_build, 3, :retried, user: user, project: project, pipeline: pipeline) create(:ci_build, :start_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline) create(:ci_build, :stop_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline) - expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control_count) + expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control) end end end diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb index 71f9a7d0f0e..82daaba6448 100644 --- a/spec/lib/gitlab/git_access_spec.rb +++ b/spec/lib/gitlab/git_access_spec.rb @@ -1062,14 +1062,14 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :system # additional queries. access.check('git-receive-pack', changes) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do access.check('git-receive-pack', changes) end changes = ['6f6d7e7ed 570e7b2ab refs/heads/master', '6f6d7e7ed 570e7b2ab refs/heads/feature'] # There is still an N+1 query with protected branches - expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control_count).with_threshold(2) + expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control).with_threshold(2) end it 'raises TimeoutError when #check_access! 
raises a timeout error' do diff --git a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb deleted file mode 100644 index 5a5c7123971..00000000000 --- a/spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb +++ /dev/null @@ -1,140 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :do_not_stub_snowplow_by_default, feature_category: :application_instrumentation do - let(:emitter) { SnowplowTracker::Emitter.new(endpoint: 'localhost', options: { buffer_size: 1 }) } - - let(:tracker) do - SnowplowTracker::Tracker - .new( - emitters: [emitter], - subject: SnowplowTracker::Subject.new, - namespace: 'namespace', - app_id: 'app_id' - ) - end - - before do - stub_application_setting(snowplow_app_id: '_abc123_') - end - - around do |example| - freeze_time { example.run } - end - - context 'when snowplow is enabled' do - before do - allow(SnowplowTracker::AsyncEmitter) - .to receive(:new) - .with(endpoint: endpoint, - options: - { - protocol: 'https', - on_success: subject.method(:increment_successful_events_emissions), - on_failure: subject.method(:failure_callback) - } - ).and_return(emitter) - - allow(SnowplowTracker::Tracker) - .to receive(:new) - .with( - emitters: [emitter], - subject: an_instance_of(SnowplowTracker::Subject), - namespace: described_class::SNOWPLOW_NAMESPACE, - app_id: '_abc123_' - ).and_return(tracker) - end - - describe '#event' do - let(:endpoint) { 'localhost:9091' } - let(:event_params) do - { - category: 'category', - action: 'action', - label: 'label', - property: 'property', - value: 1.5, - context: nil, - tstamp: (Time.now.to_f * 1000).to_i - } - end - - context 'when on gitlab.com environment' do - let(:endpoint) { 'db-snowplow.trx.gitlab.net' } - - before do - stub_application_setting(snowplow_database_collector_hostname: endpoint) - end - - it 
'sends event to tracker' do - allow(Gitlab).to receive(:com?).and_return(true) - allow(tracker).to receive(:track_struct_event).and_call_original - - subject.event('category', 'action', label: 'label', property: 'property', value: 1.5) - - expect(tracker).to have_received(:track_struct_event).with(event_params) - end - end - - it 'sends event to tracker' do - allow(tracker).to receive(:track_struct_event).and_call_original - - subject.event('category', 'action', label: 'label', property: 'property', value: 1.5) - - expect(tracker).to have_received(:track_struct_event).with(event_params) - end - - it 'increase total snowplow events counter' do - counter = double - - expect(counter).to receive(:increment) - expect(Gitlab::Metrics).to receive(:counter) - .with(:gitlab_db_events_snowplow_events_total, 'Number of Snowplow events') - .and_return(counter) - - subject.event('category', 'action', label: 'label', property: 'property', value: 1.5) - end - end - end - - context 'for callbacks' do - describe 'on success' do - it 'increase gitlab_successful_snowplow_events_total counter' do - counter = double - - expect(counter).to receive(:increment).with({}, 2) - expect(Gitlab::Metrics).to receive(:counter) - .with( - :gitlab_db_events_snowplow_successful_events_total, - 'Number of successful Snowplow events emissions').and_return(counter) - - subject.method(:increment_successful_events_emissions).call(2) - end - end - - describe 'on failure' do - it 'increase gitlab_failed_snowplow_events_total counter and logs failures', :aggregate_failures do - counter = double - error_message = "Issue database_event_update failed to be reported to collector at localhost:9091" - failures = [{ "e" => "se", - "se_ca" => "Issue", - "se_la" => "issues", - "se_ac" => "database_event_update" }] - allow(Gitlab::Metrics).to receive(:counter) - .with( - :gitlab_db_events_snowplow_successful_events_total, - 'Number of successful Snowplow events emissions').and_call_original - - 
expect(Gitlab::AppLogger).to receive(:error).with(error_message) - expect(counter).to receive(:increment).with({}, 1) - expect(Gitlab::Metrics).to receive(:counter) - .with( - :gitlab_db_events_snowplow_failed_events_total, - 'Number of failed Snowplow events emissions').and_return(counter) - - subject.method(:failure_callback).call(2, failures) - end - end - end -end diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb index 46213532071..6e4ab00d04f 100644 --- a/spec/lib/gitlab/tracking_spec.rb +++ b/spec/lib/gitlab/tracking_spec.rb @@ -165,38 +165,6 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation end end - describe '.database_event' do - context 'when the action is not passed in as a string' do - it 'allows symbols' do - expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) - - described_class.database_event('category', :some_action) - end - - it 'allows nil' do - expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) - - described_class.database_event('category', nil) - end - - it 'allows integers' do - expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) - - described_class.database_event('category', 1) - end - end - - it_behaves_like 'rescued error raised by destination class' do - let(:category) { 'Issue' } - let(:action) { 'created' } - let(:destination_class) { Gitlab::Tracking::Destinations::DatabaseEventsSnowplow } - - subject(:tracking_method) { described_class.database_event(category, action) } - end - - it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :database_event - end - describe '.event' do context 'when the action is not passed in as a string' do it 'allows symbols' do diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb index e835681a233..34311a8ae22 100644 --- a/spec/mailers/notify_spec.rb +++ b/spec/mailers/notify_spec.rb @@ -2461,23 
+2461,27 @@ RSpec.describe Notify, feature_category: :code_review_workflow do end it 'avoids N+1 cached queries when rendering html', :use_sql_query_cache, :request_store do - control_count = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do subject.html_part end create_list(:diff_note_on_merge_request, 3, review: review, project: project, author: review.author, noteable: merge_request) - expect { described_class.new_review_email(recipient.id, review.id).html_part }.not_to exceed_all_query_limit(control_count) + expect do + described_class.new_review_email(recipient.id, review.id).html_part + end.not_to exceed_all_query_limit(control) end it 'avoids N+1 cached queries when rendering text', :use_sql_query_cache, :request_store do - control_count = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do subject.text_part end create_list(:diff_note_on_merge_request, 3, review: review, project: project, author: review.author, noteable: merge_request) - expect { described_class.new_review_email(recipient.id, review.id).text_part }.not_to exceed_all_query_limit(control_count) + expect do + described_class.new_review_email(recipient.id, review.id).text_part + end.not_to exceed_all_query_limit(control) end end diff --git a/spec/models/analytics/cycle_analytics/stage_spec.rb b/spec/models/analytics/cycle_analytics/stage_spec.rb index 54ae0feca2c..abe041ae5d6 100644 --- a/spec/models/analytics/cycle_analytics/stage_spec.rb +++ b/spec/models/analytics/cycle_analytics/stage_spec.rb @@ -81,20 +81,4 @@ RSpec.describe Analytics::CycleAnalytics::Stage, feature_category: :value_stream expect(current_event_pairs).to eq(expected_event_pairs) end end - - it_behaves_like 'database events tracking' do - let(:namespace) { create(:group) } - 
let(:value_stream) { create(:cycle_analytics_value_stream) } - let(:record) { described_class.create!(stage_params) } - let(:update_params) { { name: 'st 2' } } - let(:stage_params) do - { - namespace: namespace, - name: 'st1', - start_event_identifier: :merge_request_created, - end_event_identifier: :merge_request_merged, - group_value_stream_id: value_stream.id - } - end - end end diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb index 5fc5bbd41ff..a95f56ea714 100644 --- a/spec/models/clusters/cluster_spec.rb +++ b/spec/models/clusters/cluster_spec.rb @@ -576,9 +576,9 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching, it 'avoids N+1 queries' do another_project = create(:project) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do described_class.ancestor_clusters_for_clusterable(another_project, hierarchy_order: hierarchy_order) - end.count + end cluster2 = create(:cluster, :provided_by_gcp, :group) child2 = cluster2.group @@ -587,7 +587,7 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching, expect do described_class.ancestor_clusters_for_clusterable(project, hierarchy_order: hierarchy_order) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end context 'for a group' do diff --git a/spec/models/concerns/database_event_tracking_spec.rb b/spec/models/concerns/database_event_tracking_spec.rb deleted file mode 100644 index a99b4737537..00000000000 --- a/spec/models/concerns/database_event_tracking_spec.rb +++ /dev/null @@ -1,85 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe DatabaseEventTracking, :snowplow, feature_category: :service_ping do - before do - allow(Gitlab::Tracking).to receive(:database_event).and_call_original - end - - let(:test_class) do - Class.new(ActiveRecord::Base) do - include DatabaseEventTracking - - self.table_name = 
'application_setting_terms' - - self::SNOWPLOW_ATTRIBUTES = %w[id].freeze # rubocop:disable RSpec/LeakyConstantDeclaration - end - end - - subject(:create_test_class_record) { test_class.create!(id: 1, terms: "") } - - context 'if event emmiter failed' do - before do - allow(Gitlab::Tracking).to receive(:database_event).and_raise(StandardError) # rubocop:disable RSpec/ExpectGitlabTracking - end - - it 'tracks the exception' do - expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) - - create_test_class_record - end - end - - describe 'event tracking' do - let(:category) { test_class.to_s } - let(:event) { 'database_event' } - - it 'when created' do - create_test_class_record - - expect_snowplow_event( - tracking_method: :database_event, - category: category, - action: "#{event}_create", - label: 'application_setting_terms', - property: 'create', - namespace: nil, - project: nil, - "id" => 1 - ) - end - - it 'when updated' do - create_test_class_record - test_class.first.update!(id: 3) - - expect_snowplow_event( - tracking_method: :database_event, - category: category, - action: "#{event}_update", - label: 'application_setting_terms', - property: 'update', - namespace: nil, - project: nil, - "id" => 3 - ) - end - - it 'when destroyed' do - create_test_class_record - test_class.first.destroy! 
- - expect_snowplow_event( - tracking_method: :database_event, - category: category, - action: "#{event}_destroy", - label: 'application_setting_terms', - property: 'destroy', - namespace: nil, - project: nil, - "id" => 1 - ) - end - end -end diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb index e71392f7bbc..c6135e769cc 100644 --- a/spec/models/concerns/routable_spec.rb +++ b/spec/models/concerns/routable_spec.rb @@ -89,7 +89,7 @@ RSpec.shared_examples 'routable resource' do context 'when use_includes: true' do it 'includes route information when loading records' do - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do described_class.where_full_path_in([record.full_path, record_2.full_path], use_includes: true) .map(&:route) end @@ -103,7 +103,7 @@ RSpec.shared_examples 'routable resource' do record_4.full_path ], use_includes: true) .map(&:route) - end.to issue_same_number_of_queries_as(control_count) + end.to issue_same_number_of_queries_as(control) end end diff --git a/spec/models/merge_request/metrics_spec.rb b/spec/models/merge_request/metrics_spec.rb index e9e4956dc41..8d1d503b323 100644 --- a/spec/models/merge_request/metrics_spec.rb +++ b/spec/models/merge_request/metrics_spec.rb @@ -93,12 +93,4 @@ RSpec.describe MergeRequest::Metrics do end end end - - it_behaves_like 'database events tracking', feature_category: :service_ping do - let(:merge_request) { create(:merge_request) } - - let(:record) { merge_request.metrics } - let(:namespace) { nil } - let(:update_params) { { pipeline_id: 1, updated_at: Date.tomorrow } } - end end diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb index 5665bce8f08..67b8931f0c5 100644 --- a/spec/models/namespace_spec.rb +++ b/spec/models/namespace_spec.rb @@ -897,12 +897,14 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do it 'does not cause N+1 query in fetching registries' do 
stub_container_registry_tags(repository: :any, tags: []) - control_count = ActiveRecord::QueryRecorder.new { namespace.any_project_has_container_registry_tags? }.count + control = ActiveRecord::QueryRecorder.new { namespace.any_project_has_container_registry_tags? } other_repositories = create_list(:container_repository, 2) create(:project, namespace: namespace, container_repositories: other_repositories) - expect { namespace.first_project_with_container_registry_tags }.not_to exceed_query_limit(control_count + 1) + expect do + namespace.first_project_with_container_registry_tags + end.not_to exceed_query_limit(control).with_threshold(1) end end diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb index 5aa3ac3a2ea..59795059642 100644 --- a/spec/models/note_spec.rb +++ b/spec/models/note_spec.rb @@ -448,13 +448,13 @@ RSpec.describe Note, feature_category: :team_planning do # Project authorization checks are cached, establish a baseline retrieve_participants - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do retrieve_participants end create(:note_on_commit, project: note.project, note: 'another note', noteable_id: commit.id) - expect { retrieve_participants }.not_to exceed_query_limit(control_count) + expect { retrieve_participants }.not_to exceed_query_limit(control) end end diff --git a/spec/models/preloaders/commit_status_preloader_spec.rb b/spec/models/preloaders/commit_status_preloader_spec.rb index 85ea784335c..0453b6267ed 100644 --- a/spec/models/preloaders/commit_status_preloader_spec.rb +++ b/spec/models/preloaders/commit_status_preloader_spec.rb @@ -21,13 +21,13 @@ RSpec.describe Preloaders::CommitStatusPreloader do it 'prevents N+1 for specified relations', :use_sql_query_cache do execute - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do call_each_relation(statuses.sample(3)) end expect do 
call_each_relation(statuses) - end.to issue_same_number_of_queries_as(control_count) + end.to issue_same_number_of_queries_as(control) end private diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb index a3db29e7ec4..7b8b6376aea 100644 --- a/spec/models/project_spec.rb +++ b/spec/models/project_spec.rb @@ -2342,11 +2342,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr it 'avoids n + 1', :aggregate_failures do create(:prometheus_integration) run_test = -> { described_class.include_integration(:prometheus_integration).map(&:prometheus_integration) } - control_count = ActiveRecord::QueryRecorder.new { run_test.call } + control = ActiveRecord::QueryRecorder.new { run_test.call } create(:prometheus_integration) expect(run_test.call.count).to eq(2) - expect { run_test.call }.not_to exceed_query_limit(control_count) + expect { run_test.call }.not_to exceed_query_limit(control) end end @@ -6593,17 +6593,17 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr let_it_be(:subject) { create(:project) } it 'avoids N+1 database queries' do - control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations }.count + control = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations } - expect(control_count).to be <= 4 + expect(control.count).to be <= 4 end it 'avoids N+1 database queries with more available integrations' do allow(Integration).to receive(:available_integration_names).and_return(%w[pushover]) - control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations } + control = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integrations } allow(Integration).to receive(:available_integration_names).and_call_original - expect { subject.find_or_initialize_integrations }.not_to exceed_query_limit(control_count) + expect { subject.find_or_initialize_integrations }.not_to exceed_query_limit(control) end 
context 'with disabled integrations' do @@ -6650,11 +6650,11 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr it 'avoids N+1 database queries' do allow(Integration).to receive(:available_integration_names).and_return(%w[prometheus pushover]) - control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integration('prometheus') }.count + control = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_integration('prometheus') } allow(Integration).to receive(:available_integration_names).and_call_original - expect { subject.find_or_initialize_integration('prometheus') }.not_to exceed_query_limit(control_count) + expect { subject.find_or_initialize_integration('prometheus') }.not_to exceed_query_limit(control) end it 'returns nil if integration is disabled' do diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 6d3e9058f91..58d64742c92 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -4573,13 +4573,13 @@ RSpec.describe User, feature_category: :user_profile do it 'avoids N+1 queries' do fresh_user = described_class.find(user.id) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do fresh_user.solo_owned_groups - end.count + end create(:group).add_owner(user) - expect { solo_owned_groups }.not_to exceed_query_limit(control_count) + expect { solo_owned_groups }.not_to exceed_query_limit(control) end end end diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb index 941aa3734a3..9e1203bc720 100644 --- a/spec/requests/api/ci/jobs_spec.rb +++ b/spec/requests/api/ci/jobs_spec.rb @@ -133,12 +133,12 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do end it 'avoids N+1 queries', :skip_before_request do - control_count = ActiveRecord::QueryRecorder.new { perform_request }.count + control = ActiveRecord::QueryRecorder.new { perform_request } running_job = create(:ci_build, 
:running, project: project, user: user, pipeline: pipeline, artifacts_expire_at: 1.day.since) running_job.save! - expect { perform_request }.not_to exceed_query_limit(control_count) + expect { perform_request }.not_to exceed_query_limit(control) end it_behaves_like 'returns common pipeline data' do @@ -432,7 +432,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do first_build.user = create(:user) first_build.save! - control_count = ActiveRecord::QueryRecorder.new { go }.count + control = ActiveRecord::QueryRecorder.new { go } second_pipeline = create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch) second_build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: second_pipeline) @@ -440,7 +440,7 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do second_build.user = create(:user) second_build.save! - expect { go }.not_to exceed_query_limit(control_count) + expect { go }.not_to exceed_query_limit(control) end context 'filter project with one scope element' do diff --git a/spec/requests/api/ci/pipeline_schedules_spec.rb b/spec/requests/api/ci/pipeline_schedules_spec.rb index f534b093b7c..588991096b5 100644 --- a/spec/requests/api/ci/pipeline_schedules_spec.rb +++ b/spec/requests/api/ci/pipeline_schedules_spec.rb @@ -42,15 +42,15 @@ RSpec.describe API::Ci::PipelineSchedules, feature_category: :continuous_integra # We need at least two users to trigger a preload for that relation. 
create_pipeline_schedules(1) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do get api("/projects/#{project.id}/pipeline_schedules", developer) - end.count + end create_pipeline_schedules(5) expect do get api("/projects/#{project.id}/pipeline_schedules", developer) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end %w[active inactive].each do |target| diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb index eef125e1bc3..ef169dbe872 100644 --- a/spec/requests/api/ci/pipelines_spec.rb +++ b/spec/requests/api/ci/pipelines_spec.rb @@ -471,15 +471,15 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do end it 'avoids N+1 queries' do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query - end.count + end create_list(:ci_build, 3, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline) expect do get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end context 'pipeline has retried jobs' do @@ -671,15 +671,15 @@ RSpec.describe API::Ci::Pipelines, feature_category: :continuous_integration do end it 'avoids N+1 queries' do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query - end.count + end 3.times { create_bridge(pipeline) } expect do get api("/projects/#{project.id}/pipelines/#{pipeline.id}/bridges", api_user), params: query - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end 
end diff --git a/spec/requests/api/ci/runners_spec.rb b/spec/requests/api/ci/runners_spec.rb index 187880e16a4..b4394f47105 100644 --- a/spec/requests/api/ci/runners_spec.rb +++ b/spec/requests/api/ci/runners_spec.rb @@ -965,7 +965,7 @@ RSpec.describe API::Ci::Runners, :aggregate_failures, feature_category: :fleet_v expect do get api("/runners/#{shared_runner.id}/jobs", admin, admin_mode: true) - end.not_to exceed_query_limit(control.count) + end.not_to exceed_query_limit(control) end it 'batches loading of commits' do diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb index 4ec5d195ff8..67388f657f4 100644 --- a/spec/requests/api/commits_spec.rb +++ b/spec/requests/api/commits_spec.rb @@ -1842,11 +1842,11 @@ RSpec.describe API::Commits, feature_category: :source_code_management do it 'are returned without N + 1' do get api(route, current_user) # warm up the cache - control_count = ActiveRecord::QueryRecorder.new { get api(route, current_user) }.count + control = ActiveRecord::QueryRecorder.new { get api(route, current_user) } create(:diff_note_on_commit, project: project, author: create(:user)) - expect { get api(route, current_user) }.not_to exceed_query_limit(control_count) + expect { get api(route, current_user) }.not_to exceed_query_limit(control) end end end @@ -2386,11 +2386,11 @@ RSpec.describe API::Commits, feature_category: :source_code_management do it 'returns multiple merge requests without N + 1' do perform_request(user) - control_count = ActiveRecord::QueryRecorder.new { perform_request(user) }.count + control = ActiveRecord::QueryRecorder.new { perform_request(user) } create(:merge_request, :closed, source_project: project, source_branch: 'master', target_branch: 'feature') - expect { perform_request(user) }.not_to exceed_query_limit(control_count) + expect { perform_request(user) }.not_to exceed_query_limit(control) end end diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb 
index 30c345ef458..ca19a97ae49 100644 --- a/spec/requests/api/deploy_keys_spec.rb +++ b/spec/requests/api/deploy_keys_spec.rb @@ -135,11 +135,11 @@ RSpec.describe API::DeployKeys, :aggregate_failures, feature_category: :continuo it 'returns multiple deploy keys without N + 1' do perform_request - control_count = ActiveRecord::QueryRecorder.new { perform_request }.count + control = ActiveRecord::QueryRecorder.new { perform_request } create(:deploy_key, public: true, projects: [project], user: maintainer) - expect { perform_request }.not_to exceed_query_limit(control_count) + expect { perform_request }.not_to exceed_query_limit(control) end end diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb index 5a8e1649e75..f68307df779 100644 --- a/spec/requests/api/deployments_spec.rb +++ b/spec/requests/api/deployments_spec.rb @@ -143,11 +143,11 @@ RSpec.describe API::Deployments, feature_category: :continuous_delivery do it 'returns multiple deployments without N + 1' do perform_request # warm up the cache - control_count = ActiveRecord::QueryRecorder.new { perform_request }.count + control = ActiveRecord::QueryRecorder.new { perform_request } create(:deployment, :success, project: project, deployable: build, iid: 21, ref: 'master') - expect { perform_request }.not_to exceed_query_limit(control_count) + expect { perform_request }.not_to exceed_query_limit(control) end end diff --git a/spec/requests/api/feature_flags_spec.rb b/spec/requests/api/feature_flags_spec.rb index 4fb0dfbb070..2e513194627 100644 --- a/spec/requests/api/feature_flags_spec.rb +++ b/spec/requests/api/feature_flags_spec.rb @@ -67,12 +67,12 @@ RSpec.describe API::FeatureFlags, feature_category: :feature_flags do end it 'does not have N+1 problem' do - control_count = ActiveRecord::QueryRecorder.new { subject } + control = ActiveRecord::QueryRecorder.new { subject } create_list(:operations_feature_flag, 3, project: project) expect { get 
api("/projects/#{project.id}/feature_flags", user) } - .not_to exceed_query_limit(control_count) + .not_to exceed_query_limit(control) end it_behaves_like 'check user permission' diff --git a/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb b/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb index 32048ea1432..94678bd18da 100644 --- a/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb +++ b/spec/requests/api/graphql/achievements/user_achievements_query_spec.rb @@ -89,14 +89,14 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do end it 'can lookahead to eliminate N+1 queries', :use_clean_rails_memory_store_caching do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do post_graphql(query, current_user: user) - end.count + end user2 = create(:user) create(:user_achievement, achievement: achievement, user: user2) - expect { post_graphql(query, current_user: user) }.not_to exceed_all_query_limit(control_count) + expect { post_graphql(query, current_user: user) }.not_to exceed_all_query_limit(control) end context 'when the achievements feature flag is disabled' do diff --git a/spec/requests/api/graphql/projects/projects_spec.rb b/spec/requests/api/graphql/projects/projects_spec.rb index 84b8c2285f0..dfebcb7c42c 100644 --- a/spec/requests/api/graphql/projects/projects_spec.rb +++ b/spec/requests/api/graphql/projects/projects_spec.rb @@ -45,14 +45,14 @@ RSpec.describe 'getting a collection of projects', feature_category: :source_cod it 'avoids N+1 queries', :use_sql_query_cache, :clean_gitlab_redis_cache do post_graphql(single_project_query, current_user: current_user) - query_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do post_graphql(single_project_query, current_user: current_user) - end.count + end # There is an N+1 query for max_member_access_for_user_ids 
expect do post_graphql(query, current_user: current_user) - end.not_to exceed_all_query_limit(query_count + 5) + end.not_to exceed_all_query_limit(control).with_threshold(5) end it 'returns the expected projects' do diff --git a/spec/requests/api/graphql/user/user_achievements_query_spec.rb b/spec/requests/api/graphql/user/user_achievements_query_spec.rb index 2e6c3dcba61..ccff5bdf919 100644 --- a/spec/requests/api/graphql/user/user_achievements_query_spec.rb +++ b/spec/requests/api/graphql/user/user_achievements_query_spec.rb @@ -60,14 +60,14 @@ RSpec.describe 'UserAchievements', feature_category: :user_profile do end it 'can lookahead to eliminate N+1 queries', :use_clean_rails_memory_store_caching do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do post_graphql(query, current_user: user) - end.count + end achievement2 = create(:achievement, namespace: group) create_list(:user_achievement, 2, achievement: achievement2, user: user) - expect { post_graphql(query, current_user: user) }.not_to exceed_all_query_limit(control_count) + expect { post_graphql(query, current_user: user) }.not_to exceed_all_query_limit(control) end context 'when the achievements feature flag is disabled for a namespace' do diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb index fe77b7ae736..c6d44b057a7 100644 --- a/spec/requests/api/graphql/work_item_spec.rb +++ b/spec/requests/api/graphql/work_item_spec.rb @@ -199,7 +199,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do it 'avoids N+1 queries' do post_graphql(query, current_user: current_user) # warm up - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do post_graphql(query, current_user: current_user) end @@ -207,7 +207,7 @@ RSpec.describe 'Query.work_item(id)', feature_category: 
:team_planning do expect do post_graphql(query, current_user: current_user) - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end context 'when user is guest' do diff --git a/spec/requests/api/group_milestones_spec.rb b/spec/requests/api/group_milestones_spec.rb index 82a4311f7d0..7b4075b3aeb 100644 --- a/spec/requests/api/group_milestones_spec.rb +++ b/spec/requests/api/group_milestones_spec.rb @@ -141,11 +141,11 @@ RSpec.describe API::GroupMilestones, feature_category: :team_planning do it 'returns multiple issues without performing N + 1' do perform_request - control_count = ActiveRecord::QueryRecorder.new { perform_request }.count + control = ActiveRecord::QueryRecorder.new { perform_request } create(:issue, project: project, milestone: milestone) - expect { perform_request }.not_to exceed_query_limit(control_count) + expect { perform_request }.not_to exceed_query_limit(control) end end diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb index d1158cba16e..6b949962e53 100644 --- a/spec/requests/api/groups_spec.rb +++ b/spec/requests/api/groups_spec.rb @@ -660,24 +660,24 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do get api("/groups/#{group1.id}", user1) expect(response).to have_gitlab_http_status(:ok) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do get api("/groups/#{group1.id}", user1) - end.count + end create(:project, namespace: group1) expect do get api("/groups/#{group1.id}", user1) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end it 'avoids N+1 queries with shared group links' do # setup at least 1 shared group, so that we record the queries that preload the nested associations too. 
create(:group_group_link, shared_group: group1, shared_with_group: create(:group)) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do get api("/groups/#{group1.id}", user1) - end.count + end # setup "n" more shared groups create(:group_group_link, shared_group: group1, shared_with_group: create(:group)) @@ -686,7 +686,7 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do # test that no of queries for 1 shared group is same as for n shared groups expect do get api("/groups/#{group1.id}", user1) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end end @@ -1364,15 +1364,15 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do get api("/groups/#{group1.id}/projects", user1) expect(response).to have_gitlab_http_status(:ok) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do get api("/groups/#{group1.id}/projects", user1) - end.count + end create(:project, namespace: group1) expect do get api("/groups/#{group1.id}/projects", user1) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end end @@ -1563,15 +1563,15 @@ RSpec.describe API::Groups, feature_category: :groups_and_projects do subject expect(response).to have_gitlab_http_status(:ok) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do subject - end.count + end create(:project_group_link, project: create(:project), group: group1) expect do subject - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end end diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb index dc02e830027..60f3c4780eb 100644 --- a/spec/requests/api/invitations_spec.rb +++ b/spec/requests/api/invitations_spec.rb @@ -412,7 +412,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do expect do post 
invitations_url(project, maintainer), params: { email: emails, access_level: Member::DEVELOPER } - end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones) + end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones) end it 'does not exceed expected queries count for user_ids', :request_store, :use_sql_query_cache do @@ -430,7 +430,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do expect do post invitations_url(project, maintainer), params: { user_id: users.map(&:id).join(','), access_level: Member::DEVELOPER } - end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones) + end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones) end it 'does not exceed expected queries count with secondary emails', :request_store, :use_sql_query_cache do @@ -453,7 +453,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do expect do post invitations_url(project, maintainer), params: { email: emails, access_level: Member::DEVELOPER } - end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones) + end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones) end end @@ -491,7 +491,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do expect do post invitations_url(group, maintainer), params: { email: emails, access_level: Member::DEVELOPER } - end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones) + end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones) end it 'does not exceed expected queries count for secondary emails', :request_store, :use_sql_query_cache do @@ -514,7 +514,7 @@ RSpec.describe API::Invitations, feature_category: :user_profile do expect do post invitations_url(group, maintainer), params: { email: emails, access_level: Member::DEVELOPER } - end.not_to 
exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones) + end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones) end end diff --git a/spec/requests/api/issue_links_spec.rb b/spec/requests/api/issue_links_spec.rb index fcb199a91a4..a4a9eca92b9 100644 --- a/spec/requests/api/issue_links_spec.rb +++ b/spec/requests/api/issue_links_spec.rb @@ -40,11 +40,11 @@ RSpec.describe API::IssueLinks, feature_category: :team_planning do it 'returns multiple links without N + 1' do perform_request(user) - control_count = ActiveRecord::QueryRecorder.new { perform_request(user) }.count + control = ActiveRecord::QueryRecorder.new { perform_request(user) } create(:issue_link, source: issue, target: create(:issue, project: project)) - expect { perform_request(user) }.not_to exceed_query_limit(control_count) + expect { perform_request(user) }.not_to exceed_query_limit(control) end end end diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb index 9e54ec08486..6719297f54f 100644 --- a/spec/requests/api/issues/get_project_issues_spec.rb +++ b/spec/requests/api/issues/get_project_issues_spec.rb @@ -233,9 +233,9 @@ RSpec.describe API::Issues, feature_category: :team_planning do issues = create_list(:issue, 3, project: project, closed_by: user) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get api("/projects/#{project.id}/issues", user) - end.count + end milestone = create(:milestone, project: project) create(:issue, project: project, milestone: milestone, closed_by: create(:user)) @@ -245,7 +245,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect do get api("/projects/#{project.id}/issues", user) - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end it 'returns 404 when project does not exist' do @@ -361,9 +361,9 @@ 
RSpec.describe API::Issues, feature_category: :team_planning do let(:label_c) { create(:label, title: 'bar', project: project) } it 'avoids N+1 queries' do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get api("/projects/#{project.id}/issues?with_labels_details=true", user) - end.count + end new_issue = create(:issue, project: project) create(:label_link, label: label, target: new_issue) @@ -372,7 +372,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do expect do get api("/projects/#{project.id}/issues?with_labels_details=true", user) - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb index 6000fa29dc4..6ba51080bf0 100644 --- a/spec/requests/api/merge_requests_spec.rb +++ b/spec/requests/api/merge_requests_spec.rb @@ -193,7 +193,7 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc control = ActiveRecord::QueryRecorder.new do get api(path, user) - end.count + end mr = create(:merge_request) create(:label_link, label: label, target: mr) @@ -1232,7 +1232,7 @@ RSpec.describe API::MergeRequests, :aggregate_failures, feature_category: :sourc it 'avoids N+1 queries', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/330335' do control = ActiveRecord::QueryRecorder.new do get api("/projects/#{project.id}/merge_requests", user) - end.count + end create(:merge_request, author: user, assignees: [user], source_project: project, target_project: project, created_at: base_time) diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb index 49471b98eba..a73f3366dcb 100644 --- a/spec/requests/api/project_import_spec.rb +++ b/spec/requests/api/project_import_spec.rb @@ -62,9 +62,9 @@ RSpec.describe API::ProjectImport, :aggregate_failures, 
feature_category: :impor it_behaves_like 'requires import source to be enabled' it 'executes a limited number of queries', :use_clean_rails_redis_caching do - control_count = ActiveRecord::QueryRecorder.new { subject }.count + control = ActiveRecord::QueryRecorder.new { subject } - expect(control_count).to be <= 111 + expect(control.count).to be <= 111 end it 'schedules an import using a namespace' do diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb index b8e029385e3..cf6152a9b67 100644 --- a/spec/requests/api/projects_spec.rb +++ b/spec/requests/api/projects_spec.rb @@ -1152,7 +1152,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and expect do request - end.not_to exceed_all_query_limit(control.count) + end.not_to exceed_all_query_limit(control) end end @@ -3799,7 +3799,7 @@ RSpec.describe API::Projects, :aggregate_failures, feature_category: :groups_and expect do post api("/projects/#{project.id}/import_project_members/#{measure_project.id}", user) - end.not_to exceed_all_query_limit(control.count).with_threshold(unresolved_n_plus_ones) + end.not_to exceed_all_query_limit(control).with_threshold(unresolved_n_plus_ones) end it 'returns 200 when it successfully imports members from another project' do diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb index 493dc4e72c6..0c811a21fb0 100644 --- a/spec/requests/api/releases_spec.rb +++ b/spec/requests/api/releases_spec.rb @@ -156,9 +156,9 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or create(:release, :with_evidence, project: project, tag: 'v0.1', author: maintainer) create(:release_link, release: project.releases.first) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get api("/projects/#{project.id}/releases", maintainer) - end.count + end create_list(:release, 2, :with_evidence, project: 
project, author: maintainer) create_list(:release, 2, project: project) @@ -167,7 +167,7 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or expect do get api("/projects/#{project.id}/releases", maintainer) - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end it 'serializes releases for the first time and read cached data from the second time' do @@ -1715,9 +1715,9 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or subject expect(response).to have_gitlab_http_status(:ok) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do subject - end.count + end subgroups = create_list(:group, 10, parent: group1) projects = create_list(:project, 10, namespace: subgroups[0]) @@ -1725,7 +1725,7 @@ RSpec.describe API::Releases, :aggregate_failures, feature_category: :release_or expect do subject - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end end diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb index 86c4e04ef71..de3460208b7 100644 --- a/spec/requests/api/users_spec.rb +++ b/spec/requests/api/users_spec.rb @@ -265,9 +265,9 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile end it 'avoids N+1 queries when requested by admin' do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get api(path, admin) - end.count + end create_list(:user, 3) @@ -277,19 +277,19 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile expect do get api(path, admin) - end.not_to exceed_all_query_limit(control_count + 3) + end.not_to exceed_all_query_limit(control).with_threshold(3) end it 'avoids N+1 queries when requested by a regular user' do - control_count = 
ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get api(path, user) - end.count + end create_list(:user, 3) expect do get api(path, user) - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end @@ -2272,16 +2272,16 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile it 'avoids N+1 queries' do second_project.add_maintainer(user) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do get api(path, user) - end.count + end deploy_key = create(:deploy_key, user: second_user) create(:deploy_keys_project, project: second_project, deploy_key_id: deploy_key.id) expect do get api(path, user) - end.not_to exceed_query_limit(control_count) + end.not_to exceed_query_limit(control) end end end @@ -2328,15 +2328,15 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile end it 'avoids N+1 queries', :request_store do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do request - end.count + end create_list(:key, 2, user: user) expect do request - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end end @@ -3044,15 +3044,15 @@ RSpec.describe API::Users, :aggregate_failures, feature_category: :user_profile end it 'avoids N+1 queries', :request_store do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do request - end.count + end create_list(:key, 2, user: user) expect do request - end.not_to exceed_all_query_limit(control_count) + end.not_to exceed_all_query_limit(control) end end diff --git a/spec/requests/groups/milestones_controller_spec.rb b/spec/requests/groups/milestones_controller_spec.rb index 54a25333c02..ed24ad6489f 100644 --- 
a/spec/requests/groups/milestones_controller_spec.rb +++ b/spec/requests/groups/milestones_controller_spec.rb @@ -18,14 +18,16 @@ RSpec.describe Groups::MilestonesController, feature_category: :team_planning do public_project = create(:project, :public, :merge_requests_enabled, :issues_enabled, group: public_group) create(:milestone, project: public_project) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get group_milestones_path(public_group, format: :json) }.count + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do + get group_milestones_path(public_group, format: :json) + end projects = create_list(:project, 2, :public, :merge_requests_enabled, :issues_enabled, group: public_group) projects.each do |project| create(:milestone, project: project) end - expect { get group_milestones_path(public_group, format: :json) }.not_to exceed_all_query_limit(control_count) + expect { get group_milestones_path(public_group, format: :json) }.not_to exceed_all_query_limit(control) expect(response).to have_gitlab_http_status(:ok) milestones = json_response @@ -66,11 +68,11 @@ RSpec.describe Groups::MilestonesController, feature_category: :team_planning do it 'avoids N+1 database queries' do perform_request # warm up the cache - control_count = ActiveRecord::QueryRecorder.new { perform_request }.count + control = ActiveRecord::QueryRecorder.new { perform_request } create(:merge_request, milestone: milestone, source_project: project, source_branch: 'fix') - expect { perform_request }.not_to exceed_query_limit(control_count) + expect { perform_request }.not_to exceed_query_limit(control) end end end diff --git a/spec/requests/groups/registry/repositories_controller_spec.rb b/spec/requests/groups/registry/repositories_controller_spec.rb index f54acf118bb..e4818676f81 100644 --- a/spec/requests/groups/registry/repositories_controller_spec.rb +++ b/spec/requests/groups/registry/repositories_controller_spec.rb @@ -20,13 +20,13 @@ RSpec.describe 
Groups::Registry::RepositoriesController, feature_category: :cont create(:container_repository, project: project) endpoint = group_container_registries_path(group, format: :json) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get(endpoint) }.count + control = ActiveRecord::QueryRecorder.new(skip_cached: false) { get(endpoint) } create_list(:project, 2, group: group).each do |project| create_list(:container_repository, 2, project: project) end - expect { get(endpoint) }.not_to exceed_all_query_limit(control_count) + expect { get(endpoint) }.not_to exceed_all_query_limit(control) # sanity check that response is 200 expect(response).to have_gitlab_http_status(:ok) diff --git a/spec/requests/projects/pipelines_controller_spec.rb b/spec/requests/projects/pipelines_controller_spec.rb index aa3fefdef14..8be4fecea04 100644 --- a/spec/requests/projects/pipelines_controller_spec.rb +++ b/spec/requests/projects/pipelines_controller_spec.rb @@ -25,14 +25,14 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte create_pipelines - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get_pipelines_index - end.count + end create_pipelines # There appears to be one extra query for Pipelines#has_warnings? 
for some reason - expect { get_pipelines_index }.not_to exceed_all_query_limit(control_count + 1) + expect { get_pipelines_index }.not_to exceed_all_query_limit(control).with_threshold(1) expect(response).to have_gitlab_http_status(:ok) expect(json_response['pipelines'].count).to eq(11) end @@ -56,9 +56,9 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte it 'does not execute N+1 queries' do request_build_stage - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do request_build_stage - end.count + end create(:ci_build, pipeline: pipeline, stage: 'build') @@ -70,7 +70,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte status: :failed) end - expect { request_build_stage }.not_to exceed_all_query_limit(control_count) + expect { request_build_stage }.not_to exceed_all_query_limit(control) expect(response).to have_gitlab_http_status(:ok) end @@ -134,14 +134,14 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte request_build_stage(retried: true) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do request_build_stage(retried: true) - end.count + end create(:ci_build, :retried, :failed, pipeline: pipeline, stage: 'build') create(:ci_build, :failed, pipeline: pipeline, stage: 'build') - expect { request_build_stage(retried: true) }.not_to exceed_all_query_limit(control_count) + expect { request_build_stage(retried: true) }.not_to exceed_all_query_limit(control) expect(response).to have_gitlab_http_status(:ok) end diff --git a/spec/requests/search_controller_spec.rb b/spec/requests/search_controller_spec.rb index 365b20ad4aa..eaf11653256 100644 --- a/spec/requests/search_controller_spec.rb +++ b/spec/requests/search_controller_spec.rb @@ -112,7 +112,7 @@ RSpec.describe SearchController, type: :request, 
feature_category: :global_searc control = ActiveRecord::QueryRecorder.new { send_search_request(params_for_one) } expect(response.body).to include('search-results') # Confirm search results to prevent false positives - expect { send_search_request(params_for_many) }.not_to exceed_query_limit(control.count) + expect { send_search_request(params_for_many) }.not_to exceed_query_limit(control) expect(response.body).to include('search-results') # Confirm search results to prevent false positives end end @@ -125,7 +125,7 @@ RSpec.describe SearchController, type: :request, feature_category: :global_searc control = ActiveRecord::QueryRecorder.new { send_search_request(params_for_one) } expect(response.body).to include('search-results') # Confirm search results to prevent false positives - expect { send_search_request(params_for_many) }.not_to exceed_query_limit(control.count) + expect { send_search_request(params_for_many) }.not_to exceed_query_limit(control) expect(response.body).to include('search-results') # Confirm search results to prevent false positives end end diff --git a/spec/services/ci/abort_pipelines_service_spec.rb b/spec/services/ci/abort_pipelines_service_spec.rb index 60f3ee11442..af6a70989c9 100644 --- a/spec/services/ci/abort_pipelines_service_spec.rb +++ b/spec/services/ci/abort_pipelines_service_spec.rb @@ -70,12 +70,12 @@ RSpec.describe Ci::AbortPipelinesService, feature_category: :continuous_integrat end it 'avoids N+1 queries' do - control_count = ActiveRecord::QueryRecorder.new { abort_project_pipelines }.count + control = ActiveRecord::QueryRecorder.new { abort_project_pipelines } pipelines = create_list(:ci_pipeline, 5, :running, project: project) create_list(:ci_build, 5, :running, pipeline: pipelines.first) - expect { abort_project_pipelines }.not_to exceed_query_limit(control_count) + expect { abort_project_pipelines }.not_to exceed_query_limit(control) end context 'with live build logs' do diff --git 
a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb index 3d0ce456aa5..a74b820de09 100644 --- a/spec/services/ci/expire_pipeline_cache_service_spec.rb +++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb @@ -106,7 +106,7 @@ RSpec.describe Ci::ExpirePipelineCacheService, feature_category: :continuous_int create(:ci_sources_pipeline, pipeline: pipeline) create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline)) - expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count) + expect { subject.execute(pipeline) }.not_to exceed_query_limit(control) end end diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb index c060c72ffb2..bdb4ed182dc 100644 --- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb +++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb @@ -44,7 +44,7 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s more_artifacts - expect { subject }.not_to exceed_query_limit(control.count) + expect { subject }.not_to exceed_query_limit(control) end end diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb index 1646afde21d..1708f475e6b 100644 --- a/spec/services/ci/retry_job_service_spec.rb +++ b/spec/services/ci/retry_job_service_spec.rb @@ -403,11 +403,11 @@ RSpec.describe Ci::RetryJobService, feature_category: :continuous_integration do end it 'does not cause an N+1 when updating the job ownership' do - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) }.count + control = ActiveRecord::QueryRecorder.new(skip_cached: false) { service.execute(job) } create_list(:ci_build, 2, :skipped, pipeline: pipeline, ci_stage: deploy_stage) - expect { service.execute(job) }.not_to exceed_all_query_limit(control_count) + 
expect { service.execute(job) }.not_to exceed_all_query_limit(control) end end diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb index 4fd4492278d..c5959127f34 100644 --- a/spec/services/ci/update_build_queue_service_spec.rb +++ b/spec/services/ci/update_build_queue_service_spec.rb @@ -331,11 +331,11 @@ RSpec.describe Ci::UpdateBuildQueueService, feature_category: :continuous_integr let!(:project_runner) { create(:ci_runner, :project, :online, projects: [project], tag_list: %w[a b c]) } it 'does execute the same amount of queries regardless of number of runners' do - control_count = ActiveRecord::QueryRecorder.new { subject.tick(build) }.count + control = ActiveRecord::QueryRecorder.new { subject.tick(build) } create_list(:ci_runner, 10, :project, :online, projects: [project], tag_list: %w[b c d]) - expect { subject.tick(build) }.not_to exceed_all_query_limit(control_count) + expect { subject.tick(build) }.not_to exceed_all_query_limit(control) end end end diff --git a/spec/services/issue_links/list_service_spec.rb b/spec/services/issue_links/list_service_spec.rb index b5cc8c4dcdc..f9e5e88aff0 100644 --- a/spec/services/issue_links/list_service_spec.rb +++ b/spec/services/issue_links/list_service_spec.rb @@ -33,7 +33,7 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do end it 'ensures no N+1 queries are made' do - control_count = ActiveRecord::QueryRecorder.new { subject }.count + control = ActiveRecord::QueryRecorder.new { subject } project = create :project, :public milestone = create :milestone, project: project @@ -44,7 +44,7 @@ RSpec.describe IssueLinks::ListService, feature_category: :team_planning do create :issue_link, source: issue_x, target: issue_z create :issue_link, source: issue_y, target: issue_z - expect { subject }.not_to exceed_query_limit(control_count) + expect { subject }.not_to exceed_query_limit(control) end it 'returns related issues JSON' do 
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb index 83dfca923fb..016174f9888 100644 --- a/spec/services/issues/export_csv_service_spec.rb +++ b/spec/services/issues/export_csv_service_spec.rb @@ -175,11 +175,11 @@ RSpec.describe Issues::ExportCsvService, :with_license, feature_category: :team_ let(:labeled_issues) { create_list(:labeled_issue, 2, project: project, author: user, labels: [feature_label, idea_label]) } it 'does not run a query for each label link' do - control_count = ActiveRecord::QueryRecorder.new { csv }.count + control = ActiveRecord::QueryRecorder.new { csv } labeled_issues - expect { csv }.not_to exceed_query_limit(control_count) + expect { csv }.not_to exceed_query_limit(control) expect(csv.count).to eq(4) end diff --git a/spec/services/issues/referenced_merge_requests_service_spec.rb b/spec/services/issues/referenced_merge_requests_service_spec.rb index 4781daf7688..6748292d389 100644 --- a/spec/services/issues/referenced_merge_requests_service_spec.rb +++ b/spec/services/issues/referenced_merge_requests_service_spec.rb @@ -39,13 +39,13 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p context 'performance' do it 'does not run extra queries when extra namespaces are included', :use_clean_rails_memory_store_caching do service.execute(issue) # warm cache - control_count = ActiveRecord::QueryRecorder.new { service.execute(issue) }.count + control = ActiveRecord::QueryRecorder.new { service.execute(issue) } third_project = create(:project, :public) create_closing_mr(source_project: third_project) service.execute(issue) # warm cache - expect { service.execute(issue) }.not_to exceed_query_limit(control_count) + expect { service.execute(issue) }.not_to exceed_query_limit(control) end it 'preloads the head pipeline for each merge request, and its routes' do @@ -58,12 +58,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: 
:team_p end closing_mr_other_project.update!(head_pipeline: create(:ci_pipeline)) - control_count = ActiveRecord::QueryRecorder.new { service.execute(reloaded_issue).each(&pipeline_routes) } + control = ActiveRecord::QueryRecorder.new { service.execute(reloaded_issue).each(&pipeline_routes) } closing_mr.update!(head_pipeline: create(:ci_pipeline)) expect { service.execute(issue).each(&pipeline_routes) } - .not_to exceed_query_limit(control_count) + .not_to exceed_query_limit(control) end it 'only loads issue notes once' do @@ -95,12 +95,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p context 'performance' do it 'does not run a query for each note author', :use_clean_rails_memory_store_caching do service.referenced_merge_requests(issue) # warm cache - control_count = ActiveRecord::QueryRecorder.new { service.referenced_merge_requests(issue) }.count + control = ActiveRecord::QueryRecorder.new { service.referenced_merge_requests(issue) } create(:note, project: project, noteable: issue, author: create(:user)) service.referenced_merge_requests(issue) # warm cache - expect { service.referenced_merge_requests(issue) }.not_to exceed_query_limit(control_count) + expect { service.referenced_merge_requests(issue) }.not_to exceed_query_limit(control) end end end @@ -121,12 +121,12 @@ RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_p context 'performance' do it 'does not run a query for each note author', :use_clean_rails_memory_store_caching do service.closed_by_merge_requests(issue) # warm cache - control_count = ActiveRecord::QueryRecorder.new { service.closed_by_merge_requests(issue) }.count + control = ActiveRecord::QueryRecorder.new { service.closed_by_merge_requests(issue) } create(:note, :system, project: project, noteable: issue, author: create(:user)) service.closed_by_merge_requests(issue) # warm cache - expect { service.closed_by_merge_requests(issue) }.not_to exceed_query_limit(control_count) + 
expect { service.closed_by_merge_requests(issue) }.not_to exceed_query_limit(control) end end end diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb index 2b398210034..3a1474e4fef 100644 --- a/spec/services/labels/available_labels_service_spec.rb +++ b/spec/services/labels/available_labels_service_spec.rb @@ -42,11 +42,15 @@ RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning it 'do not cause additional query for finding labels' do label_titles = [project_label.title] - control_count = ActiveRecord::QueryRecorder.new { described_class.new(user, project, labels: label_titles).find_or_create_by_titles } + control = ActiveRecord::QueryRecorder.new do + described_class.new(user, project, labels: label_titles).find_or_create_by_titles + end new_label = create(:label, project: project) label_titles = [project_label.title, new_label.title] - expect { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }.not_to exceed_query_limit(control_count) + expect do + described_class.new(user, project, labels: label_titles).find_or_create_by_titles + end.not_to exceed_query_limit(control) end end end diff --git a/spec/services/merge_requests/pushed_branches_service_spec.rb b/spec/services/merge_requests/pushed_branches_service_spec.rb index de99fb244d3..bcde2fd5165 100644 --- a/spec/services/merge_requests/pushed_branches_service_spec.rb +++ b/spec/services/merge_requests/pushed_branches_service_spec.rb @@ -37,11 +37,11 @@ RSpec.describe MergeRequests::PushedBranchesService, feature_category: :source_c end it 'returns empty result without any SQL query performed' do - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do expect(service.execute).to be_empty - end.count + end - expect(control_count).to be_zero + expect(control.count).to be_zero end end end diff --git 
a/spec/services/merge_requests/reload_diffs_service_spec.rb b/spec/services/merge_requests/reload_diffs_service_spec.rb index 77056cbe541..a6654989374 100644 --- a/spec/services/merge_requests/reload_diffs_service_spec.rb +++ b/spec/services/merge_requests/reload_diffs_service_spec.rb @@ -45,11 +45,11 @@ RSpec.describe MergeRequests::ReloadDiffsService, :use_clean_rails_memory_store_ current_user merge_request - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do subject.execute - end.count + end - expect { subject.execute }.not_to exceed_query_limit(control_count) + expect { subject.execute }.not_to exceed_query_limit(control) end end end diff --git a/spec/services/notification_recipients/build_service_spec.rb b/spec/services/notification_recipients/build_service_spec.rb index bfd1dcd7d80..b4788428f14 100644 --- a/spec/services/notification_recipients/build_service_spec.rb +++ b/spec/services/notification_recipients/build_service_spec.rb @@ -21,13 +21,13 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla service.build_new_note_recipients(note) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do service.build_new_note_recipients(note) end create_user - expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control_count).with_threshold(threshold) + expect { service.build_new_note_recipients(note) }.not_to exceed_query_limit(control).with_threshold(threshold) end end @@ -76,13 +76,15 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla service.build_new_review_recipients(review) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do service.build_new_review_recipients(review) end create_user - expect { service.build_new_review_recipients(review) }.not_to exceed_query_limit(control_count).with_threshold(threshold) + expect do + 
service.build_new_review_recipients(review) + end.not_to exceed_query_limit(control).with_threshold(threshold) end end @@ -130,13 +132,13 @@ RSpec.describe NotificationRecipients::BuildService, feature_category: :team_pla service.build_requested_review_recipients(note) - control_count = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do service.build_requested_review_recipients(note) end create_user - expect { service.build_requested_review_recipients(note) }.not_to exceed_query_limit(control_count) + expect { service.build_requested_review_recipients(note) }.not_to exceed_query_limit(control) end end end diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb index 30fcdc27a78..39d7e6a84d4 100644 --- a/spec/services/projects/fork_service_spec.rb +++ b/spec/services/projects/fork_service_spec.rb @@ -224,6 +224,7 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management it "creates fork with lowest level" do forked_project = fork_project(@from_project, @to_user, using_service: true) + expect(forked_project).to be_persisted expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) end end @@ -233,10 +234,11 @@ RSpec.describe Projects::ForkService, feature_category: :source_code_management stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC, Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PRIVATE]) end - it "creates fork with private visibility levels" do + it "doesn't create a fork" do forked_project = fork_project(@from_project, @to_user, using_service: true) - expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE) + expect(forked_project).not_to be_persisted + expect(forked_project.errors[:visibility_level]).to eq ['private has been restricted by your GitLab administrator'] end end end diff --git a/spec/services/projects/participants_service_spec.rb 
b/spec/services/projects/participants_service_spec.rb index 2d048d5258d..167df7996ca 100644 --- a/spec/services/projects/participants_service_spec.rb +++ b/spec/services/projects/participants_service_spec.rb @@ -43,27 +43,27 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj it 'avoids N+1 UserDetail queries' do project.add_developer(create(:user)) - control_count = ActiveRecord::QueryRecorder.new { run_service.to_a }.count + control = ActiveRecord::QueryRecorder.new { run_service.to_a } BatchLoader::Executor.clear_current project.add_developer(create(:user, status: build(:user_status, availability: :busy))) - expect { run_service.to_a }.not_to exceed_query_limit(control_count) + expect { run_service.to_a }.not_to exceed_query_limit(control) end it 'avoids N+1 groups queries' do group_1 = create(:group) group_1.add_owner(user) - control_count = ActiveRecord::QueryRecorder.new { run_service }.count + control = ActiveRecord::QueryRecorder.new { run_service } BatchLoader::Executor.clear_current group_2 = create(:group) group_2.add_owner(user) - expect { run_service }.not_to exceed_query_limit(control_count) + expect { run_service }.not_to exceed_query_limit(control) end end diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb index 1b5300672e3..d77a68288a5 100644 --- a/spec/services/repositories/changelog_service_spec.rb +++ b/spec/services/repositories/changelog_service_spec.rb @@ -164,7 +164,7 @@ RSpec.describe Repositories::ChangelogService, feature_category: :source_code_ma RequestStore.clear! 
- expect { request.call(sha3) }.not_to exceed_query_limit(control.count) + expect { request.call(sha3) }.not_to exceed_query_limit(control) end context 'when one of commits does not exist' do diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb index 6e246f21e84..df00859fd52 100644 --- a/spec/services/todo_service_spec.rb +++ b/spec/services/todo_service_spec.rb @@ -1276,9 +1276,9 @@ RSpec.describe TodoService, feature_category: :team_planning do # Excluding queries for user permissions because those do execute N+1 queries allow_any_instance_of(User).to receive(:can?).and_return(true) - control_count = ActiveRecord::QueryRecorder.new { service.update_note(note_mentioning_1_user, author, skip_users) }.count + control = ActiveRecord::QueryRecorder.new { service.update_note(note_mentioning_1_user, author, skip_users) } - expect { service.update_note(note_mentioning_3_users, author, skip_users) }.not_to exceed_query_limit(control_count) + expect { service.update_note(note_mentioning_3_users, author, skip_users) }.not_to exceed_query_limit(control) end end diff --git a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb index 63ff189ede5..cccf1a2cfa8 100644 --- a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb +++ b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb @@ -14,7 +14,7 @@ RSpec.describe Todos::Destroy::DestroyedIssuableService, feature_category: :team let_it_be(:done_todo) { create(:todo, :done, project: target.project, target: target, user: user) } it 'deletes todos for specified target ID and type' do - control_count = ActiveRecord::QueryRecorder.new { subject }.count + control = ActiveRecord::QueryRecorder.new { subject } # Create more todos for the target create(:todo, :pending, project: target.project, target: target, user: user) @@ -22,7 +22,7 @@ RSpec.describe Todos::Destroy::DestroyedIssuableService, feature_category: 
:team create(:todo, :done, project: target.project, target: target, user: user) create(:todo, :done, project: target.project, target: target, user: user) - expect { subject }.not_to exceed_query_limit(control_count) + expect { subject }.not_to exceed_query_limit(control) end it 'invalidates todos cache counts of todo users', :use_clean_rails_redis_caching do diff --git a/spec/services/user_project_access_changed_service_spec.rb b/spec/services/user_project_access_changed_service_spec.rb index a50bd3ee2f1..8236d892072 100644 --- a/spec/services/user_project_access_changed_service_spec.rb +++ b/spec/services/user_project_access_changed_service_spec.rb @@ -77,7 +77,7 @@ RSpec.describe UserProjectAccessChangedService, feature_category: :system_access it 'avoids N+1 cached queries', :use_sql_query_cache, :request_store do # Run this once to establish a baseline - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do service.execute end @@ -87,7 +87,7 @@ RSpec.describe UserProjectAccessChangedService, feature_category: :system_access .with([[1], [2], [3], [4], [5]]) .and_return(10) - expect { service.execute }.not_to exceed_all_query_limit(control_count.count) + expect { service.execute }.not_to exceed_all_query_limit(control) end end end diff --git a/spec/services/users/update_todo_count_cache_service_spec.rb b/spec/services/users/update_todo_count_cache_service_spec.rb index eec637cf5b4..d69a4ba99b7 100644 --- a/spec/services/users/update_todo_count_cache_service_spec.rb +++ b/spec/services/users/update_todo_count_cache_service_spec.rb @@ -44,9 +44,9 @@ RSpec.describe Users::UpdateTodoCountCacheService, feature_category: :team_plann end it 'avoids N+1 queries' do - control_count = ActiveRecord::QueryRecorder.new { execute_single }.count + control = ActiveRecord::QueryRecorder.new { execute_single } - expect { execute_all }.not_to exceed_query_limit(control_count) + expect { execute_all 
}.not_to exceed_query_limit(control) end it 'executes one query per batch of users' do diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml index 0715e56c130..cb6cffd064f 100644 --- a/spec/support/rspec_order_todo.yml +++ b/spec/support/rspec_order_todo.yml @@ -7116,7 +7116,6 @@ - './spec/models/concerns/counter_attribute_spec.rb' - './spec/models/concerns/cron_schedulable_spec.rb' - './spec/models/concerns/cross_database_modification_spec.rb' -- './spec/models/concerns/database_event_tracking_spec.rb' - './spec/models/concerns/database_reflection_spec.rb' - './spec/models/concerns/delete_with_limit_spec.rb' - './spec/models/concerns/deployment_platform_spec.rb' diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb index c921da10347..94208e29d77 100644 --- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb +++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb @@ -125,9 +125,9 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do group_a.add_owner(user) create(:project, :import_started, import_type: provider, namespace: user.namespace) - control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do + control = ActiveRecord::QueryRecorder.new(skip_cached: false) do get :status, format: :json - end.count + end stub_client(repos: [repo, org_repo], orgs: []) group_b = create(:group) @@ -135,7 +135,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do create(:project, :import_started, import_type: provider, namespace: user.namespace) expect { get :status, format: :json } - .not_to exceed_all_query_limit(control_count) + .not_to exceed_all_query_limit(control) end context 'when user is not allowed to import projects' do diff --git 
a/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb b/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb index 446bc4cd92f..461dcf2fcb6 100644 --- a/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb +++ b/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb @@ -65,7 +65,7 @@ RSpec.shared_examples 'issuables list meta-data' do |issuable_type, action = nil issuable.update!(source_project: fork_project(project)) end - expect { get_action(action, project) }.not_to exceed_query_limit(control.count) + expect { get_action(action, project) }.not_to exceed_query_limit(control) end describe "when given empty collection" do diff --git a/spec/support/shared_examples/controllers/snippet_shared_examples.rb b/spec/support/shared_examples/controllers/snippet_shared_examples.rb index f49cc979368..bf8183525a9 100644 --- a/spec/support/shared_examples/controllers/snippet_shared_examples.rb +++ b/spec/support/shared_examples/controllers/snippet_shared_examples.rb @@ -17,12 +17,12 @@ RSpec.shared_examples 'snippets views' do project = create(:project, namespace: user.namespace) create(:project_snippet, project: project, author: user) - control_count = ActiveRecord::QueryRecorder.new { get(:index, params: params) }.count + control = ActiveRecord::QueryRecorder.new { get(:index, params: params) } project = create(:project, namespace: user.namespace) create(:project_snippet, project: project, author: user) - expect { get(:index, params: params) }.not_to exceed_query_limit(control_count) + expect { get(:index, params: params) }.not_to exceed_query_limit(control) end end end diff --git a/spec/support/shared_examples/models/database_event_tracking_shared_examples.rb b/spec/support/shared_examples/models/database_event_tracking_shared_examples.rb deleted file mode 100644 index 56b36b3ea07..00000000000 --- 
a/spec/support/shared_examples/models/database_event_tracking_shared_examples.rb +++ /dev/null @@ -1,49 +0,0 @@ -# frozen_string_literal: true - -RSpec.shared_examples 'database events tracking' do - describe 'events tracking' do - # required definitions: - # :record, :update_params - # - # other available attributes: - # :project, :namespace - - let(:user) { nil } - let(:category) { described_class.to_s } - let(:label) { described_class.table_name } - let(:action) { "database_event_#{property}" } - let(:record_tracked_attributes) { record.attributes.slice(*described_class::SNOWPLOW_ATTRIBUTES.map(&:to_s)) } - let(:base_extra) { record_tracked_attributes.merge(project: try(:project), namespace: try(:namespace)) } - - before do - allow(Gitlab::Tracking).to receive(:database_event).and_call_original - end - - describe '#create' do - it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do - subject(:create_record) { record } - - let(:extra) { base_extra } - let(:property) { 'create' } - end - end - - describe '#update', :freeze_time do - it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do - subject(:update_record) { record.update!(update_params) } - - let(:extra) { base_extra.merge(update_params.stringify_keys) } - let(:property) { 'update' } - end - end - - describe '#destroy' do - it_behaves_like 'Snowplow event tracking', overrides: { tracking_method: :database_event } do - subject(:delete_record) { record.destroy! 
} - - let(:extra) { base_extra } - let(:property) { 'destroy' } - end - end - end -end diff --git a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb index 2b46c8c8fb9..692320d45d5 100644 --- a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb +++ b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb @@ -175,15 +175,15 @@ RSpec.shared_examples 'a class that supports relative positioning' do create_items_with_positions(10..12) a, b, c, d, e, f, *xs = create_items_with_positions([nil] * 10) - baseline = ActiveRecord::QueryRecorder.new do + control = ActiveRecord::QueryRecorder.new do described_class.move_nulls_to_end([a, b]) end expect { described_class.move_nulls_to_end([c, d, e, f]) } - .not_to exceed_query_limit(baseline) + .not_to exceed_query_limit(control) expect { described_class.move_nulls_to_end(xs) } - .not_to exceed_query_limit(baseline.count) + .not_to exceed_query_limit(control) end end diff --git a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb index b7247f1f243..2976018b60f 100644 --- a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb +++ b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb @@ -12,7 +12,7 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do # See also: https://gitlab.com/gitlab-org/gitlab/-/issues/373151 relax_count = 4 - expect { serialize(grouping: true) }.not_to exceed_query_limit(control.count + relax_count) + expect { serialize(grouping: true) }.not_to exceed_query_limit(control).with_threshold(relax_count) end it 'avoids N+1 database queries without grouping', :request_store do @@ -27,7 +27,7 @@ RSpec.shared_examples 'avoid N+1 on environments serialization' do # See also: 
https://gitlab.com/gitlab-org/gitlab/-/issues/373151 relax_count = 5 - expect { serialize(grouping: false) }.not_to exceed_query_limit(control.count + relax_count) + expect { serialize(grouping: false) }.not_to exceed_query_limit(control).with_threshold(relax_count) end it 'does not preload for environments that does not exist in the page', :request_store do diff --git a/spec/support/shared_examples/services/count_service_shared_examples.rb b/spec/support/shared_examples/services/count_service_shared_examples.rb index 54c6ff79976..42fe170d2c4 100644 --- a/spec/support/shared_examples/services/count_service_shared_examples.rb +++ b/spec/support/shared_examples/services/count_service_shared_examples.rb @@ -10,10 +10,10 @@ RSpec.shared_examples 'a counter caching service' do describe '#count' do it 'caches the count', :request_store do subject.delete_cache - control_count = ActiveRecord::QueryRecorder.new { subject.count }.count + control = ActiveRecord::QueryRecorder.new { subject.count } subject.delete_cache - expect { 2.times { subject.count } }.not_to exceed_query_limit(control_count) + expect { 2.times { subject.count } }.not_to exceed_query_limit(control) end end diff --git a/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb b/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb index d2b52468c25..459c957091c 100644 --- a/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb +++ b/spec/support/shared_examples/services/destroy_label_links_shared_examples.rb @@ -8,13 +8,13 @@ RSpec.shared_examples_for 'service deleting label links of an issuable' do end it 'deletes label links for specified target ID and type' do - control_count = ActiveRecord::QueryRecorder.new { execute }.count + control = ActiveRecord::QueryRecorder.new { execute } # Create more label links for the target create(:label_link, target: target) create(:label_link, target: target) - expect { execute }.not_to 
exceed_query_limit(control_count) + expect { execute }.not_to exceed_query_limit(control) expect(target.reload.label_links.count).to eq(0) end end diff --git a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb index cb544f42765..97dd2aa96d4 100644 --- a/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb +++ b/spec/support/shared_examples/services/packages/debian/generate_distribution_shared_examples.rb @@ -244,10 +244,10 @@ RSpec.shared_examples 'Generate Debian Distribution and component files' do end create_list(:debian_package, 10, project: project, published_in: project_distribution) - control_count = ActiveRecord::QueryRecorder.new { subject2 }.count + control = ActiveRecord::QueryRecorder.new { subject2 } create_list(:debian_package, 10, project: project, published_in: project_distribution) - expect { subject3 }.not_to exceed_query_limit(control_count) + expect { subject3 }.not_to exceed_query_limit(control) end end diff --git a/spec/workers/jira_connect/sync_project_worker_spec.rb b/spec/workers/jira_connect/sync_project_worker_spec.rb index b617508bb3a..83bce97cd51 100644 --- a/spec/workers/jira_connect/sync_project_worker_spec.rb +++ b/spec/workers/jira_connect/sync_project_worker_spec.rb @@ -51,11 +51,11 @@ RSpec.describe JiraConnect::SyncProjectWorker, factory_default: :keep, feature_c end it 'avoids N+1 database queries' do - control_count = ActiveRecord::QueryRecorder.new { perform(project.id, update_sequence_id) }.count + control = ActiveRecord::QueryRecorder.new { perform(project.id, update_sequence_id) } create(:merge_request, :unique_branches, title: 'TEST-123') - expect { perform(project.id, update_sequence_id) }.not_to exceed_query_limit(control_count) + expect { perform(project.id, update_sequence_id) }.not_to exceed_query_limit(control) end context 'with 
branches to sync' do