Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-01-04 21:07:37 +00:00
parent cc514c362b
commit 808b8561f4
159 changed files with 1328 additions and 1754 deletions

View File

@@ -135,7 +135,6 @@ Gitlab/AvoidGitlabInstanceChecks:
- 'lib/gitlab/gon_helper.rb'
- 'lib/gitlab/monitor/demo_projects.rb'
- 'lib/gitlab/qa.rb'
- 'lib/gitlab/tracking/destinations/database_events_snowplow.rb'
- 'lib/gitlab/tracking/standard_context.rb'
- 'lib/gitlab/usage/metrics/instrumentations/count_ci_internal_pipelines_metric.rb'
- 'lib/gitlab/usage/metrics/instrumentations/count_issues_created_manually_from_alerts_metric.rb'

View File

@@ -4357,7 +4357,6 @@ RSpec/FeatureCategory:
- 'spec/models/concerns/counter_attribute_spec.rb'
- 'spec/models/concerns/cron_schedulable_spec.rb'
- 'spec/models/concerns/cross_database_modification_spec.rb'
- 'spec/models/concerns/database_event_tracking_spec.rb'
- 'spec/models/concerns/database_reflection_spec.rb'
- 'spec/models/concerns/delete_with_limit_spec.rb'
- 'spec/models/concerns/deployment_platform_spec.rb'

View File

@@ -2425,7 +2425,6 @@ RSpec/NamedSubject:
- 'spec/lib/gitlab/terraform_registry_token_spec.rb'
- 'spec/lib/gitlab/throttle_spec.rb'
- 'spec/lib/gitlab/time_tracking_formatter_spec.rb'
- 'spec/lib/gitlab/tracking/destinations/database_events_snowplow_spec.rb'
- 'spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb'
- 'spec/lib/gitlab/tracking/destinations/snowplow_spec.rb'
- 'spec/lib/gitlab/tracking_spec.rb'

View File

@@ -3052,7 +3052,6 @@ Style/InlineDisableAnnotation:
- 'spec/models/ci/pipeline_spec.rb'
- 'spec/models/ci/runner_manager_build_spec.rb'
- 'spec/models/concerns/bulk_insertable_associations_spec.rb'
- 'spec/models/concerns/database_event_tracking_spec.rb'
- 'spec/models/concerns/encrypted_user_password_spec.rb'
- 'spec/models/concerns/legacy_bulk_insert_spec.rb'
- 'spec/models/concerns/manual_inverse_association_spec.rb'

View File

@@ -1,22 +1,17 @@
<script>
import { GlAlert } from '@gitlab/ui';
import { n__ } from '~/locale';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import CandidateListRow from '~/ml/model_registry/components/candidate_list_row.vue';
import { makeLoadCandidatesErrorMessage, NO_CANDIDATES_LABEL } from '../translations';
import getModelCandidatesQuery from '../graphql/queries/get_model_candidates.query.graphql';
import { GRAPHQL_PAGE_SIZE } from '../constants';
import SearchableList from './searchable_list.vue';
import CandidateListRow from './candidate_list_row.vue';
export default {
name: 'MlCandidateList',
components: {
GlAlert,
CandidateListRow,
PackagesListLoader,
RegistryList,
SearchableList,
},
props: {
modelId: {
@@ -26,7 +21,7 @@ export default {
},
data() {
return {
modelVersions: {},
candidates: {},
errorMessage: undefined,
};
},
@@ -49,18 +44,12 @@ export default {
gid() {
return convertToGraphQLId('Ml::Model', this.modelId);
},
isListEmpty() {
return this.count === 0;
},
isLoading() {
return this.$apollo.queries.candidates.loading;
},
pageInfo() {
return this.candidates?.pageInfo ?? {};
},
listTitle() {
return n__('%d candidate', '%d candidates', this.count);
},
queryVariables() {
return {
id: this.gid,
@@ -70,18 +59,12 @@ export default {
items() {
return this.candidates?.nodes ?? [];
},
count() {
return this.candidates?.count ?? 0;
},
},
methods: {
fetchPage({ first = null, last = null, before = null, after = null } = {}) {
fetchPage(newPageInfo) {
const variables = {
...this.queryVariables,
first,
last,
before,
after,
...newPageInfo,
};
this.$apollo.queries.candidates.fetchMore({
@@ -91,18 +74,6 @@ export default {
},
});
},
fetchPreviousCandidatesPage() {
this.fetchPage({
last: GRAPHQL_PAGE_SIZE,
before: this.pageInfo?.startCursor,
});
},
fetchNextCandidatesPage() {
this.fetchPage({
first: GRAPHQL_PAGE_SIZE,
after: this.pageInfo?.endCursor,
});
},
},
i18n: {
NO_CANDIDATES_LABEL,
@@ -111,29 +82,19 @@ export default {
</script>
<template>
<div>
<div v-if="isLoading">
<packages-list-loader />
</div>
<gl-alert v-else-if="errorMessage" variant="danger" :dismissible="false">{{
errorMessage
}}</gl-alert>
<div v-else-if="isListEmpty" class="gl-text-secondary">
{{ $options.i18n.NO_CANDIDATES_LABEL }}
</div>
<div v-else>
<registry-list
:hidden-delete="true"
:is-loading="isLoading"
:items="items"
:pagination="pageInfo"
:title="listTitle"
@prev-page="fetchPreviousCandidatesPage"
@next-page="fetchNextCandidatesPage"
>
<template #default="{ item }">
<candidate-list-row :candidate="item" />
</template>
</registry-list>
</div>
<searchable-list
:page-info="pageInfo"
:items="items"
:error-message="errorMessage"
@fetch-page="fetchPage"
>
<template #empty-state>
{{ $options.i18n.NO_CANDIDATES_LABEL }}
</template>
<template #item="{ item }">
<candidate-list-row :candidate="item" />
</template>
</searchable-list>
</div>
</template>

View File

@@ -1,23 +1,18 @@
<script>
import { GlAlert } from '@gitlab/ui';
import { n__ } from '~/locale';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { makeLoadVersionsErrorMessage } from '~/ml/model_registry/translations';
import { convertToGraphQLId } from '~/graphql_shared/utils';
import getModelVersionsQuery from '../graphql/queries/get_model_versions.query.graphql';
import { GRAPHQL_PAGE_SIZE, MODEL_ENTITIES } from '../constants';
import SearchableList from './searchable_list.vue';
import EmptyState from './empty_state.vue';
import ModelVersionRow from './model_version_row.vue';
export default {
components: {
EmptyState,
GlAlert,
ModelVersionRow,
PackagesListLoader,
RegistryList,
SearchableList,
},
props: {
modelId: {
@@ -50,18 +45,12 @@ export default {
gid() {
return convertToGraphQLId('Ml::Model', this.modelId);
},
isListEmpty() {
return this.count === 0;
},
isLoading() {
return this.$apollo.queries.modelVersions.loading;
},
pageInfo() {
return this.modelVersions?.pageInfo ?? {};
},
listTitle() {
return n__('%d version', '%d versions', this.versions.length);
},
queryVariables() {
return {
id: this.gid,
@@ -71,31 +60,12 @@ export default {
versions() {
return this.modelVersions?.nodes ?? [];
},
count() {
return this.modelVersions?.count ?? 0;
},
},
methods: {
fetchPreviousVersionsPage() {
fetchPage(pageInfo) {
const variables = {
...this.queryVariables,
first: null,
last: GRAPHQL_PAGE_SIZE,
before: this.pageInfo?.startCursor,
};
this.$apollo.queries.modelVersions.fetchMore({
variables,
updateQuery: (previousResult, { fetchMoreResult }) => {
return fetchMoreResult;
},
});
},
fetchNextVersionsPage() {
const variables = {
...this.queryVariables,
first: GRAPHQL_PAGE_SIZE,
last: null,
after: this.pageInfo?.endCursor,
...pageInfo,
};
this.$apollo.queries.modelVersions.fetchMore({
@@ -110,28 +80,18 @@ export default {
};
</script>
<template>
<div>
<div v-if="isLoading">
<packages-list-loader />
</div>
<gl-alert v-else-if="errorMessage" variant="danger" :dismissible="false">{{
errorMessage
}}</gl-alert>
<empty-state v-else-if="isListEmpty" :entity-type="$options.modelVersionEntity" />
<div v-else>
<registry-list
:hidden-delete="true"
:is-loading="isLoading"
:items="versions"
:pagination="pageInfo"
:title="listTitle"
@prev-page="fetchPreviousVersionsPage"
@next-page="fetchNextVersionsPage"
>
<template #default="{ item }">
<model-version-row :model-version="item" />
</template>
</registry-list>
</div>
</div>
<searchable-list
:page-info="pageInfo"
:items="versions"
:error-message="errorMessage"
@fetch-page="fetchPage"
>
<template #empty-state>
<empty-state :entity-type="$options.modelVersionEntity" />
</template>
<template #item="{ item }">
<model-version-row :model-version="item" />
</template>
</searchable-list>
</template>

View File

@@ -0,0 +1,79 @@
<script>
import { GlAlert } from '@gitlab/ui';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import { GRAPHQL_PAGE_SIZE } from '~/ml/model_registry/constants';
export default {
name: 'SearchableList',
components: { PackagesListLoader, RegistryList, GlAlert },
props: {
items: {
type: Array,
required: true,
},
pageInfo: {
type: Object,
required: true,
},
isLoading: {
type: Boolean,
required: false,
default: false,
},
errorMessage: {
type: String,
required: false,
default: '',
},
},
computed: {
isListEmpty() {
return this.items.length === 0;
},
},
methods: {
prevPage() {
const pageInfo = {
first: null,
last: GRAPHQL_PAGE_SIZE,
before: this.pageInfo.startCursor,
};
this.$emit('fetch-page', pageInfo);
},
nextPage() {
const pageInfo = {
first: GRAPHQL_PAGE_SIZE,
last: null,
after: this.pageInfo.endCursor,
};
this.$emit('fetch-page', pageInfo);
},
},
};
</script>
<template>
<div>
<packages-list-loader v-if="isLoading" />
<gl-alert v-else-if="errorMessage" variant="danger" :dismissible="false">
{{ errorMessage }}
</gl-alert>
<slot v-else-if="isListEmpty" name="empty-state"></slot>
<registry-list
v-else
:hidden-delete="true"
:is-loading="isLoading"
:items="items"
:pagination="pageInfo"
@prev-page="prevPage"
@next-page="nextPage"
>
<template #default="{ item }">
<slot name="item" :item="item"></slot>
</template>
</registry-list>
</div>
</template>

View File

@@ -7,7 +7,6 @@ module Analytics
self.table_name = :analytics_cycle_analytics_group_stages
include DatabaseEventTracking
include Analytics::CycleAnalytics::Stageable
include Analytics::CycleAnalytics::Parentable
@@ -38,22 +37,6 @@ module Analytics
.select("DISTINCT ON(stage_event_hash_id) #{quoted_table_name}.*")
end
SNOWPLOW_ATTRIBUTES = %i[
id
created_at
updated_at
relative_position
start_event_identifier
end_event_identifier
group_id
start_event_label_id
end_event_label_id
hidden
custom
name
group_value_stream_id
].freeze
private
def max_stages_count

View File

@@ -1,52 +0,0 @@
# frozen_string_literal: true
module DatabaseEventTracking
extend ActiveSupport::Concern
included do
after_create_commit :publish_database_create_event
after_destroy_commit :publish_database_destroy_event
after_update_commit :publish_database_update_event
end
def publish_database_create_event
publish_database_event('create')
end
def publish_database_destroy_event
publish_database_event('destroy')
end
def publish_database_update_event
publish_database_event('update')
end
def publish_database_event(name)
# Gitlab::Tracking#event is triggering Snowplow event
# Snowplow events are sent with usage of
# https://snowplow.github.io/snowplow-ruby-tracker/SnowplowTracker/AsyncEmitter.html
# that reports data asynchronously and does not impact performance nor carries a risk of
# rollback in case of error
Gitlab::Tracking.database_event(
self.class.to_s,
"database_event_#{name}",
label: self.class.table_name,
project: try(:project),
namespace: (try(:group) || try(:namespace)) || try(:project)&.namespace,
property: name,
**filtered_record_attributes
)
rescue StandardError => err
# this rescue should be a dead code due to utilization of AsyncEmitter, however
# since this concern is expected to be included in every model, it is better to
# prevent against any unexpected outcome
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(err)
end
def filtered_record_attributes
attributes
.with_indifferent_access
.slice(*self.class::SNOWPLOW_ATTRIBUTES)
end
end

View File

@@ -7,6 +7,7 @@ module Integrations
field :external_wiki_url,
section: SECTION_TYPE_CONNECTION,
title: -> { s_('ExternalWikiService|External wiki URL') },
description: -> { s_('ExternalWikiService|URL of the external wiki.') },
placeholder: -> { s_('ExternalWikiService|https://example.com/xxx/wiki/...') },
help: -> { s_('ExternalWikiService|Enter the URL to the external wiki.') },
required: true

View File

@@ -1,8 +1,6 @@
# frozen_string_literal: true
class MergeRequest::Metrics < ApplicationRecord
include DatabaseEventTracking
belongs_to :merge_request, inverse_of: :metrics
belongs_to :pipeline, class_name: 'Ci::Pipeline', foreign_key: :pipeline_id
belongs_to :latest_closed_by, class_name: 'User'
@@ -33,8 +31,7 @@ class MergeRequest::Metrics < ApplicationRecord
RETURNING id, #{inserted_columns.join(', ')}
SQL
result = connection.execute(sql).first
new(result).publish_database_create_event
connection.execute(sql)
end
end
@@ -48,31 +45,6 @@ class MergeRequest::Metrics < ApplicationRecord
with_valid_time_to_merge
.pick(time_to_merge_expression)
end
SNOWPLOW_ATTRIBUTES = %i[
id
merge_request_id
latest_build_started_at
latest_build_finished_at
first_deployed_to_production_at
merged_at
created_at
updated_at
pipeline_id
merged_by_id
latest_closed_by_id
latest_closed_at
first_comment_at
first_commit_at
last_commit_at
diff_size
modified_paths_size
commits_count
first_approved_at
first_reassigned_at
added_lines
removed_lines
].freeze
end
MergeRequest::Metrics.prepend_mod_with('MergeRequest::Metrics')

View File

@@ -1,8 +0,0 @@
---
name: use_merge_approval_rules_when_merged
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/129165
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/422380
milestone: '16.4'
type: development
group: group::code review
default_enabled: false

View File

@@ -150,8 +150,8 @@ You may also want to back up any TLS keys and certificates (`/etc/gitlab/ssl`, `
[SSH host keys](https://superuser.com/questions/532040/copy-ssh-keys-from-one-server-to-another-server/532079#532079)
to avoid man-in-the-middle attack warnings if you have to perform a full machine restore.
In the unlikely event that the secrets file is lost, see the
[troubleshooting section](#when-the-secrets-file-is-lost).
In the unlikely event that the secrets file is lost, see
[When the secrets file is lost](../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost).
### Other data
@@ -1452,618 +1452,3 @@ There are a few possible downsides to this:
There is an **experimental** script that attempts to automate this process in
[the Geo team Runbooks project](https://gitlab.com/gitlab-org/geo-team/runbooks/-/tree/main/experimental-online-backup-through-rsync).
## Troubleshooting
The following are possible problems you might encounter, along with potential
solutions.
### When the secrets file is lost
If you didn't [back up the secrets file](#storing-configuration-files), you
must complete several steps to get GitLab working properly again.
The secrets file is responsible for storing the encryption key for the columns
that contain required, sensitive information. If the key is lost, GitLab can't
decrypt those columns, preventing access to the following items:
- [CI/CD variables](../../ci/variables/index.md)
- [Kubernetes / GCP integration](../../user/infrastructure/clusters/index.md)
- [Custom Pages domains](../../user/project/pages/custom_domains_ssl_tls_certification/index.md)
- [Project error tracking](../../operations/error_tracking.md)
- [Runner authentication](../../ci/runners/index.md)
- [Project mirroring](../../user/project/repository/mirror/index.md)
- [Integrations](../../user/project/integrations/index.md)
- [Web hooks](../../user/project/integrations/webhooks.md)
In cases like CI/CD variables and runner authentication, you can experience
unexpected behaviors, such as:
- Stuck jobs.
- 500 errors.
In this case, you must reset all the tokens for CI/CD variables and
runner authentication, as described in more detail in the following
sections. After you reset the tokens, you should be able to visit your
project and see jobs running again.
WARNING:
The steps in this section can potentially lead to **data loss** for the items listed above.
Consider opening a [Support Request](https://support.gitlab.com/hc/en-us/requests/new) if you're a Premium or Ultimate customer.
#### Verify that all values can be decrypted
You can determine if your database contains values that can't be decrypted by using a
[Rake task](../raketasks/check.md#verify-database-values-can-be-decrypted-using-the-current-secrets).
#### Take a backup
You must directly modify GitLab data to work around your lost secrets file.
WARNING:
Be sure to create a full database backup before attempting any changes.
#### Disable user two-factor authentication (2FA)
Users with 2FA enabled can't sign in to GitLab. In that case, you must
[disable 2FA for everyone](../../security/two_factor_authentication.md#for-all-users),
after which users must reactivate 2FA.
#### Reset CI/CD variables
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
1. Examine the `ci_group_variables` and `ci_variables` tables:
```sql
SELECT * FROM public."ci_group_variables";
SELECT * FROM public."ci_variables";
```
These are the variables that you need to delete.
1. Delete all variables:
```sql
DELETE FROM ci_group_variables;
DELETE FROM ci_variables;
```
1. If you know the specific group or project from which you want to delete variables, you can include a `WHERE` clause in your `DELETE`:
```sql
DELETE FROM ci_group_variables WHERE group_id = <GROUPID>;
DELETE FROM ci_variables WHERE project_id = <PROJECTID>;
```
You may need to reconfigure or restart GitLab for the changes to take effect.
#### Reset runner registration tokens
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
1. Clear all tokens for projects, groups, and the entire instance:
WARNING:
The final `UPDATE` operation stops the runners from being able to pick
up new jobs. You must register new runners.
```sql
-- Clear project tokens
UPDATE projects SET runners_token = null, runners_token_encrypted = null;
-- Clear group tokens
UPDATE namespaces SET runners_token = null, runners_token_encrypted = null;
-- Clear instance tokens
UPDATE application_settings SET runners_registration_token_encrypted = null;
-- Clear key used for JWT authentication
-- This may break the $CI_JWT_TOKEN job variable:
-- https://gitlab.com/gitlab-org/gitlab/-/issues/325965
UPDATE application_settings SET encrypted_ci_jwt_signing_key = null;
-- Clear runner tokens
UPDATE ci_runners SET token = null, token_encrypted = null;
```
#### Reset pending pipeline jobs
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
1. Clear all the tokens for pending jobs:
For GitLab 15.3 and earlier:
```sql
-- Clear build tokens
UPDATE ci_builds SET token = null, token_encrypted = null;
```
For GitLab 15.4 and later:
```sql
-- Clear build tokens
UPDATE ci_builds SET token_encrypted = null;
```
A similar strategy can be employed for the remaining features. By removing the
data that can't be decrypted, GitLab can be returned to operation, and the
lost data can be manually replaced.
#### Fix integrations and webhooks
If you've lost your secrets, the [integrations settings](../../user/project/integrations/index.md)
and [webhooks settings](../../user/project/integrations/webhooks.md) pages might display `500` error messages. Lost secrets might also produce `500` errors when you try to access a repository in a project with a previously configured integration or webhook.
The fix is to truncate the affected tables (those containing encrypted columns).
This deletes all your configured integrations, webhooks, and related metadata.
You should verify that the secrets are the root cause before deleting any data.
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
1. Truncate the following tables:
```sql
-- truncate web_hooks table
TRUNCATE integrations, chat_names, issue_tracker_data, jira_tracker_data, slack_integrations, web_hooks, zentao_tracker_data, web_hook_logs CASCADE;
```
### Container registry push failures after restoring from a backup
If you use the [container registry](../../user/packages/container_registry/index.md),
pushes to the registry may fail on a Linux package (Omnibus) instance
after you restore the backup, including the registry data.
These failures mention permission issues in the registry logs, similar to:
```plaintext
level=error
msg="response completed with error"
err.code=unknown
err.detail="filesystem: mkdir /var/opt/gitlab/gitlab-rails/shared/registry/docker/registry/v2/repositories/...: permission denied"
err.message="unknown error"
```
This issue is caused by the restore running as the unprivileged user `git`,
which is unable to assign the correct ownership to the registry files during
the restore process ([issue #62759](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/62759 "Incorrect permissions on registry filesystem after restore")).
To get your registry working again:
```shell
sudo chown -R registry:registry /var/opt/gitlab/gitlab-rails/shared/registry/docker
```
If you changed the default file system location for the registry, run `chown`
against your custom location, instead of `/var/opt/gitlab/gitlab-rails/shared/registry/docker`.
### Backup fails to complete with Gzip error
When running the backup, you may receive a Gzip error message:
```shell
sudo /opt/gitlab/bin/gitlab-backup create
...
Dumping ...
...
gzip: stdout: Input/output error
Backup failed
```
If this happens, examine the following:
- Confirm there is sufficient disk space for the Gzip operation. It's not uncommon for backups that
use the [default strategy](#backup-strategy-option) to require half the instance size
in free disk space during backup creation.
- If NFS is being used, check if the mount option `timeout` is set. The
default is `600`, and changing this to smaller values results in this error.
### Backup fails with `File name too long` error
During backup, you can get the `File name too long` error ([issue #354984](https://gitlab.com/gitlab-org/gitlab/-/issues/354984)). For example:
```plaintext
Problem: <class 'OSError: [Errno 36] File name too long:
```
This error stops the backup script from completing. To fix it, you must truncate the file names that cause the problem. A maximum of 246 characters, including the file extension, is permitted.
WARNING:
The steps in this section can potentially lead to **data loss**. All steps must be followed strictly in the order given.
Consider opening a [Support Request](https://support.gitlab.com/hc/en-us/requests/new) if you're a Premium or Ultimate customer.
Truncating file names to resolve the error involves:
- Cleaning up remote uploaded files that aren't tracked in the database.
- Truncating the file names in the database.
- Rerunning the backup task.
#### Clean up remote uploaded files
A [known issue](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/45425) caused object store uploads to remain after a parent resource was deleted. This issue was [resolved](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/18698).
To fix these files, you must clean up all remote uploaded files that are in the storage but not tracked in the `uploads` database table.
1. List all the object store upload files that can be moved to a lost and found directory if they don't exist in the GitLab database:
```shell
bundle exec rake gitlab:cleanup:remote_upload_files RAILS_ENV=production
```
1. If you are sure you want to delete these files and remove all non-referenced uploaded files, run:
WARNING:
The following action is **irreversible**.
```shell
bundle exec rake gitlab:cleanup:remote_upload_files RAILS_ENV=production DRY_RUN=false
```
#### Truncate the file names referenced by the database
You must truncate the files referenced by the database that are causing the problem. The file names referenced by the database are stored:
- In the `uploads` table.
- In references found in other database tables and columns.
- On the file system.
Truncate the file names in the `uploads` table:
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
1. Search the `uploads` table for file names longer than 246 characters:
The following query selects the `uploads` records with file names longer than 246 characters, in batches of 0 to 10000. Batching improves performance on large GitLab instances whose tables contain thousands of records.
```sql
CREATE TEMP TABLE uploads_with_long_filenames AS
SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, id, path
FROM uploads AS u
WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246;
CREATE INDEX ON uploads_with_long_filenames(row_id);
SELECT
u.id,
u.path,
-- Current file name
(regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] AS current_filename,
-- New file name
CONCAT(
LEFT(SPLIT_PART((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242),
COALESCE(SUBSTRING((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$'))
) AS new_filename,
-- New path
CONCAT(
COALESCE((regexp_match(u.path, '(.*\/).*'))[1], ''),
CONCAT(
LEFT(SPLIT_PART((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242),
COALESCE(SUBSTRING((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$'))
)
) AS new_path
FROM uploads_with_long_filenames AS u
WHERE u.row_id > 0 AND u.row_id <= 10000;
```
Output example:
```postgresql
-[ RECORD 1 ]----+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
id | 34
path | public/@hashed/loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisit.txt
current_filename | loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisit.txt
new_filename | loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelits.txt
new_path | public/@hashed/loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelits.txt
```
Where:
- `current_filename`: a file name that is currently more than 246 characters long.
- `new_filename`: a file name that has been truncated to 246 characters maximum.
- `new_path`: new path considering the `new_filename` (truncated).
After you validate the batch results, advance the batch window (`row_id`) to the next 10000 records (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table.
1. Rename the files found in the `uploads` table from long file names to new truncated file names. The following query rolls back the update so you can check the results safely in a transaction wrapper:
```sql
CREATE TEMP TABLE uploads_with_long_filenames AS
SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, path, id
FROM uploads AS u
WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246;
CREATE INDEX ON uploads_with_long_filenames(row_id);
BEGIN;
WITH updated_uploads AS (
UPDATE uploads
SET
path =
CONCAT(
COALESCE((regexp_match(updatable_uploads.path, '(.*\/).*'))[1], ''),
CONCAT(
LEFT(SPLIT_PART((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242),
COALESCE(SUBSTRING((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$'))
)
)
FROM
uploads_with_long_filenames AS updatable_uploads
WHERE
uploads.id = updatable_uploads.id
AND updatable_uploads.row_id > 0 AND updatable_uploads.row_id <= 10000
RETURNING uploads.*
)
SELECT id, path FROM updated_uploads;
ROLLBACK;
```
After you validate the batch update results, advance the batch window (`row_id`) to the next 10000 records (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table.
1. Validate that the new file names from the previous query are the expected ones. If you are sure you want to truncate the records found in the previous step to 246 characters, run the following:
WARNING:
The following action is **irreversible**.
```sql
CREATE TEMP TABLE uploads_with_long_filenames AS
SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, path, id
FROM uploads AS u
WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246;
CREATE INDEX ON uploads_with_long_filenames(row_id);
UPDATE uploads
SET
path =
CONCAT(
COALESCE((regexp_match(updatable_uploads.path, '(.*\/).*'))[1], ''),
CONCAT(
LEFT(SPLIT_PART((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242),
COALESCE(SUBSTRING((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$'))
)
)
FROM
uploads_with_long_filenames AS updatable_uploads
WHERE
uploads.id = updatable_uploads.id
AND updatable_uploads.row_id > 0 AND updatable_uploads.row_id <= 10000;
```
After you finish the batch update, advance the batch window (`updatable_uploads.row_id`) to the next 10000 records (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table.
Truncate the file names in those database references:
1. Check if those records are referenced somewhere. One way to do this is to dump the database and search for the parent directory name and file name:
1. To dump your database, you can use the following command as an example:
```shell
pg_dump -h /var/opt/gitlab/postgresql/ -d gitlabhq_production > gitlab-dump.tmp
```
1. Then you can search for the references using the `grep` command. Combining the parent directory with the file name narrows the search. For example:
```shell
grep public/alongfilenamehere.txt gitlab-dump.tmp
```
1. Replace those long file names using the new file names obtained from querying the `uploads` table.
Truncate the file names on the file system. You must manually rename the files in your file system to the new file names obtained from querying the `uploads` table.
#### Re-run the backup task
After following all the previous steps, re-run the backup task.
### Restoring database backup fails when `pg_stat_statements` was previously enabled
The GitLab backup of the PostgreSQL database includes all SQL statements required to enable extensions that were
previously enabled in the database.
The `pg_stat_statements` extension can only be enabled or disabled by a PostgreSQL user with `superuser` role.
As the restore process uses a database user with limited permissions, it can't execute the following SQL statements:
```sql
DROP EXTENSION IF EXISTS pg_stat_statements;
CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public;
```
When trying to restore the backup in a PostgreSQL instance that doesn't have the `pg_stat_statements` extension,
the following error message is displayed:
```plaintext
ERROR: permission denied to create extension "pg_stat_statements"
HINT: Must be superuser to create this extension.
ERROR: extension "pg_stat_statements" does not exist
```
When trying to restore in an instance that has the `pg_stat_statements` extension enabled, the cleanup step
fails with an error message similar to the following:
```plaintext
rake aborted!
ActiveRecord::StatementInvalid: PG::InsufficientPrivilege: ERROR: must be owner of view pg_stat_statements
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:42:in `block (4 levels) in <top (required)>'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `each'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `block (3 levels) in <top (required)>'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/backup.rake:71:in `block (3 levels) in <top (required)>'
/opt/gitlab/embedded/bin/bundle:23:in `load'
/opt/gitlab/embedded/bin/bundle:23:in `<main>'
Caused by:
PG::InsufficientPrivilege: ERROR: must be owner of view pg_stat_statements
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:42:in `block (4 levels) in <top (required)>'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `each'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `block (3 levels) in <top (required)>'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/backup.rake:71:in `block (3 levels) in <top (required)>'
/opt/gitlab/embedded/bin/bundle:23:in `load'
/opt/gitlab/embedded/bin/bundle:23:in `<main>'
Tasks: TOP => gitlab:db:drop_tables
(See full trace by running task with --trace)
```
#### Prevent the dump file from including `pg_stat_statements`
To prevent the inclusion of the extension in the PostgreSQL dump file that is part of the backup bundle,
enable the extension in any schema except the `public` schema:
```sql
CREATE SCHEMA adm;
CREATE EXTENSION pg_stat_statements SCHEMA adm;
```
If the extension was previously enabled in the `public` schema, move it to a new one:
```sql
CREATE SCHEMA adm;
ALTER EXTENSION pg_stat_statements SET SCHEMA adm;
```
To query the `pg_stat_statements` data after changing the schema, prefix the view name with the new schema:
```sql
SELECT * FROM adm.pg_stat_statements limit 0;
```
To make it compatible with third-party monitoring solutions that expect it to be enabled in the `public` schema,
you need to include it in the `search_path`:
```sql
set search_path to public,adm;
```
#### Fix an existing dump file to remove references to `pg_stat_statements`
To fix an existing backup file, make the following changes:
1. Extract from the backup the following file: `db/database.sql.gz`.
1. Decompress the file, or use an editor that can handle it while compressed.
1. Remove the following lines, or similar ones:
```sql
CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public;
```
```sql
COMMENT ON EXTENSION pg_stat_statements IS 'track planning and execution statistics of all SQL statements executed';
```
1. Save the changes and recompress the file.
1. Update the backup file with the modified `db/database.sql.gz`.

View File

@@ -38,8 +38,7 @@ before restoring the backup.
To restore a backup, **you must also restore the GitLab secrets**.
These include the database encryption key, [CI/CD variables](../../ci/variables/index.md), and
variables used for [two-factor authentication](../../user/profile/account/two_factor_authentication.md).
Without the keys, [multiple issues occur](backup_gitlab.md#when-the-secrets-file-is-lost),
including loss of access by users with [two-factor authentication enabled](../../user/profile/account/two_factor_authentication.md),
Without the keys, [multiple issues occur](../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost), including loss of access by users with [two-factor authentication enabled](../../user/profile/account/two_factor_authentication.md),
and GitLab Runners cannot log in.
Restore:

View File

@@ -0,0 +1,619 @@
---
stage: Systems
group: Geo
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Troubleshooting GitLab backups
When you back up GitLab, you might encounter the following issues.
## When the secrets file is lost
If you didn't [back up the secrets file](../../administration/backup_restore/backup_gitlab.md#storing-configuration-files), you
must complete several steps to get GitLab working properly again.
The secrets file is responsible for storing the encryption key for the columns
that contain required, sensitive information. If the key is lost, GitLab can't
decrypt those columns, preventing access to the following items:
- [CI/CD variables](../../ci/variables/index.md)
- [Kubernetes / GCP integration](../../user/infrastructure/clusters/index.md)
- [Custom Pages domains](../../user/project/pages/custom_domains_ssl_tls_certification/index.md)
- [Project error tracking](../../operations/error_tracking.md)
- [Runner authentication](../../ci/runners/index.md)
- [Project mirroring](../../user/project/repository/mirror/index.md)
- [Integrations](../../user/project/integrations/index.md)
- [Web hooks](../../user/project/integrations/webhooks.md)
In cases like CI/CD variables and runner authentication, you can experience
unexpected behaviors, such as:
- Stuck jobs.
- 500 errors.
In this case, you must reset all the tokens for CI/CD variables and
runner authentication, as described in more detail in the following
sections. After you reset the tokens, you should be able to visit your
project and see jobs running again.
WARNING:
The steps in this section can potentially lead to **data loss** for the items listed above.
Consider opening a [Support Request](https://support.gitlab.com/hc/en-us/requests/new) if you're a Premium or Ultimate customer.
### Verify that all values can be decrypted
You can determine if your database contains values that can't be decrypted by using a
[Rake task](../raketasks/check.md#verify-database-values-can-be-decrypted-using-the-current-secrets).
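For a Linux package (Omnibus) installation, that check is typically run as follows (a sketch; see the linked Rake task documentation for the exact invocation on your version):

```shell
# Reports database values that cannot be decrypted with the current secrets
sudo gitlab-rake gitlab:doctor:secrets

# VERBOSE=1 additionally lists the affected rows
sudo gitlab-rake gitlab:doctor:secrets VERBOSE=1
```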
### Take a backup
You must directly modify GitLab data to work around your lost secrets file.
WARNING:
Be sure to create a full database backup before attempting any changes.
### Disable user two-factor authentication (2FA)
Users with 2FA enabled can't sign in to GitLab. In that case, you must
[disable 2FA for everyone](../../security/two_factor_authentication.md#for-all-users),
after which users must reactivate 2FA.
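For a Linux package (Omnibus) installation, the linked page performs this with a Rake task; a minimal sketch:

```shell
# Disables 2FA for every user on the instance; users must set up 2FA again afterwards
sudo gitlab-rake gitlab:two_factor:disable_for_all_users
```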
### Reset CI/CD variables
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
1. Examine the `ci_group_variables` and `ci_variables` tables:
```sql
SELECT * FROM public."ci_group_variables";
SELECT * FROM public."ci_variables";
```
These are the variables that you need to delete.
1. Delete all variables:
```sql
DELETE FROM ci_group_variables;
DELETE FROM ci_variables;
```
1. If you know the specific group or project from which you want to delete variables, you can include a `WHERE` clause in your `DELETE`:
```sql
DELETE FROM ci_group_variables WHERE group_id = <GROUPID>;
DELETE FROM ci_variables WHERE project_id = <PROJECTID>;
```
You may need to reconfigure or restart GitLab for the changes to take effect.
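For a Linux package (Omnibus) installation, that usually means one of the following (self-compiled installations use their own service manager):

```shell
# Re-run the configuration and restart affected services
sudo gitlab-ctl reconfigure

# Or restart all GitLab services directly
sudo gitlab-ctl restart
```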
### Reset runner registration tokens
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
1. Clear all tokens for projects, groups, and the entire instance:
WARNING:
The final `UPDATE` operation stops the runners from being able to pick
up new jobs. You must register new runners.
```sql
-- Clear project tokens
UPDATE projects SET runners_token = null, runners_token_encrypted = null;
-- Clear group tokens
UPDATE namespaces SET runners_token = null, runners_token_encrypted = null;
-- Clear instance tokens
UPDATE application_settings SET runners_registration_token_encrypted = null;
-- Clear key used for JWT authentication
-- This may break the $CI_JWT_TOKEN job variable:
-- https://gitlab.com/gitlab-org/gitlab/-/issues/325965
UPDATE application_settings SET encrypted_ci_jwt_signing_key = null;
-- Clear runner tokens
UPDATE ci_runners SET token = null, token_encrypted = null;
```
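Because the final `UPDATE` invalidates existing runner registrations, each runner must be registered again. A sketch for a runner host, where `<url>` and `<token>` are placeholders for your instance URL and a newly generated registration token:

```shell
# Placeholders: substitute your instance URL and new registration token
sudo gitlab-runner register --url <url> --registration-token <token>
```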
### Reset pending pipeline jobs
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
1. Clear all the tokens for pending jobs:
For GitLab 15.3 and earlier:
```sql
-- Clear build tokens
UPDATE ci_builds SET token = null, token_encrypted = null;
```
For GitLab 15.4 and later:
```sql
-- Clear build tokens
UPDATE ci_builds SET token_encrypted = null;
```
A similar strategy can be employed for the remaining features. By removing the
data that can't be decrypted, GitLab can be returned to operation, and the
lost data can be manually replaced.
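For example, clearing project mirroring credentials that can no longer be decrypted might look like the following hypothetical sketch; verify the table and column names against your GitLab version before running anything:

```sql
-- Hypothetical example: clear encrypted mirroring credentials so mirrors can be reconfigured
UPDATE remote_mirrors SET encrypted_credentials = null, encrypted_credentials_iv = null, encrypted_credentials_salt = null;
```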
### Fix integrations and webhooks
If you've lost your secrets, the [integrations settings](../../user/project/integrations/index.md)
and [webhooks settings](../../user/project/integrations/webhooks.md) pages might display `500` error messages. Lost secrets might also produce `500` errors when you try to access a repository in a project with a previously configured integration or webhook.
The fix is to truncate the affected tables (those containing encrypted columns).
This deletes all your configured integrations, webhooks, and related metadata.
You should verify that the secrets are the root cause before deleting any data.
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
1. Truncate the following tables:
```sql
-- truncate web_hooks table
TRUNCATE integrations, chat_names, issue_tracker_data, jira_tracker_data, slack_integrations, web_hooks, zentao_tracker_data, web_hook_logs CASCADE;
```
## Container registry push failures after restoring from a backup
If you use the [container registry](../../user/packages/container_registry/index.md),
pushes to the registry may fail on a Linux package (Omnibus) instance
after you restore the backup, including the registry data.
These failures mention permission issues in the registry logs, similar to:
```plaintext
level=error
msg="response completed with error"
err.code=unknown
err.detail="filesystem: mkdir /var/opt/gitlab/gitlab-rails/shared/registry/docker/registry/v2/repositories/...: permission denied"
err.message="unknown error"
```
This issue is caused by the restore running as the unprivileged user `git`,
which is unable to assign the correct ownership to the registry files during
the restore process ([issue #62759](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/62759 "Incorrect permissions on registry filesystem after restore")).
To get your registry working again:
```shell
sudo chown -R registry:registry /var/opt/gitlab/gitlab-rails/shared/registry/docker
```
If you changed the default file system location for the registry, run `chown`
against your custom location, instead of `/var/opt/gitlab/gitlab-rails/shared/registry/docker`.
## Backup fails to complete with Gzip error
When running the backup, you may receive a Gzip error message:
```shell
sudo /opt/gitlab/bin/gitlab-backup create
...
Dumping ...
...
gzip: stdout: Input/output error
Backup failed
```
If this happens, examine the following:
- Confirm there is sufficient disk space for the Gzip operation. It's not uncommon for backups that
use the [default strategy](../../administration/backup_restore/backup_gitlab.md#backup-strategy-option) to require half the instance size
in free disk space during backup creation.
- If NFS is being used, check if the mount option `timeout` is set. The
default is `600`, and changing this to smaller values results in this error.
## Backup fails with `File name too long` error
During backup, you can get the `File name too long` error ([issue #354984](https://gitlab.com/gitlab-org/gitlab/-/issues/354984)). For example:
```plaintext
Problem: <class 'OSError: [Errno 36] File name too long:
```
This error stops the backup script from completing. To fix it, you must truncate the file names that cause the problem. A maximum of 246 characters, including the file extension, is permitted.
WARNING:
The steps in this section can potentially lead to **data loss**. All steps must be followed strictly in the order given.
Consider opening a [Support Request](https://support.gitlab.com/hc/en-us/requests/new) if you're a Premium or Ultimate customer.
Truncating file names to resolve the error involves:
- Cleaning up remote uploaded files that aren't tracked in the database.
- Truncating the file names in the database.
- Rerunning the backup task.
### Clean up remote uploaded files
A [known issue](https://gitlab.com/gitlab-org/gitlab-foss/-/issues/45425) caused object store uploads to remain after a parent resource was deleted. This issue was [resolved](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/18698).
To fix these files, you must clean up all remote uploaded files that are in the storage but not tracked in the `uploads` database table.
1. List all the object store upload files that can be moved to a lost and found directory if they don't exist in the GitLab database:
```shell
bundle exec rake gitlab:cleanup:remote_upload_files RAILS_ENV=production
```
1. If you are sure you want to delete these files and remove all non-referenced uploaded files, run:
WARNING:
The following action is **irreversible**.
```shell
bundle exec rake gitlab:cleanup:remote_upload_files RAILS_ENV=production DRY_RUN=false
```
### Truncate the file names referenced by the database
You must truncate the files referenced by the database that are causing the problem. The file names referenced by the database are stored:
- In the `uploads` table.
- In references found in other database tables and columns.
- On the file system.
Truncate the file names in the `uploads` table:
1. Enter the database console:
For the Linux package (Omnibus) GitLab 14.2 and later:
```shell
sudo gitlab-rails dbconsole --database main
```
For the Linux package (Omnibus) GitLab 14.1 and earlier:
```shell
sudo gitlab-rails dbconsole
```
For self-compiled installations, GitLab 14.2 and later:
```shell
sudo -u git -H bundle exec rails dbconsole -e production --database main
```
For self-compiled installations, GitLab 14.1 and earlier:
```shell
sudo -u git -H bundle exec rails dbconsole -e production
```
1. Search the `uploads` table for file names longer than 246 characters:
The following query selects the `uploads` records with file names longer than 246 characters, in batches of 0 to 10000. Batching improves performance on large GitLab instances whose tables contain thousands of records.
```sql
CREATE TEMP TABLE uploads_with_long_filenames AS
SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, id, path
FROM uploads AS u
WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246;
CREATE INDEX ON uploads_with_long_filenames(row_id);
SELECT
u.id,
u.path,
-- Current file name
(regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] AS current_filename,
-- New file name
CONCAT(
LEFT(SPLIT_PART((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242),
COALESCE(SUBSTRING((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$'))
) AS new_filename,
-- New path
CONCAT(
COALESCE((regexp_match(u.path, '(.*\/).*'))[1], ''),
CONCAT(
LEFT(SPLIT_PART((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242),
COALESCE(SUBSTRING((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$'))
)
) AS new_path
FROM uploads_with_long_filenames AS u
WHERE u.row_id > 0 AND u.row_id <= 10000;
```
Output example:
```postgresql
-[ RECORD 1 ]----+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
id | 34
path | public/@hashed/loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisit.txt
current_filename | loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisit.txt
new_filename | loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelits.txt
new_path | public/@hashed/loremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelitsedvulputatemisitloremipsumdolorsitametconsecteturadipiscingelitseddoeiusmodtemporincididuntutlaboreetdoloremagnaaliquaauctorelits.txt
```
Where:
- `current_filename`: a file name that is currently more than 246 characters long.
- `new_filename`: a file name that has been truncated to 246 characters maximum.
- `new_path`: new path considering the `new_filename` (truncated).
After you validate the batch results, advance the batch window (`row_id`) to the next 10000 records (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table.
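For example, the predicate for the second batch of the query above becomes:

```sql
WHERE u.row_id > 10000 AND u.row_id <= 20000;
```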
1. Rename the files found in the `uploads` table from long file names to new truncated file names. The following query rolls back the update so you can check the results safely in a transaction wrapper:
```sql
CREATE TEMP TABLE uploads_with_long_filenames AS
SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, path, id
FROM uploads AS u
WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246;
CREATE INDEX ON uploads_with_long_filenames(row_id);
BEGIN;
WITH updated_uploads AS (
UPDATE uploads
SET
path =
CONCAT(
COALESCE((regexp_match(updatable_uploads.path, '(.*\/).*'))[1], ''),
CONCAT(
LEFT(SPLIT_PART((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242),
COALESCE(SUBSTRING((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$'))
)
)
FROM
uploads_with_long_filenames AS updatable_uploads
WHERE
uploads.id = updatable_uploads.id
AND updatable_uploads.row_id > 0 AND updatable_uploads.row_id <= 10000
RETURNING uploads.*
)
SELECT id, path FROM updated_uploads;
ROLLBACK;
```
After you validate the batch update results, advance the batch window (`row_id`) to the next 10000 records (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table.
1. Validate that the new file names from the previous query are the expected ones. If you are sure you want to truncate the records found in the previous step to 246 characters, run the following:
WARNING:
The following action is **irreversible**.
```sql
CREATE TEMP TABLE uploads_with_long_filenames AS
SELECT ROW_NUMBER() OVER(ORDER BY id) row_id, path, id
FROM uploads AS u
WHERE LENGTH((regexp_match(u.path, '[^\\/:*?"<>|\r\n]+$'))[1]) > 246;
CREATE INDEX ON uploads_with_long_filenames(row_id);
UPDATE uploads
SET
path =
CONCAT(
COALESCE((regexp_match(updatable_uploads.path, '(.*\/).*'))[1], ''),
CONCAT(
LEFT(SPLIT_PART((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1], '.', 1), 242),
COALESCE(SUBSTRING((regexp_match(updatable_uploads.path, '[^\\/:*?"<>|\r\n]+$'))[1] FROM '\.(?:.(?!\.))+$'))
)
)
FROM
uploads_with_long_filenames AS updatable_uploads
WHERE
uploads.id = updatable_uploads.id
AND updatable_uploads.row_id > 0 AND updatable_uploads.row_id <= 10000;
```
After you finish the batch update, advance the batch window (`updatable_uploads.row_id`) to the next 10000 records (10000 to 20000). Repeat this process until you reach the last record in the `uploads` table.
Truncate the file names in those database references:
1. Check if those records are referenced somewhere. One way to do this is to dump the database and search for the parent directory name and file name:
1. To dump your database, you can use the following command as an example:
```shell
pg_dump -h /var/opt/gitlab/postgresql/ -d gitlabhq_production > gitlab-dump.tmp
```
1. Then you can search for the references using the `grep` command. Combining the parent directory with the file name narrows the search. For example:
```shell
grep public/alongfilenamehere.txt gitlab-dump.tmp
```
1. Replace those long file names using the new file names obtained from querying the `uploads` table.
Truncate the file names on the file system. You must manually rename the files in your file system to the new file names obtained from querying the `uploads` table.
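A minimal sketch of such a rename, assuming local storage under the default Linux package uploads directory and using placeholder names taken from the `uploads` query results:

```shell
# <path>, <current_filename>, and <new_filename> are placeholders from the uploads query above;
# the base directory depends on where your instance stores uploads
sudo mv "/var/opt/gitlab/gitlab-rails/uploads/<path>/<current_filename>" \
        "/var/opt/gitlab/gitlab-rails/uploads/<path>/<new_filename>"
```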
### Re-run the backup task
After following all the previous steps, re-run the backup task.
## Restoring database backup fails when `pg_stat_statements` was previously enabled
The GitLab backup of the PostgreSQL database includes all SQL statements required to enable extensions that were
previously enabled in the database.
The `pg_stat_statements` extension can only be enabled or disabled by a PostgreSQL user with `superuser` role.
As the restore process uses a database user with limited permissions, it can't execute the following SQL statements:
```sql
DROP EXTENSION IF EXISTS pg_stat_statements;
CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public;
```
When trying to restore the backup in a PostgreSQL instance that doesn't have the `pg_stat_statements` extension,
the following error message is displayed:
```plaintext
ERROR: permission denied to create extension "pg_stat_statements"
HINT: Must be superuser to create this extension.
ERROR: extension "pg_stat_statements" does not exist
```
When trying to restore in an instance that has the `pg_stat_statements` extension enabled, the cleanup step
fails with an error message similar to the following:
```plaintext
rake aborted!
ActiveRecord::StatementInvalid: PG::InsufficientPrivilege: ERROR: must be owner of view pg_stat_statements
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:42:in `block (4 levels) in <top (required)>'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `each'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `block (3 levels) in <top (required)>'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/backup.rake:71:in `block (3 levels) in <top (required)>'
/opt/gitlab/embedded/bin/bundle:23:in `load'
/opt/gitlab/embedded/bin/bundle:23:in `<main>'
Caused by:
PG::InsufficientPrivilege: ERROR: must be owner of view pg_stat_statements
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:42:in `block (4 levels) in <top (required)>'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `each'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/db.rake:41:in `block (3 levels) in <top (required)>'
/opt/gitlab/embedded/service/gitlab-rails/lib/tasks/gitlab/backup.rake:71:in `block (3 levels) in <top (required)>'
/opt/gitlab/embedded/bin/bundle:23:in `load'
/opt/gitlab/embedded/bin/bundle:23:in `<main>'
Tasks: TOP => gitlab:db:drop_tables
(See full trace by running task with --trace)
```
### Prevent the dump file from including `pg_stat_statements`
To prevent the inclusion of the extension in the PostgreSQL dump file that is part of the backup bundle,
enable the extension in any schema except the `public` schema:
```sql
CREATE SCHEMA adm;
CREATE EXTENSION pg_stat_statements SCHEMA adm;
```
If the extension was previously enabled in the `public` schema, move it to a new one:
```sql
CREATE SCHEMA adm;
ALTER EXTENSION pg_stat_statements SET SCHEMA adm;
```
To query the `pg_stat_statements` data after changing the schema, prefix the view name with the new schema:
```sql
SELECT * FROM adm.pg_stat_statements LIMIT 0;
```
To stay compatible with third-party monitoring solutions that expect the extension in the `public` schema,
include the new schema in the `search_path`:
```sql
SET search_path TO public, adm;
```
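`SET search_path` only applies to the current session. To persist the setting for all new connections, you can set it at the database level. A sketch, assuming the default Linux package database name `gitlabhq_production`:
```sql
-- Persist the search_path for every new connection to this database.
ALTER DATABASE gitlabhq_production SET search_path TO public, adm;
```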
### Fix an existing dump file to remove references to `pg_stat_statements`
To fix an existing backup file, make the following changes:
1. Extract the file `db/database.sql.gz` from the backup.
1. Decompress the file, or use an editor that can handle it while compressed.
1. Remove the following lines, or similar ones:
```sql
CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public;
```
```sql
COMMENT ON EXTENSION pg_stat_statements IS 'track planning and execution statistics of all SQL statements executed';
```
1. Save the changes and recompress the file.
1. Update the backup file with the modified `db/database.sql.gz`.
View File
@@ -215,7 +215,7 @@ secrets file (`gitlab-secrets.json`).
Automatic resolution is not yet implemented. If you have values that
cannot be decrypted, you can follow steps to reset them, see our
documentation on what to do [when the secrets file is lost](../../administration/backup_restore/backup_gitlab.md#when-the-secrets-file-is-lost).
documentation on what to do [when the secrets file is lost](../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost).
This can take a very long time, depending on the size of your
database, as it checks all rows in all tables.
View File
@@ -644,7 +644,7 @@ Parameters:
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `external_wiki_url` | string | true | The URL of the external wiki. |
| `external_wiki_url` | string | true | URL of the external wiki. |
### Disable an external wiki
View File
@@ -187,11 +187,12 @@ The response is `404 Not Found` if the vulnerability export is not finished yet
Example response:
```csv
Group Name,Project Name,Tool,Scanner Name,Status,Vulnerability,Details,Additional Info,Severity,CVE,CWE,Other Identifiers,Detected At,Location,Activity,Comments,Full Path
Gitlab.org,Defend,container_scanning,Trivy,resolved,CVE-2019-14697 in musl-utils-1.1.20-r4,"musl libc through 1.1.23 has an x87 floating-point stack adjustment imbalance, related to the math/i386/ directory. In some cases, use of this library could introduce out-of-bounds writes that are not present in an application's source code.",CVE-2019-14697 in musl-utils-1.1.20-r4,critical,CVE-2019-14697,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""musl-utils""}, ""version""=>""1.1.20-r4""}, ""operating_system""=>""alpine 3.9.2""}",true,"2022-10-07 13:41:08 UTC|root|resolved|changed vulnerability status to resolved",group/project/1
Gitlab.org,Defend,container_scanning,Trivy,detected,CVE-2019-19242 in sqlite-libs-3.26.0-r3,"SQLite 3.30.1 mishandles pExpr->y.pTab, as demonstrated by the TK_COLUMN case in sqlite3ExprCodeTarget in expr.c.",CVE-2019-19242 in sqlite-libs-3.26.0-r3,medium,CVE-2019-19242,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""sqlite-libs""}, ""version""=>""3.26.0-r3""}, ""operating_system""=>""alpine 3.9.2""}",true,"",group/project/2
Gitlab.org,Defend,container_scanning,Trivy,detected,CVE-2020-28928 in musl-1.1.20-r4,"In musl libc through 1.2.1, wcsnrtombs mishandles particular combinations of destination buffer size and source character limit, as demonstrated by an invalid write access (buffer overflow).",CVE-2020-28928 in musl-1.1.20-r4,medium,CVE-2020-28928,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""musl""}, ""version""=>""1.1.20-r4""}, ""operating_system""=>""alpine 3.9.2""}",true,"",group/project/3
Gitlab.org,Defend,dependency_scanning,Gemnasium,detected,Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection') in rack,Carefully crafted requests can cause shell escape sequences to be written to the terminal via Rack's Lint middleware and CommonLogger middleware. These escape sequences can be leveraged to possibly execute commands in the victim's terminal.,Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection') in rack,unknown,Gemfile.lock:rack:gemnasium:60b5a27f-4e4d-4ab4-8ae7-74b4b212e177,,Gemnasium-60b5a27f-4e4d-4ab4-8ae7-74b4b212e177; GHSA-wq4h-7r42-5hrr,2022-10-14 13:16:00 UTC,"{""file""=>""Gemfile.lock"", ""dependency""=>{""package""=>{""name""=>""rack""}, ""version""=>""2.2.3""}}",false,"",group/project/4
Gitlab.org,Defend,dependency_scanning,Gemnasium,detected,Denial of Service Vulnerability in Rack Multipart Parsing in rack,"Carefully crafted multipart POST requests can cause Rack's multipart parser to take much longer than expected, leading to a possible denial of service vulnerability. Impacted code will use Rack's multipart parser to parse multipart posts.",Denial of Service Vulnerability in Rack Multipart Parsing in rack,unknown,Gemfile.lock:rack:gemnasium:20daa17a-47b5-4f79-80c2-cd8f2db9805c,,Gemnasium-20daa17a-47b5-4f79-80c2-cd8f2db9805c; GHSA-hxqx-xwvh-44m2,2022-10-14 13:16:00 UTC,"{""file""=>""Gemfile.lock"", ""dependency""=>{""package""=>{""name""=>""rack""}, ""version""=>""2.2.3""}}",false,"",group/project/5
Gitlab.org,Defend,sast,Brakeman,detected,Possible SQL injection,,Possible SQL injection,medium,e52f23a259cd489168b4313317ac94a3f13bffde57b9635171c1a44a9f329e9a,,"""Brakeman Warning Code 0""",2022-10-13 15:16:36 UTC,"{""file""=>""main.rb"", ""class""=>""User"", ""method""=>""index"", ""start_line""=>3}",false,"",group/project/6
Group Name,Project Name,Tool,Scanner Name,Status,Vulnerability,Details,Additional Info,Severity,CVE,CWE,Other Identifiers,Detected At,Location,Activity,Comments,Full Path,CVSS Vectors,Dismissal Reason
Gitlab.org,Defend,container_scanning,Trivy,resolved,CVE-2019-14697 in musl-utils-1.1.20-r4,"musl libc through 1.1.23 has an x87 floating-point stack adjustment imbalance, related to the math/i386/ directory. In some cases, use of this library could introduce out-of-bounds writes that are not present in an application's source code.",CVE-2019-14697 in musl-utils-1.1.20-r4,critical,CVE-2019-14697,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""musl-utils""}, ""version""=>""1.1.20-r4""}, ""operating_system""=>""alpine 3.9.2""}",true,"2022-10-07 13:41:08 UTC|root|resolved|changed vulnerability status to resolved",group/project/1,,,
Gitlab.org,Defend,container_scanning,Trivy,detected,CVE-2019-19242 in sqlite-libs-3.26.0-r3,"SQLite 3.30.1 mishandles pExpr->y.pTab, as demonstrated by the TK_COLUMN case in sqlite3ExprCodeTarget in expr.c.",CVE-2019-19242 in sqlite-libs-3.26.0-r3,medium,CVE-2019-19242,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""sqlite-libs""}, ""version""=>""3.26.0-r3""}, ""operating_system""=>""alpine 3.9.2""}",true,"",group/project/2,,,
Gitlab.org,Defend,container_scanning,Trivy,detected,CVE-2020-28928 in musl-1.1.20-r4,"In musl libc through 1.2.1, wcsnrtombs mishandles particular combinations of destination buffer size and source character limit, as demonstrated by an invalid write access (buffer overflow).",CVE-2020-28928 in musl-1.1.20-r4,medium,CVE-2020-28928,,"",2022-10-07 13:34:41 UTC,"{""image""=>""python:3.4-alpine"", ""dependency""=>{""package""=>{""name""=>""musl""}, ""version""=>""1.1.20-r4""}, ""operating_system""=>""alpine 3.9.2""}",true,"",group/project/3,,,
Gitlab.org,Defend,dependency_scanning,Gemnasium,detected,Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection') in rack,Carefully crafted requests can cause shell escape sequences to be written to the terminal via Rack's Lint middleware and CommonLogger middleware. These escape sequences can be leveraged to possibly execute commands in the victim's terminal.,Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection') in rack,unknown,Gemfile.lock:rack:gemnasium:60b5a27f-4e4d-4ab4-8ae7-74b4b212e177,,Gemnasium-60b5a27f-4e4d-4ab4-8ae7-74b4b212e177; GHSA-wq4h-7r42-5hrr,2022-10-14 13:16:00 UTC,"{""file""=>""Gemfile.lock"", ""dependency""=>{""package""=>{""name""=>""rack""}, ""version""=>""2.2.3""}}",false,group/project/4,,,
Gitlab.org,Defend,dependency_scanning,Gemnasium,detected,Denial of Service Vulnerability in Rack Multipart Parsing in rack,"Carefully crafted multipart POST requests can cause Rack's multipart parser to take much longer than expected, leading to a possible denial of service vulnerability. Impacted code will use Rack's multipart parser to parse multipart posts.",Denial of Service Vulnerability in Rack Multipart Parsing in rack,unknown,Gemfile.lock:rack:gemnasium:20daa17a-47b5-4f79-80c2-cd8f2db9805c,,Gemnasium-20daa17a-47b5-4f79-80c2-cd8f2db9805c; GHSA-hxqx-xwvh-44m2,2022-10-14 13:16:00 UTC,"{""file""=>""Gemfile.lock"", ""dependency""=>{""package""=>{""name""=>""rack""}, ""version""=>""2.2.3""}}",false,group/project/5,,,
Gitlab.org,Defend,sast,Brakeman,detected,Possible SQL injection,,Possible SQL injection,medium,e52f23a259cd489168b4313317ac94a3f13bffde57b9635171c1a44a9f329e9a,,"""Brakeman Warning Code 0""",2022-10-13 15:16:36 UTC,"{""file""=>""main.rb"", ""class""=>""User"", ""method""=>""index"", ""start_line""=>3}",false,"",group/project/6,,,
Gitlab.org,Defend,sast,Semgrep,dismissed,Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection'),"SQL Injection is a critical vulnerability that can lead to data or system compromise...",,critical,,CWE-89,SCS0002,2023-12-28 10:48:34 UTC,"{""file""=>""WebGoat/App_Code/DB/SqliteDbProvider.cs"", ""start_line""=>274}",false,"2023-12-28 10:51:32 UTC|root|Dismissed|""changed vulnerability status to Dismissed: Not Applicable and the following comment: ""dismiss 5""",gitlab-org/defend/579,,Not applicable,
```
View File
@@ -2,7 +2,6 @@
stage: Verify
group: Pipeline Execution
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
description: "Learn how to use GitLab CI/CD, the GitLab built-in Continuous Integration, Continuous Deployment, and Continuous Delivery toolset to build, test, and deploy your application."
---
# Get started with GitLab CI/CD **(FREE ALL)**
View File
@@ -244,7 +244,7 @@ malicious code can compromise both masked and protected variables.
Variable values are encrypted using [`aes-256-cbc`](https://en.wikipedia.org/wiki/Advanced_Encryption_Standard)
and stored in the database. This data can only be read and decrypted with a
valid [secrets file](../../administration/backup_restore/backup_gitlab.md#when-the-secrets-file-is-lost).
valid [secrets file](../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost).
### Mask a CI/CD variable
View File
@@ -621,7 +621,7 @@ to back up the `gitlab.rb` file.
WARNING:
[Backing up the GitLab secrets file](../administration/backup_restore/backup_gitlab.md#storing-configuration-files) is required
to avoid [complicated steps](../administration/backup_restore/backup_gitlab.md#when-the-secrets-file-is-lost) when recovering
to avoid [complicated steps](../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost) when recovering
GitLab from backup. The secrets file is stored at `/etc/gitlab/gitlab-secrets.json` inside the container, or
`$GITLAB_HOME/config/gitlab-secrets.json` [on the container host](#set-up-the-volumes-location).
View File
@@ -1,6 +1,7 @@
---
stage: Service Management
group: Respond
description: Error tracking, incident management.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -1,6 +1,7 @@
---
stage: Systems
group: Distribution
description: Support details.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -1,13 +1,13 @@
---
stage: none
group: unassigned
description: Runners, jobs, pipelines, variables.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Use CI/CD to build your application **(FREE ALL)**
Add your source code to a repository, create merge requests to check in
code, and use CI/CD to generate your application. Include packages in your app and output it to a variety of environments.
Use CI/CD to generate your application.
- [Getting started](../ci/index.md)
- [CI/CD YAML syntax reference](../ci/yaml/index.md)
View File
@@ -1,6 +1,7 @@
---
stage: Create
group: Source Code
description: Common commands and workflows.
info: "To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments"
---
View File
@@ -1,12 +1,13 @@
---
stage: none
group: unassigned
description: Repositories, merge requests, remote development.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
# Manage your code **(FREE ALL)**
Store your source files in a repository and create merge requests. Write, debug, and compile code hosted on GitLab.
Store your source files in a repository and create merge requests. Write, debug, and collaborate on code.
- [Repositories](../user/project/repository/index.md)
- [Merge requests](../user/project/merge_requests/index.md)
View File
@@ -1,6 +1,7 @@
---
stage: Plan
group: Project Management
description: Epics, issues, milestones, labels.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -1,6 +1,7 @@
---
stage: none
group: unassigned
description: Environments, packages, review apps, GitLab Pages.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -1,6 +1,7 @@
---
stage: none
group: unassigned
description: Users, groups, namespaces, SSH keys.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -1,6 +1,7 @@
---
stage: Plan
group: Optimize
description: Instance, group, and project analytics.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -1,6 +1,7 @@
---
stage: Secure
group: Static Analysis
description: Container, dependency, and vulnerability scans.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -205,6 +205,8 @@ To sort vulnerabilities by the date each vulnerability was detected, select the
## Export vulnerability details
> Added "Dismissal Reason" as a column in the CSV export [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/434076) in GitLab 16.8.
You can export details of the vulnerabilities listed in the Vulnerability Report. The export format
is CSV (comma separated values). All vulnerabilities are included because filters do not
apply to the export.
@@ -229,6 +231,7 @@ Fields included are:
- Comments
- Full Path
- CVSS Vectors
- [Dismissal Reason](../vulnerabilities/index.md#vulnerability-dismissal-reasons)
NOTE:
Full details are available through our
View File
@@ -1,6 +1,7 @@
---
stage: Deploy
group: Environments
description: Terraform and Kubernetes deployments.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -1,6 +1,7 @@
---
stage: Data Stores
group: Tenant Scale
description: Project visibility, search, badges, layout.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -208,4 +208,4 @@ Older versions of SSH may require you to remove `-E md5` from the command.
- [Troubleshooting](troubleshooting.md) for repository mirroring.
- Configure a [Pull Mirroring Interval](../../../../administration/instance_limits.md#pull-mirroring-interval)
- [Disable mirrors for a project](../../../../administration/settings/visibility_and_access_controls.md#enable-project-mirroring)
- [Secrets file and mirroring](../../../../administration/backup_restore/backup_gitlab.md#when-the-secrets-file-is-lost)
- [Secrets file and mirroring](../../../../administration/backup_restore/troubleshooting_backup_gitlab.md#when-the-secrets-file-is-lost)
View File
@@ -1,6 +1,7 @@
---
stage: none
group: none
description: Version information.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---
View File
@@ -268,14 +268,7 @@ module API
desc: 'Branches for which notifications are to be sent'
}
],
'external-wiki' => [
{
required: true,
name: :external_wiki_url,
type: String,
desc: 'The URL of the external wiki'
}
],
'external-wiki' => ::Integrations::ExternalWiki.api_fields,
'google-play' => ::Integrations::GooglePlay.api_fields,
'hangouts-chat' => [
{
View File
@@ -33,21 +33,6 @@ module Gitlab
track_struct_event(tracker, category, action, label: label, property: property, value: value, contexts: contexts)
end
def database_event(category, action, label: nil, property: nil, value: nil, context: [], project: nil, user: nil, namespace: nil, **extra) # rubocop:disable Metrics/ParameterLists
action = action.to_s
destination = Gitlab::Tracking::Destinations::DatabaseEventsSnowplow.new
contexts = [
Tracking::StandardContext.new(
namespace_id: namespace&.id,
plan_name: namespace&.actual_plan_name,
project_id: project&.id,
user_id: user&.id,
**extra).to_context, *context
]
track_struct_event(destination, category, action, label: label, property: property, value: value, contexts: contexts)
end
def definition(basename, category: nil, action: nil, label: nil, property: nil, value: nil, context: [], project: nil, user: nil, namespace: nil, **extra) # rubocop:disable Metrics/ParameterLists
definition = YAML.load_file(Rails.root.join("config/events/#{basename}.yml"))
View File
@@ -1,52 +0,0 @@
# frozen_string_literal: true
module Gitlab
module Tracking
module Destinations
class DatabaseEventsSnowplow < Snowplow
extend ::Gitlab::Utils::Override
HOSTNAME = 'db-snowplow.trx.gitlab.net'
override :enabled?
# database events are only collected for SaaS instance
def enabled?
::Gitlab.dev_or_test_env? || ::Gitlab.com?
end
override :hostname
def hostname
return Gitlab::CurrentSettings.snowplow_database_collector_hostname || HOSTNAME if ::Gitlab.com?
'localhost:9091'
end
private
override :increment_failed_events_emissions
def increment_failed_events_emissions(value)
Gitlab::Metrics.counter(
:gitlab_db_events_snowplow_failed_events_total,
'Number of failed Snowplow events emissions'
).increment({}, value.to_i)
end
override :increment_successful_events_emissions
def increment_successful_events_emissions(value)
Gitlab::Metrics.counter(
:gitlab_db_events_snowplow_successful_events_total,
'Number of successful Snowplow events emissions'
).increment({}, value.to_i)
end
override :increment_total_events_counter
def increment_total_events_counter
Gitlab::Metrics.counter(
:gitlab_db_events_snowplow_events_total,
'Number of Snowplow events'
).increment
end
end
end
end
end
View File
@@ -172,11 +172,6 @@ msgid_plural "%d authors"
msgstr[0] ""
msgstr[1] ""
msgid "%d candidate"
msgid_plural "%d candidates"
msgstr[0] ""
msgstr[1] ""
msgid "%d changed file"
msgid_plural "%d changed files"
msgstr[0] ""
@@ -20343,6 +20338,9 @@ msgstr ""
msgid "ExternalWikiService|Link to an external wiki from the sidebar."
msgstr ""
msgid "ExternalWikiService|URL of the external wiki."
msgstr ""
msgid "ExternalWikiService|https://example.com/xxx/wiki/..."
msgstr ""
@@ -54013,6 +54011,9 @@ msgstr ""
msgid "VulnerabilityExport|Detected At"
msgstr ""
msgid "VulnerabilityExport|Dismissal Reason"
msgstr ""
msgid "VulnerabilityExport|Full Path"
msgstr ""
View File
@@ -3,7 +3,7 @@
source 'https://rubygems.org'
gem 'gitlab-qa', '~> 13', '>= 13.1.0', require: 'gitlab/qa'
gem 'gitlab_quality-test_tooling', '~> 1.9.0', require: false
gem 'gitlab_quality-test_tooling', '~> 1.10.0', require: false
gem 'gitlab-utils', path: '../gems/gitlab-utils'
gem 'activesupport', '~> 7.0.8' # This should stay in sync with the root's Gemfile
gem 'allure-rspec', '~> 2.23.0'
View File
@@ -129,7 +129,7 @@ GEM
rainbow (>= 3, < 4)
table_print (= 1.5.7)
zeitwerk (>= 2, < 3)
gitlab_quality-test_tooling (1.9.0)
gitlab_quality-test_tooling (1.10.0)
activesupport (>= 6.1, < 7.2)
amatch (~> 0.4.1)
gitlab (~> 4.19)
@@ -361,7 +361,7 @@ DEPENDENCIES
fog-google (~> 1.19)
gitlab-qa (~> 13, >= 13.1.0)
gitlab-utils!
gitlab_quality-test_tooling (~> 1.9.0)
gitlab_quality-test_tooling (~> 1.10.0)
influxdb-client (~> 3.0)
knapsack (~> 4.0)
nokogiri (~> 1.15, >= 1.15.5)
View File
@@ -49,11 +49,11 @@ RSpec.describe Admin::ProjectsController do
it 'does not have N+1 queries', :use_clean_rails_memory_store_caching, :request_store do
get :index
control_count = ActiveRecord::QueryRecorder.new { get :index }.count
control = ActiveRecord::QueryRecorder.new { get :index }
create(:project)
expect { get :index }.not_to exceed_query_limit(control_count)
expect { get :index }.not_to exceed_query_limit(control)
end
end
View File
@@ -89,11 +89,11 @@ RSpec.describe Admin::RunnersController, feature_category: :fleet_visibility do
it 'avoids N+1 queries', :request_store do
get :edit, params: { id: runner.id }
control_count = ActiveRecord::QueryRecorder.new { get :edit, params: { id: runner.id } }.count
control = ActiveRecord::QueryRecorder.new { get :edit, params: { id: runner.id } }
# There is one additional query looking up subject.group in ProjectPolicy for the
# needs_new_sso_session permission
expect { get :edit, params: { id: runner.id } }.not_to exceed_query_limit(control_count + 1)
expect { get :edit, params: { id: runner.id } }.not_to exceed_query_limit(control).with_threshold(1)
expect(response).to have_gitlab_http_status(:ok)
end
View File
@@ -470,7 +470,7 @@ RSpec.describe ApplicationController, feature_category: :shared do
enforce_terms
expect { get :index }.not_to exceed_query_limit(control.count).with_threshold(1)
expect { get :index }.not_to exceed_query_limit(control).with_threshold(1)
end
context 'when terms are enforced' do
View File
@@ -46,15 +46,15 @@ RSpec.describe RendersCommits do
it 'avoids N + 1', :request_store do
stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 5)
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
go
end.count
end
stub_const("MergeRequestDiff::COMMITS_SAFE_SIZE", 15)
expect do
go
end.not_to exceed_all_query_limit(control_count)
end.not_to exceed_all_query_limit(control)
end
end
@@ -73,7 +73,7 @@ RSpec.describe RendersCommits do
expect do
subject.prepare_commits_for_rendering(merge_request.commits)
merge_request.commits.each(&:latest_pipeline)
end.not_to exceed_all_query_limit(control.count)
end.not_to exceed_all_query_limit(control)
end
end
end
View File
@@ -62,7 +62,9 @@ RSpec.describe Groups::LabelsController, feature_category: :team_planning do
create_list(:group_label, 3, group: group)
# some n+1 queries still exist
expect { get :index, params: { group_id: group.to_param } }.not_to exceed_all_query_limit(control.count).with_threshold(10)
expect do
get :index, params: { group_id: group.to_param }
end.not_to exceed_all_query_limit(control).with_threshold(10)
expect(assigns(:labels).count).to eq(4)
end
end
View File
@@ -62,12 +62,12 @@ RSpec.describe Groups::ReleasesController do
context 'N+1 queries' do
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new { subject }.count
control = ActiveRecord::QueryRecorder.new { subject }
create_list(:release, 5, project: project)
create_list(:release, 5, project: private_project)
expect { subject }.not_to exceed_query_limit(control_count)
expect { subject }.not_to exceed_query_limit(control)
end
end
end
View File
@@ -987,11 +987,11 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
labels = create_list(:label, 10, project: project).map(&:to_reference)
issue = create(:issue, project: project, description: 'Test issue')
control_count = ActiveRecord::QueryRecorder.new { issue.update!(description: [issue.description, label].join(' ')) }.count
control = ActiveRecord::QueryRecorder.new { issue.update!(description: [issue.description, label].join(' ')) }
# Follow-up to get rid of this `2 * label.count` requirement: https://gitlab.com/gitlab-org/gitlab-foss/issues/52230
expect { issue.update!(description: [issue.description, labels].join(' ')) }
.not_to exceed_query_limit(control_count + 2 * labels.count)
.not_to exceed_query_limit(control).with_threshold(2 * labels.count)
end
it 'logs the view with Gitlab::Search::RecentIssues' do
@@ -1849,15 +1849,17 @@ RSpec.describe Projects::IssuesController, :request_store, feature_category: :te
RequestStore.clear!
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
end.count
end
RequestStore.clear!
create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference)
expect { get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid } }.not_to exceed_query_limit(control_count)
expect do
get :discussions, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
end.not_to exceed_query_limit(control)
end
end
View File
@@ -108,7 +108,7 @@ RSpec.describe Projects::LabelsController, feature_category: :team_planning do
# some n+1 queries still exist
# calls to get max project authorization access level
expect { list_labels }.not_to exceed_all_query_limit(control.count).with_threshold(25)
expect { list_labels }.not_to exceed_all_query_limit(control).with_threshold(25)
expect(assigns(:labels).count).to eq(10)
end
end
View File
@@ -249,15 +249,15 @@ RSpec.describe Projects::NotesController, type: :controller, feature_category: :
RequestStore.clear!
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
get :index, params: request_params
end.count
end
RequestStore.clear!
create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference)
expect { get :index, params: request_params }.not_to exceed_query_limit(control_count)
expect { get :index, params: request_params }.not_to exceed_query_limit(control)
end
end
end
View File
@@ -108,11 +108,11 @@ RSpec.describe Projects::PipelineSchedulesController, feature_category: :continu
end
it 'avoids N + 1 queries', :request_store do
control_count = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules }.count
control = ActiveRecord::QueryRecorder.new { visit_pipelines_schedules }
create_list(:ci_pipeline_schedule, 2, project: project)
expect { visit_pipelines_schedules }.not_to exceed_query_limit(control_count)
expect { visit_pipelines_schedules }.not_to exceed_query_limit(control)
end
context 'when the scope is set to active' do
View File
@@ -381,7 +381,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
# Set up all required variables
get_pipeline_json
control_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
control = ActiveRecord::QueryRecorder.new { get_pipeline_json }
first_build = pipeline.builds.first
first_build.tag_list << [:hello, :world]
@@ -391,9 +391,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
second_build.tag_list << [:docker, :ruby]
create(:deployment, deployable: second_build)
new_count = ActiveRecord::QueryRecorder.new { get_pipeline_json }.count
expect(new_count).to be_within(1).of(control_count)
expect { get_pipeline_json }.not_to exceed_query_limit(control).with_threshold(1)
end
end
@@ -1074,7 +1072,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
clear_controller_memoization
control_count = ActiveRecord::QueryRecorder.new { get_test_report_json }.count
control = ActiveRecord::QueryRecorder.new { get_test_report_json }
create(:ci_build, name: 'karma', pipeline: pipeline).tap do |build|
create(:ci_job_artifact, :junit, job: build)
@ -1082,7 +1080,7 @@ RSpec.describe Projects::PipelinesController, feature_category: :continuous_inte
clear_controller_memoization
expect { get_test_report_json }.not_to exceed_query_limit(control_count)
expect { get_test_report_json }.not_to exceed_query_limit(control)
end
end
View File
@@ -40,11 +40,11 @@ RSpec.describe Projects::StarrersController do
it 'avoids N+1s loading users', :request_store do
get_starrers
control_count = ActiveRecord::QueryRecorder.new { get_starrers }.count
control = ActiveRecord::QueryRecorder.new { get_starrers }
create_list(:user, 5).each { |user| user.toggle_star(project) }
expect { get_starrers }.not_to exceed_query_limit(control_count)
expect { get_starrers }.not_to exceed_query_limit(control)
end
end
View File
@@ -16,11 +16,11 @@ RSpec.describe 'issuable list', :js, feature_category: :team_planning do
issuable_types.each do |issuable_type|
it "avoids N+1 database queries for #{issuable_type.to_s.humanize.pluralize}", quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/231426' } do
control_count = ActiveRecord::QueryRecorder.new { visit_issuable_list(issuable_type) }.count
control = ActiveRecord::QueryRecorder.new { visit_issuable_list(issuable_type) }
create_issuables(issuable_type)
expect { visit_issuable_list(issuable_type) }.not_to exceed_query_limit(control_count)
expect { visit_issuable_list(issuable_type) }.not_to exceed_query_limit(control)
end
it "counts upvotes, downvotes and notes count for each #{issuable_type.to_s.humanize}" do
View File
@@ -171,12 +171,12 @@ RSpec.describe 'Branches', feature_category: :source_code_management do
new_branches_count = 20
sql_queries_count_threshold = 10
control_count = ActiveRecord::QueryRecorder.new { visit project_branches_path(project) }.count
control = ActiveRecord::QueryRecorder.new { visit project_branches_path(project) }
(1..new_branches_count).each { |number| repository.add_branch(user, "new-branch-#{number}", 'master') }
expect { visit project_branches_filtered_path(project, state: 'all') }
.not_to exceed_query_limit(control_count).with_threshold(sql_queries_count_threshold)
.not_to exceed_query_limit(control).with_threshold(sql_queries_count_threshold)
end
end
View File
@@ -41,11 +41,11 @@ RSpec.describe 'Developer views tags', feature_category: :source_code_management
end
it 'avoids a N+1 query in branches index' do
control_count = ActiveRecord::QueryRecorder.new { visit project_tags_path(project) }.count
control = ActiveRecord::QueryRecorder.new { visit project_tags_path(project) }
%w[one two three four five].each { |tag| repository.add_tag(user, tag, 'master', 'foo') }
expect { visit project_tags_path(project) }.not_to exceed_query_limit(control_count)
expect { visit project_tags_path(project) }.not_to exceed_query_limit(control)
end
it 'views the tags list page' do
View File
@@ -343,14 +343,14 @@ RSpec.describe DeploymentsFinder, feature_category: :deployment_management do
it 'avoids N+1 queries' do
execute_queries = -> { described_class.new({ group: group }).execute.first }
control_count = ActiveRecord::QueryRecorder.new { execute_queries }.count
control = ActiveRecord::QueryRecorder.new { execute_queries }
new_project = create(:project, :repository, group: group)
new_env = create(:environment, project: new_project, name: "production")
create_list(:deployment, 2, status: :success, project: new_project, environment: new_env)
group.reload
expect { execute_queries }.not_to exceed_query_limit(control_count)
expect { execute_queries }.not_to exceed_query_limit(control)
end
end
end
View File
@@ -166,12 +166,12 @@ RSpec.describe MembersFinder, feature_category: :groups_and_projects do
# warm up
# We need this warm up because there is 1 query being fired in one of the policies,
# and policy results are cached. Without a warm up, the control_count will be X queries
# and policy results are cached. Without a warm up, the control.count will be X queries
# but the test phase will only fire X-1 queries, due the fact that the
# result of the policy is already available in the cache.
described_class.new(project, user2).execute.map(&:user)
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
described_class.new(project, user2).execute.map(&:user)
end
@@ -179,7 +179,7 @@ RSpec.describe MembersFinder, feature_category: :groups_and_projects do
expect do
described_class.new(project, user2).execute.map(&:user)
end.to issue_same_number_of_queries_as(control_count)
end.to issue_same_number_of_queries_as(control)
end
context 'with :shared_into_ancestors' do
View File
@@ -168,9 +168,9 @@ RSpec.describe Releases::GroupReleasesFinder, feature_category: :groups_and_proj
let(:params) { query_params }
it 'subgroups avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
releases
end.count
end
subgroups = create_list(:group, 10, parent: group)
projects = create_list(:project, 10, namespace: subgroups[0])
@@ -178,7 +178,7 @@ RSpec.describe Releases::GroupReleasesFinder, feature_category: :groups_and_proj
expect do
releases
end.not_to exceed_all_query_limit(control_count)
end.not_to exceed_all_query_limit(control)
end
end
end
View File
@@ -49,8 +49,8 @@ RSpec.describe ResourceMilestoneEventFinder do
milestone1 = create(:milestone, project: issue_project)
milestone2 = create(:milestone, project: issue_project)
control_count = ActiveRecord::QueryRecorder.new { described_class.new(user, issue).execute }.count
expect(control_count).to eq(1) # 1 events query
control = ActiveRecord::QueryRecorder.new { described_class.new(user, issue).execute }
expect(control.count).to eq(1) # 1 events query
create_event(milestone1, :add)
create_event(milestone1, :remove)
@@ -60,7 +60,7 @@ RSpec.describe ResourceMilestoneEventFinder do
create_event(milestone2, :remove)
# 1 milestones + 1 project + 1 user + 4 ability
expect { described_class.new(user, issue).execute }.not_to exceed_query_limit(control_count + 6)
expect { described_class.new(user, issue).execute }.not_to exceed_query_limit(control).with_threshold(6)
end
end
View File
@@ -1,13 +1,11 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { mount } from '@vue/test-utils';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import CandidateList from '~/ml/model_registry/components/candidate_list.vue';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import SearchableList from '~/ml/model_registry/components/searchable_list.vue';
import CandidateListRow from '~/ml/model_registry/components/candidate_list_row.vue';
import getModelCandidatesQuery from '~/ml/model_registry/graphql/queries/get_model_candidates.query.graphql';
import { GRAPHQL_PAGE_SIZE } from '~/ml/model_registry/constants';
@@ -24,10 +22,7 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
let wrapper;
let apolloProvider;
const findAlert = () => wrapper.findComponent(GlAlert);
const findLoader = () => wrapper.findComponent(PackagesListLoader);
const findRegistryList = () => wrapper.findComponent(RegistryList);
const findListRow = () => wrapper.findComponent(CandidateListRow);
const findSearchableList = () => wrapper.findComponent(SearchableList);
const findAllRows = () => wrapper.findAllComponents(CandidateListRow);
const mountComponent = ({
@@ -37,15 +32,12 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
const requestHandlers = [[getModelCandidatesQuery, resolver]];
apolloProvider = createMockApollo(requestHandlers);
wrapper = shallowMount(CandidateList, {
wrapper = mount(CandidateList, {
apolloProvider,
propsData: {
modelId: 2,
...props,
},
stubs: {
RegistryList,
},
});
};
@@ -60,25 +52,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
await waitForPromises();
});
it('displays empty slot message', () => {
it('shows empty state', () => {
expect(wrapper.text()).toContain('This model has no candidates');
});
it('does not display loader', () => {
expect(findLoader().exists()).toBe(false);
});
it('does not display rows', () => {
expect(findListRow().exists()).toBe(false);
});
it('does not display registry list', () => {
expect(findRegistryList().exists()).toBe(false);
});
it('does not display alert', () => {
expect(findAlert().exists()).toBe(false);
});
});
describe('if load fails, alert', () => {
@@ -90,19 +66,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
});
it('is displayed', () => {
expect(findAlert().exists()).toBe(true);
});
it('shows error message', () => {
expect(findAlert().text()).toContain('Failed to load model candidates with error: Failure!');
});
it('is not dismissible', () => {
expect(findAlert().props('dismissible')).toBe(false);
});
it('is of variant danger', () => {
expect(findAlert().attributes('variant')).toBe('danger');
expect(findSearchableList().props('errorMessage')).toBe(
'Failed to load model candidates with error: Failure!',
);
});
it('error is logged in sentry', () => {
@@ -116,21 +82,11 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
await waitForPromises();
});
it('displays package registry list', () => {
expect(findRegistryList().exists()).toEqual(true);
it('Passes items to list', () => {
expect(findSearchableList().props('items')).toEqual(graphqlCandidates);
});
it('binds the right props', () => {
expect(findRegistryList().props()).toMatchObject({
items: graphqlCandidates,
pagination: {},
isLoading: false,
hiddenDelete: true,
});
});
it('displays candidate rows', () => {
expect(findAllRows().exists()).toEqual(true);
it('displays package version rows', () => {
expect(findAllRows()).toHaveLength(graphqlCandidates.length);
});
@@ -143,17 +99,9 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
candidate: expect.objectContaining(graphqlCandidates[1]),
});
});
it('does not display loader', () => {
expect(findLoader().exists()).toBe(false);
});
it('does not display empty message', () => {
expect(findAlert().exists()).toBe(false);
});
});
describe('when user interacts with pagination', () => {
describe('when list requests update', () => {
const resolver = jest.fn().mockResolvedValue(modelCandidatesQuery());
beforeEach(async () => {
@@ -161,22 +109,18 @@ describe('ml/model_registry/components/candidate_list.vue', () => {
await waitForPromises();
});
it('when list emits next-page fetches the next set of records', async () => {
findRegistryList().vm.$emit('next-page');
it('when list emits fetch-page fetches the next set of records', async () => {
findSearchableList().vm.$emit('fetch-page', {
after: 'eyJpZCI6IjIifQ',
first: 30,
id: 'gid://gitlab/Ml::Model/2',
});
await waitForPromises();
expect(resolver).toHaveBeenLastCalledWith(
expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }),
);
});
it('when list emits prev-page fetches the prev set of records', async () => {
findRegistryList().vm.$emit('prev-page');
await waitForPromises();
expect(resolver).toHaveBeenLastCalledWith(
expect.objectContaining({ before: graphqlPageInfo.startCursor, last: GRAPHQL_PAGE_SIZE }),
);
});
});
});
View File
@@ -1,13 +1,11 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlAlert } from '@gitlab/ui';
import * as Sentry from '~/sentry/sentry_browser_wrapper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ModelVersionList from '~/ml/model_registry/components/model_version_list.vue';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import SearchableList from '~/ml/model_registry/components/searchable_list.vue';
import ModelVersionRow from '~/ml/model_registry/components/model_version_row.vue';
import getModelVersionsQuery from '~/ml/model_registry/graphql/queries/get_model_versions.query.graphql';
import EmptyState from '~/ml/model_registry/components/empty_state.vue';
@@ -25,11 +23,8 @@ describe('ModelVersionList', () => {
let wrapper;
let apolloProvider;
const findAlert = () => wrapper.findComponent(GlAlert);
const findLoader = () => wrapper.findComponent(PackagesListLoader);
const findRegistryList = () => wrapper.findComponent(RegistryList);
const findSearchableList = () => wrapper.findComponent(SearchableList);
const findEmptyState = () => wrapper.findComponent(EmptyState);
const findListRow = () => wrapper.findComponent(ModelVersionRow);
const findAllRows = () => wrapper.findAllComponents(ModelVersionRow);
const mountComponent = ({
@@ -39,15 +34,12 @@ describe('ModelVersionList', () => {
const requestHandlers = [[getModelVersionsQuery, resolver]];
apolloProvider = createMockApollo(requestHandlers);
wrapper = shallowMountExtended(ModelVersionList, {
wrapper = mountExtended(ModelVersionList, {
apolloProvider,
propsData: {
modelId: 2,
...props,
},
stubs: {
RegistryList,
},
});
};
@@ -65,22 +57,6 @@ describe('ModelVersionList', () => {
it('shows empty state', () => {
expect(findEmptyState().props('entityType')).toBe(MODEL_ENTITIES.modelVersion);
});
it('does not display loader', () => {
expect(findLoader().exists()).toBe(false);
});
it('does not display rows', () => {
expect(findListRow().exists()).toBe(false);
});
it('does not display registry list', () => {
expect(findRegistryList().exists()).toBe(false);
});
it('does not display alert', () => {
expect(findAlert().exists()).toBe(false);
});
});
describe('if load fails, alert', () => {
@@ -92,19 +68,9 @@ describe('ModelVersionList', () => {
});
it('is displayed', () => {
expect(findAlert().exists()).toBe(true);
});
it('shows error message', () => {
expect(findAlert().text()).toContain('Failed to load model versions with error: Failure!');
});
it('is not dismissible', () => {
expect(findAlert().props('dismissible')).toBe(false);
});
it('is of variant danger', () => {
expect(findAlert().attributes('variant')).toBe('danger');
expect(findSearchableList().props('errorMessage')).toBe(
'Failed to load model versions with error: Failure!',
);
});
it('error is logged in sentry', () => {
@@ -118,21 +84,11 @@ describe('ModelVersionList', () => {
await waitForPromises();
});
it('displays package registry list', () => {
expect(findRegistryList().exists()).toEqual(true);
});
it('binds the right props', () => {
expect(findRegistryList().props()).toMatchObject({
items: graphqlModelVersions,
pagination: {},
isLoading: false,
hiddenDelete: true,
});
it('Passes items to list', () => {
expect(findSearchableList().props('items')).toEqual(graphqlModelVersions);
});
it('displays package version rows', () => {
expect(findAllRows().exists()).toEqual(true);
expect(findAllRows()).toHaveLength(graphqlModelVersions.length);
});
@@ -145,17 +101,9 @@ describe('ModelVersionList', () => {
modelVersion: expect.objectContaining(graphqlModelVersions[1]),
});
});
it('does not display loader', () => {
expect(findLoader().exists()).toBe(false);
});
it('does not display empty state', () => {
expect(findEmptyState().exists()).toBe(false);
});
});
describe('when user interacts with pagination', () => {
describe('when list requests update', () => {
const resolver = jest.fn().mockResolvedValue(modelVersionsQuery());
beforeEach(async () => {
@@ -163,22 +111,18 @@ describe('ModelVersionList', () => {
await waitForPromises();
});
it('when list emits next-page fetches the next set of records', async () => {
findRegistryList().vm.$emit('next-page');
it('when list emits fetch-page fetches the next set of records', async () => {
findSearchableList().vm.$emit('fetch-page', {
after: 'eyJpZCI6IjIifQ',
first: 30,
id: 'gid://gitlab/Ml::Model/2',
});
await waitForPromises();
expect(resolver).toHaveBeenLastCalledWith(
expect.objectContaining({ after: graphqlPageInfo.endCursor, first: GRAPHQL_PAGE_SIZE }),
);
});
it('when list emits prev-page fetches the prev set of records', async () => {
findRegistryList().vm.$emit('prev-page');
await waitForPromises();
expect(resolver).toHaveBeenLastCalledWith(
expect.objectContaining({ before: graphqlPageInfo.startCursor, last: GRAPHQL_PAGE_SIZE }),
);
});
});
});
View File
@@ -0,0 +1,170 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import SearchableList from '~/ml/model_registry/components/searchable_list.vue';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import { defaultPageInfo } from '../mock_data';
describe('ml/model_registry/components/searchable_list.vue', () => {
let wrapper;
const findAlert = () => wrapper.findComponent(GlAlert);
const findLoader = () => wrapper.findComponent(PackagesListLoader);
const findRegistryList = () => wrapper.findComponent(RegistryList);
const findEmptyState = () => wrapper.findByTestId('empty-state-slot');
const findFirstRow = () => wrapper.findByTestId('element');
const findRows = () => wrapper.findAllByTestId('element');
const defaultProps = {
items: ['a', 'b', 'c'],
pageInfo: defaultPageInfo,
isLoading: false,
errorMessage: '',
};
const mountComponent = (props = {}) => {
wrapper = shallowMountExtended(SearchableList, {
propsData: {
...defaultProps,
...props,
},
stubs: {
RegistryList,
},
slots: {
'empty-state': '<div data-testid="empty-state-slot">This is empty</div>',
item: '<div data-testid="element"></div>',
},
});
};
describe('when list is loaded and has no data', () => {
beforeEach(() => mountComponent({ items: [] }));
it('shows empty state', () => {
expect(findEmptyState().text()).toBe('This is empty');
});
it('does not display loader', () => {
expect(findLoader().exists()).toBe(false);
});
it('does not display rows', () => {
expect(findFirstRow().exists()).toBe(false);
});
it('does not display registry list', () => {
expect(findRegistryList().exists()).toBe(false);
});
it('does not display alert', () => {
expect(findAlert().exists()).toBe(false);
});
});
describe('if errorMessage', () => {
beforeEach(() => mountComponent({ errorMessage: 'Failure!' }));
it('shows error message', () => {
expect(findAlert().text()).toContain('Failure!');
});
it('is not dismissible', () => {
expect(findAlert().props('dismissible')).toBe(false);
});
it('is of variant danger', () => {
expect(findAlert().attributes('variant')).toBe('danger');
});
it('hides loader', () => {
expect(findLoader().exists()).toBe(false);
});
it('hides registry list', () => {
expect(findRegistryList().exists()).toBe(false);
});
it('hides empty state', () => {
expect(findEmptyState().exists()).toBe(false);
});
});
describe('if loading', () => {
beforeEach(() => mountComponent({ isLoading: true }));
it('shows loader', () => {
expect(findLoader().exists()).toBe(true);
});
it('hides error message', () => {
expect(findAlert().exists()).toBe(false);
});
it('hides registry list', () => {
expect(findRegistryList().exists()).toBe(false);
});
it('hides empty state', () => {
expect(findEmptyState().exists()).toBe(false);
});
});
describe('when list is loaded with data', () => {
beforeEach(() => mountComponent());
it('displays package registry list', () => {
expect(findRegistryList().exists()).toEqual(true);
});
it('binds the right props', () => {
expect(findRegistryList().props()).toMatchObject({
items: ['a', 'b', 'c'],
isLoading: false,
pagination: defaultPageInfo,
hiddenDelete: true,
});
});
it('displays package version rows', () => {
expect(findRows().exists()).toEqual(true);
expect(findRows()).toHaveLength(3);
});
it('does not display loader', () => {
expect(findLoader().exists()).toBe(false);
});
it('does not display empty state', () => {
expect(findEmptyState().exists()).toBe(false);
});
});
describe('when user interacts with pagination', () => {
beforeEach(() => mountComponent());
it('when list emits next-page emits fetchPage with correct pageInfo', () => {
findRegistryList().vm.$emit('next-page');
const expectedNewPageInfo = {
after: 'eyJpZCI6IjIifQ',
first: 30,
last: null,
};
expect(wrapper.emitted('fetch-page')).toEqual([[expectedNewPageInfo]]);
});
it('when list emits prev-page emits fetchPage with correct pageInfo', () => {
findRegistryList().vm.$emit('prev-page');
const expectedNewPageInfo = {
before: 'eyJpZCI6IjE2In0',
first: null,
last: 30,
};
expect(wrapper.emitted('fetch-page')).toEqual([[expectedNewPageInfo]]);
});
});
});
View File
@@ -43,15 +43,15 @@ RSpec.describe Resolvers::DesignManagement::VersionsResolver do
context 'loading associations' do
it 'prevents N+1 queries when loading author' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
resolve_versions(object).items.map(&:author)
end.count
end
create_list(:design_version, 3, issue: issue)
expect do
resolve_versions(object).items.map(&:author)
end.not_to exceed_all_query_limit(control_count)
end.not_to exceed_all_query_limit(control)
end
end
end
View File
@@ -78,7 +78,7 @@ RSpec.describe Resolvers::GroupLabelsResolver do
Gitlab::SafeRequestStore.ensure_request_store do
resolve_labels(group, params).to_a
end
end.not_to exceed_query_limit(control.count)
end.not_to exceed_query_limit(control)
end
end
View File
@@ -78,7 +78,7 @@ RSpec.describe Resolvers::LabelsResolver do
Gitlab::SafeRequestStore.ensure_request_store do
resolve_labels(project, params).to_a
end
end.not_to exceed_query_limit(control.count)
end.not_to exceed_query_limit(control)
end
end
View File
@@ -159,17 +159,17 @@ RSpec.describe GitlabSchema.types['CurrentUserTodos'] do
# AND ("todos"."state" IN ('done','pending'))
# AND "todos"."target_id" = merge_request
# AND "todos"."target_type" = 'MergeRequest' ORDER BY "todos"."id" DESC
baseline = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
execute_query(query_type, graphql: base_query)
end
expect do
execute_query(query_type, graphql: query_without_state_arguments)
end.not_to exceed_query_limit(baseline) # at present this is 3
end.not_to exceed_query_limit(control) # at present this is 3
expect do
execute_query(query_type, graphql: with_state_arguments)
end.not_to exceed_query_limit(baseline.count + 1)
end.not_to exceed_query_limit(control).with_threshold(1)
end
it 'returns correct data' do
View File
@@ -114,13 +114,13 @@ RSpec.describe GroupsHelper, feature_category: :groups_and_projects do
end
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
helper.group_title(nested_group)
end
expect do
helper.group_title(very_deep_nested_group)
end.not_to exceed_query_limit(control_count)
end.not_to exceed_query_limit(control)
end
end
View File
@@ -656,12 +656,12 @@ RSpec.describe SearchHelper, feature_category: :global_search do
@project = create(:project)
description = FFaker::Lorem.characters(210)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { search_md_sanitize(description) }.count
control = ActiveRecord::QueryRecorder.new(skip_cached: false) { search_md_sanitize(description) }
issues = create_list(:issue, 4, project: @project)
description_with_issues = description + ' ' + issues.map { |issue| "##{issue.iid}" }.join(' ')
expect { search_md_sanitize(description_with_issues) }.not_to exceed_all_query_limit(control_count)
expect { search_md_sanitize(description_with_issues) }.not_to exceed_all_query_limit(control)
end
end
View File
@@ -433,16 +433,16 @@ RSpec.describe Atlassian::JiraConnect::Client, feature_category: :integrations d
end
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
subject.send(:store_dev_info, project: project, merge_requests: merge_requests)
end.count
end
merge_requests << create(:merge_request, :unique_branches, source_project: project)
expect do
subject.send(:store_dev_info, project: project,
merge_requests: merge_requests)
end.not_to exceed_query_limit(control_count)
end.not_to exceed_query_limit(control)
end
end
View File
@@ -24,9 +24,9 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
subject { described_class.represent(merge_requests, user_notes_count: user_notes_count).as_json }
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
described_class.represent(merge_requests, user_notes_count: user_notes_count)
end.count
end
merge_requests << create(:merge_request, :unique_branches)
@ -35,7 +35,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity, feature_c
records: merge_requests, associations: { merge_request_reviewers: :reviewer }
).call
expect { subject }.not_to exceed_query_limit(control_count)
expect { subject }.not_to exceed_query_limit(control)
end
it 'uses counts from user_notes_count' do

View File

@@ -68,20 +68,20 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
end
it 'avoids N+1 database queries' do
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
subject.dump(destination, backup_id)
end.count
end
create_list(:project, 2, :repository)
create_list(:snippet, 2, :repository)
# The number of expected queries is 2 more than control_count
# The number of expected queries is 2 more than control.count
# to account for the queries for project.design_management_repository
# for each project.
# We are using 2 projects here.
expect do
subject.dump(destination, backup_id)
end.not_to exceed_query_limit(control_count + 2)
end.not_to exceed_query_limit(control).with_threshold(2)
end
describe 'storages' do

View File

@@ -47,13 +47,13 @@ RSpec.describe Banzai::Filter::CustomEmojiFilter, feature_category: :team_planni
it 'does not do N+1 query' do
create(:custom_emoji, name: 'party-parrot', group: group)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter('<p>:tanuki:</p>')
end
expect do
filter('<p>:tanuki:</p> <p>:party-parrot:</p>')
end.not_to exceed_all_query_limit(control_count.count)
end.not_to exceed_all_query_limit(control)
end
it 'uses custom emoji from ancestor group' do

View File

@@ -259,15 +259,15 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor
# warm up
filter(link, context)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter(link, context)
end.count
end
expect(control_count).to eq 12
expect(control.count).to eq 12
expect do
filter("#{link} #{link2}", context)
end.not_to exceed_all_query_limit(control_count)
end.not_to exceed_all_query_limit(control)
end
end
end
@@ -419,15 +419,15 @@ RSpec.describe Banzai::Filter::IssuableReferenceExpansionFilter, feature_categor
# warm up
filter(link, context)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
filter(link, context)
end.count
end
expect(control_count).to eq 10
expect(control.count).to eq 10
expect do
filter("#{link} #{link2}", context)
end.not_to exceed_all_query_limit(control_count)
end.not_to exceed_all_query_limit(control)
end
end
end

View File

@@ -230,11 +230,11 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = alert_reference.to_s
max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
end.count
end
expect(max_count).to eq 1
expect(control.count).to eq 1
markdown = "#{alert_reference} ^alert#2 ^alert#3 ^alert#4 #{alert2_reference}"
@@ -248,11 +248,9 @@ RSpec.describe Banzai::Filter::References::AlertReferenceFilter, feature_categor
# 1x2 for alerts in each project
# Total == 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
max_count += 6
expect do
reference_filter(markdown)
end.not_to exceed_all_query_limit(max_count)
end.not_to exceed_all_query_limit(control).with_threshold(6)
end
end
end

View File

@@ -283,11 +283,11 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = commit_reference.to_s
max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
end.count
end
expect(max_count).to eq 0
expect(control.count).to eq 0
markdown = "#{commit_reference} 8b95f2f1 8b95f2f2 8b95f2f3 #{commit2_reference} #{commit3_reference}"
@@ -298,11 +298,9 @@ RSpec.describe Banzai::Filter::References::CommitReferenceFilter, feature_catego
# 1 for loading the namespaces associated to the project
# 1 for loading the routes associated with the namespace
# Total = 5
max_count += 5
expect do
reference_filter(markdown)
end.not_to exceed_all_query_limit(max_count)
end.not_to exceed_all_query_limit(control).with_threshold(5)
end
end
end

View File

@@ -240,7 +240,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego
* #1[not a valid reference.gif]
MD
baseline = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) }
control = ActiveRecord::QueryRecorder.new { process(one_ref_per_project) }
# each project mentioned requires 2 queries:
#
@@ -253,7 +253,7 @@ RSpec.describe Banzai::Filter::References::DesignReferenceFilter, feature_catego
# In addition there is a 1 query overhead for all the projects at the
# start. Currently, the baseline for 2 projects is `2 * 2 + 1 = 5` queries
#
expect { process(multiple_references) }.not_to exceed_query_limit(baseline.count)
expect { process(multiple_references) }.not_to exceed_query_limit(control)
end
end

View File

@@ -338,9 +338,9 @@ RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter, feature
single_reference = "External Issue #{issue1.to_reference}"
multiple_references = "External Issues #{issue1.to_reference} and #{issue2.to_reference}"
control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
end

View File

@@ -41,9 +41,9 @@ RSpec.describe Banzai::Filter::References::IssueReferenceFilter, feature_categor
single_reference = "Issue #{issue.to_reference}"
multiple_references = "Issues #{issue.to_reference} and #{another_issue.to_reference}"
control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end

View File

@@ -35,13 +35,13 @@ RSpec.describe Banzai::Filter::References::LabelReferenceFilter, feature_categor
# Run this once to establish a baseline
reference_filter("Label #{reference}")
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter("Label #{reference}")
end
labels_markdown = Array.new(10, "Label #{reference}").join('\n')
expect { reference_filter(labels_markdown) }.not_to exceed_all_query_limit(control_count.count)
expect { reference_filter(labels_markdown) }.not_to exceed_all_query_limit(control)
end
it 'includes a data-project attribute' do

View File

@@ -26,9 +26,9 @@ RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter, feature_
single_reference = "Merge request #{merge.to_reference}"
multiple_references = "Merge requests #{merge.to_reference} and #{another_merge.to_reference}"
control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end

View File

@@ -115,17 +115,17 @@ RSpec.describe Banzai::Filter::References::ProjectReferenceFilter, feature_categ
# warm up first
reference_filter(markdown)
max_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
end.count
end
expect(max_count).to eq 2
expect(control.count).to eq 2
markdown = "#{normal_project_reference} #{invalidate_reference(normal_project_reference)} #{group_project_reference} #{nested_project_reference}"
expect do
reference_filter(markdown)
end.not_to exceed_all_query_limit(max_count)
end.not_to exceed_all_query_limit(control)
end
end
end

View File

@@ -70,13 +70,13 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te
filter_single = filter_class.new(doc_single, project: project)
cache_single = described_class.new(filter_single, { project: project }, {})
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
cache_single.load_references_per_parent(filter_single.nodes)
cache_single.load_parent_per_reference
cache_single.load_records_per_parent
end.count
end
expect(control_count).to eq 3
expect(control.count).to eq 3
# Since this is an issue filter that is not batching issue queries
# across projects, we have to account for that.
# 1 for routes to find routes.source_id of projects matching paths
@@ -88,13 +88,11 @@ RSpec.describe Banzai::Filter::References::ReferenceCache, feature_category: :te
# 1x2 for groups
# 1x2 for work_item_types
# Total = 11
max_count = control_count + 8
expect do
cache.load_references_per_parent(filter.nodes)
cache.load_parent_per_reference
cache.load_records_per_parent
end.not_to exceed_query_limit(max_count)
end.not_to exceed_query_limit(control).with_threshold(8)
end
end

View File

@@ -229,11 +229,11 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
it 'does not have N+1 per multiple references per project', :use_sql_query_cache do
markdown = "#{reference} $9999990"
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
reference_filter(markdown)
end.count
end
expect(control_count).to eq 1
expect(control.count).to eq 1
markdown = "#{reference} $9999990 $9999991 $9999992 $9999993 #{reference2} something/cool$12"
@@ -247,11 +247,9 @@ RSpec.describe Banzai::Filter::References::SnippetReferenceFilter, feature_categ
# 1x2 for snippets in each project == 2
# Total = 7
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/330359
max_count = control_count + 6
expect do
reference_filter(markdown)
end.not_to exceed_all_query_limit(max_count)
end.not_to exceed_all_query_limit(control).with_threshold(6)
end
end
end

View File

@@ -306,9 +306,9 @@ RSpec.describe Banzai::Filter::References::WorkItemReferenceFilter, feature_cate
single_reference = "Work item #{work_item.to_reference}"
multiple_references = "Work items #{work_item.to_reference} and #{another_work_item.to_reference}"
control_count = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }.count
control = ActiveRecord::QueryRecorder.new { reference_filter(single_reference).to_html }
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control_count)
expect { reference_filter(multiple_references).to_html }.not_to exceed_query_limit(control)
end
end
end

View File

@@ -45,9 +45,9 @@ RSpec.describe Banzai::IssuableExtractor, feature_category: :team_planning do
second_call_queries = ActiveRecord::QueryRecorder.new do
extractor.extract([issue_link, work_item_link, merge_request_link])
end.count
end
expect(second_call_queries).to eq 0
expect(second_call_queries.count).to eq 0
end
end
end

View File

@@ -37,11 +37,11 @@ RSpec.describe Banzai::ReferenceParser::SnippetParser, feature_category: :team_p
# Run this once to establish a baseline
visible_references(:public)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
subject.nodes_visible_to_user(user, [link])
end
expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control_count.count)
expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control)
end
it 'creates a reference for guest for a public snippet' do

View File

@@ -184,14 +184,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_inte
create(:ci_build, :deploy_to_production, :with_deployment, user: user, project: project, pipeline: pipeline)
# We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy`
control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json }.count
control = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).to_json }
# Adding more builds to the pipeline and serializing the data again
create_list(:ci_build, 3, user: user, project: project, pipeline: pipeline)
create(:ci_build, :start_review_app, :with_deployment, user: user, project: project, pipeline: pipeline)
create(:ci_build, :stop_review_app, :with_deployment, user: user, project: project, pipeline: pipeline)
expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control_count)
expect { described_class.build(pipeline.reload).to_json }.not_to exceed_query_limit(control)
end
it "with multiple retried builds" do
@@ -201,14 +201,14 @@ RSpec.describe Gitlab::DataBuilder::Pipeline, feature_category: :continuous_inte
create(:ci_build, :deploy_to_production, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
# We need `.to_json` as the build hook data is wrapped within `Gitlab::Lazy`
control_count = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json }.count
control = ActiveRecord::QueryRecorder.new { described_class.build(pipeline.reload).with_retried_builds.to_json }
# Adding more builds to the pipeline and serializing the data again
create_list(:ci_build, 3, :retried, user: user, project: project, pipeline: pipeline)
create(:ci_build, :start_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
create(:ci_build, :stop_review_app, :retried, :with_deployment, user: user, project: project, pipeline: pipeline)
expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control_count)
expect { described_class.build(pipeline.reload).with_retried_builds.to_json }.not_to exceed_query_limit(control)
end
end
end

View File

@@ -1062,14 +1062,14 @@ RSpec.describe Gitlab::GitAccess, :aggregate_failures, feature_category: :system
# additional queries.
access.check('git-receive-pack', changes)
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
access.check('git-receive-pack', changes)
end
changes = ['6f6d7e7ed 570e7b2ab refs/heads/master', '6f6d7e7ed 570e7b2ab refs/heads/feature']
# There is still an N+1 query with protected branches
expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control_count).with_threshold(2)
expect { access.check('git-receive-pack', changes) }.not_to exceed_query_limit(control).with_threshold(2)
end
it 'raises TimeoutError when #check_access! raises a timeout error' do

View File

@@ -1,140 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :do_not_stub_snowplow_by_default, feature_category: :application_instrumentation do
let(:emitter) { SnowplowTracker::Emitter.new(endpoint: 'localhost', options: { buffer_size: 1 }) }
let(:tracker) do
SnowplowTracker::Tracker
.new(
emitters: [emitter],
subject: SnowplowTracker::Subject.new,
namespace: 'namespace',
app_id: 'app_id'
)
end
before do
stub_application_setting(snowplow_app_id: '_abc123_')
end
around do |example|
freeze_time { example.run }
end
context 'when snowplow is enabled' do
before do
allow(SnowplowTracker::AsyncEmitter)
.to receive(:new)
.with(endpoint: endpoint,
options:
{
protocol: 'https',
on_success: subject.method(:increment_successful_events_emissions),
on_failure: subject.method(:failure_callback)
}
).and_return(emitter)
allow(SnowplowTracker::Tracker)
.to receive(:new)
.with(
emitters: [emitter],
subject: an_instance_of(SnowplowTracker::Subject),
namespace: described_class::SNOWPLOW_NAMESPACE,
app_id: '_abc123_'
).and_return(tracker)
end
describe '#event' do
let(:endpoint) { 'localhost:9091' }
let(:event_params) do
{
category: 'category',
action: 'action',
label: 'label',
property: 'property',
value: 1.5,
context: nil,
tstamp: (Time.now.to_f * 1000).to_i
}
end
context 'when on gitlab.com environment' do
let(:endpoint) { 'db-snowplow.trx.gitlab.net' }
before do
stub_application_setting(snowplow_database_collector_hostname: endpoint)
end
it 'sends event to tracker' do
allow(Gitlab).to receive(:com?).and_return(true)
allow(tracker).to receive(:track_struct_event).and_call_original
subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
expect(tracker).to have_received(:track_struct_event).with(event_params)
end
end
it 'sends event to tracker' do
allow(tracker).to receive(:track_struct_event).and_call_original
subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
expect(tracker).to have_received(:track_struct_event).with(event_params)
end
it 'increases total snowplow events counter' do
counter = double
expect(counter).to receive(:increment)
expect(Gitlab::Metrics).to receive(:counter)
.with(:gitlab_db_events_snowplow_events_total, 'Number of Snowplow events')
.and_return(counter)
subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
end
end
end
context 'for callbacks' do
describe 'on success' do
it 'increases gitlab_successful_snowplow_events_total counter' do
counter = double
expect(counter).to receive(:increment).with({}, 2)
expect(Gitlab::Metrics).to receive(:counter)
.with(
:gitlab_db_events_snowplow_successful_events_total,
'Number of successful Snowplow events emissions').and_return(counter)
subject.method(:increment_successful_events_emissions).call(2)
end
end
describe 'on failure' do
it 'increases gitlab_failed_snowplow_events_total counter and logs failures', :aggregate_failures do
counter = double
error_message = "Issue database_event_update failed to be reported to collector at localhost:9091"
failures = [{ "e" => "se",
"se_ca" => "Issue",
"se_la" => "issues",
"se_ac" => "database_event_update" }]
allow(Gitlab::Metrics).to receive(:counter)
.with(
:gitlab_db_events_snowplow_successful_events_total,
'Number of successful Snowplow events emissions').and_call_original
expect(Gitlab::AppLogger).to receive(:error).with(error_message)
expect(counter).to receive(:increment).with({}, 1)
expect(Gitlab::Metrics).to receive(:counter)
.with(
:gitlab_db_events_snowplow_failed_events_total,
'Number of failed Snowplow events emissions').and_return(counter)
subject.method(:failure_callback).call(2, failures)
end
end
end
end

View File

@@ -165,38 +165,6 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
end
end
describe '.database_event' do
context 'when the action is not passed in as a string' do
it 'allows symbols' do
expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
described_class.database_event('category', :some_action)
end
it 'allows nil' do
expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
described_class.database_event('category', nil)
end
it 'allows integers' do
expect(Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception)
described_class.database_event('category', 1)
end
end
it_behaves_like 'rescued error raised by destination class' do
let(:category) { 'Issue' }
let(:action) { 'created' }
let(:destination_class) { Gitlab::Tracking::Destinations::DatabaseEventsSnowplow }
subject(:tracking_method) { described_class.database_event(category, action) }
end
it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::DatabaseEventsSnowplow, :database_event
end
describe '.event' do
context 'when the action is not passed in as a string' do
it 'allows symbols' do

View File

@@ -2461,23 +2461,27 @@ RSpec.describe Notify, feature_category: :code_review_workflow do
end
it 'avoids N+1 cached queries when rendering html', :use_sql_query_cache, :request_store do
control_count = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do
subject.html_part
end
create_list(:diff_note_on_merge_request, 3, review: review, project: project, author: review.author, noteable: merge_request)
expect { described_class.new_review_email(recipient.id, review.id).html_part }.not_to exceed_all_query_limit(control_count)
expect do
described_class.new_review_email(recipient.id, review.id).html_part
end.not_to exceed_all_query_limit(control)
end
it 'avoids N+1 cached queries when rendering text', :use_sql_query_cache, :request_store do
control_count = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do
control = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, skip_cached: false) do
subject.text_part
end
create_list(:diff_note_on_merge_request, 3, review: review, project: project, author: review.author, noteable: merge_request)
expect { described_class.new_review_email(recipient.id, review.id).text_part }.not_to exceed_all_query_limit(control_count)
expect do
described_class.new_review_email(recipient.id, review.id).text_part
end.not_to exceed_all_query_limit(control)
end
end

View File

@@ -81,20 +81,4 @@ RSpec.describe Analytics::CycleAnalytics::Stage, feature_category: :value_stream
expect(current_event_pairs).to eq(expected_event_pairs)
end
end
it_behaves_like 'database events tracking' do
let(:namespace) { create(:group) }
let(:value_stream) { create(:cycle_analytics_value_stream) }
let(:record) { described_class.create!(stage_params) }
let(:update_params) { { name: 'st 2' } }
let(:stage_params) do
{
namespace: namespace,
name: 'st1',
start_event_identifier: :merge_request_created,
end_event_identifier: :merge_request_merged,
group_value_stream_id: value_stream.id
}
end
end
end

View File

@@ -576,9 +576,9 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching,
it 'avoids N+1 queries' do
another_project = create(:project)
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
described_class.ancestor_clusters_for_clusterable(another_project, hierarchy_order: hierarchy_order)
end.count
end
cluster2 = create(:cluster, :provided_by_gcp, :group)
child2 = cluster2.group
@ -587,7 +587,7 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching,
expect do
described_class.ancestor_clusters_for_clusterable(project, hierarchy_order: hierarchy_order)
end.not_to exceed_query_limit(control_count)
end.not_to exceed_query_limit(control)
end
context 'for a group' do

View File

@@ -1,85 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe DatabaseEventTracking, :snowplow, feature_category: :service_ping do
before do
allow(Gitlab::Tracking).to receive(:database_event).and_call_original
end
let(:test_class) do
Class.new(ActiveRecord::Base) do
include DatabaseEventTracking
self.table_name = 'application_setting_terms'
self::SNOWPLOW_ATTRIBUTES = %w[id].freeze # rubocop:disable RSpec/LeakyConstantDeclaration
end
end
subject(:create_test_class_record) { test_class.create!(id: 1, terms: "") }
context 'if event emitter failed' do
before do
allow(Gitlab::Tracking).to receive(:database_event).and_raise(StandardError) # rubocop:disable RSpec/ExpectGitlabTracking
end
it 'tracks the exception' do
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
create_test_class_record
end
end
describe 'event tracking' do
let(:category) { test_class.to_s }
let(:event) { 'database_event' }
it 'when created' do
create_test_class_record
expect_snowplow_event(
tracking_method: :database_event,
category: category,
action: "#{event}_create",
label: 'application_setting_terms',
property: 'create',
namespace: nil,
project: nil,
"id" => 1
)
end
it 'when updated' do
create_test_class_record
test_class.first.update!(id: 3)
expect_snowplow_event(
tracking_method: :database_event,
category: category,
action: "#{event}_update",
label: 'application_setting_terms',
property: 'update',
namespace: nil,
project: nil,
"id" => 3
)
end
it 'when destroyed' do
create_test_class_record
test_class.first.destroy!
expect_snowplow_event(
tracking_method: :database_event,
category: category,
action: "#{event}_destroy",
label: 'application_setting_terms',
property: 'destroy',
namespace: nil,
project: nil,
"id" => 1
)
end
end
end

View File

@@ -89,7 +89,7 @@ RSpec.shared_examples 'routable resource' do
context 'when use_includes: true' do
it 'includes route information when loading records' do
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
described_class.where_full_path_in([record.full_path, record_2.full_path], use_includes: true)
.map(&:route)
end
@@ -103,7 +103,7 @@ RSpec.shared_examples 'routable resource' do
record_4.full_path
], use_includes: true)
.map(&:route)
end.to issue_same_number_of_queries_as(control_count)
end.to issue_same_number_of_queries_as(control)
end
end
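The stricter issue_same_number_of_queries_as matcher accepts a recorder the same way, asserting the same number of queries as the control rather than an upper bound. A sketch (lookup_paths and the path lists are hypothetical):

    control = ActiveRecord::QueryRecorder.new { lookup_paths(two_paths) }
    expect { lookup_paths(four_paths) }.to issue_same_number_of_queries_as(control)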

View File

@@ -93,12 +93,4 @@ RSpec.describe MergeRequest::Metrics do
end
end
end
it_behaves_like 'database events tracking', feature_category: :service_ping do
let(:merge_request) { create(:merge_request) }
let(:record) { merge_request.metrics }
let(:namespace) { nil }
let(:update_params) { { pipeline_id: 1, updated_at: Date.tomorrow } }
end
end

View File

@@ -897,12 +897,14 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
it 'does not cause N+1 query in fetching registries' do
stub_container_registry_tags(repository: :any, tags: [])
control_count = ActiveRecord::QueryRecorder.new { namespace.any_project_has_container_registry_tags? }.count
control = ActiveRecord::QueryRecorder.new { namespace.any_project_has_container_registry_tags? }
other_repositories = create_list(:container_repository, 2)
create(:project, namespace: namespace, container_repositories: other_repositories)
expect { namespace.first_project_with_container_registry_tags }.not_to exceed_query_limit(control_count + 1)
expect do
namespace.first_project_with_container_registry_tags
end.not_to exceed_query_limit(control).with_threshold(1)
end
end

View File

@@ -448,13 +448,13 @@ RSpec.describe Note, feature_category: :team_planning do
# Project authorization checks are cached, establish a baseline
retrieve_participants
control_count = ActiveRecord::QueryRecorder.new do
control = ActiveRecord::QueryRecorder.new do
retrieve_participants
end
create(:note_on_commit, project: note.project, note: 'another note', noteable_id: commit.id)
expect { retrieve_participants }.not_to exceed_query_limit(control_count)
expect { retrieve_participants }.not_to exceed_query_limit(control)
end
end

Some files were not shown because too many files have changed in this diff.