Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2021-04-19 15:09:08 +00:00
parent c6af94ea4e
commit 6a3c4476fa
87 changed files with 856 additions and 276 deletions

View File

@ -27,7 +27,7 @@
.rails-cache:
cache:
key: "rails-v4"
key: "rails-v5"
paths:
- vendor/ruby/
- vendor/gitaly-ruby/

View File

@ -864,11 +864,6 @@ Style/RaiseArgs:
Enabled: false
EnforcedStyle: exploded
# Offense count: 73
# Cop supports --auto-correct.
Style/RedundantAssignment:
Enabled: false
# Offense count: 2
# Cop supports --auto-correct.
Style/RedundantBegin:
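The `Style/RedundantAssignment` entry removed above is the cop whose offenses the rest of this commit resolves: a method that assigned its final expression to a local variable only to return that variable on the next line now returns the expression directly. A minimal before/after sketch of the pattern, using an illustrative class rather than one from the changeset:

```ruby
# Illustrative only: mirrors the refactor applied to the finder classes below.
class ExampleFinder
  def initialize(items)
    @items = items
  end

  # Before: flagged by Style/RedundantAssignment -- the trailing `items`
  # line exists only to return the variable assigned just above it.
  def execute_with_redundant_assignment
    items = @items.compact
    items = items.sort
    items
  end

  # After: the last filter call is itself the return value.
  def execute
    items = @items.compact
    items.sort
  end
end

ExampleFinder.new([3, nil, 1, 2]).execute # => [1, 2, 3]
```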

View File

@ -17,16 +17,19 @@ class Groups::EmailCampaignsController < Groups::ApplicationController
private
def track_click
-    data = {
-      namespace_id: group.id,
-      track: @track.to_s,
-      series: @series,
-      subject_line: subject_line(@track, @series)
-    }
-    context = SnowplowTracker::SelfDescribingJson.new(EMAIL_CAMPAIGNS_SCHEMA_URL, data)
-    ::Gitlab::Tracking.event(self.class.name, 'click', context: [context])
+    if Gitlab.com?
+      data = {
+        namespace_id: group.id,
+        track: @track.to_s,
+        series: @series,
+        subject_line: subject_line(@track, @series)
+      }
+      context = SnowplowTracker::SelfDescribingJson.new(EMAIL_CAMPAIGNS_SCHEMA_URL, data)
+      ::Gitlab::Tracking.event(self.class.name, 'click', context: [context])
+    else
+      ::Users::InProductMarketingEmail.save_cta_click(current_user, @track, @series)
+    end
end
def redirect_link

View File

@ -13,8 +13,7 @@ class AwardEmojisFinder
def execute
awards = awardable.award_emoji
awards = by_name(awards)
awards = by_awarded_by(awards)
awards
by_awarded_by(awards)
end
private

View File

@ -11,8 +11,7 @@ class BranchesFinder < GitRefsFinder
else
branches = repository.branches_sorted_by(sort)
branches = by_search(branches)
branches = by_names(branches)
branches
by_names(branches)
end
end

View File

@ -35,8 +35,7 @@ module Ci
return Ci::DailyBuildGroupReportResult.none unless query_allowed?
collection = Ci::DailyBuildGroupReportResult.by_projects(params[:project])
collection = filter_report_results(collection)
collection
filter_report_results(collection)
end
private
@ -51,8 +50,7 @@ module Ci
collection = by_dates(collection)
collection = sort(collection)
collection = limit_by(collection)
collection
limit_by(collection)
end
def by_coverage(items)

View File

@ -11,8 +11,7 @@ module Ci
def execute
variables = resource.variables
variables = by_key(variables)
variables = by_environment_scope(variables)
variables
by_environment_scope(variables)
end
private

View File

@ -11,9 +11,7 @@ class ContextCommitsFinder
def execute
commits = init_collection
commits = filter_existing_commits(commits)
commits
filter_existing_commits(commits)
end
private
@ -21,19 +19,15 @@ class ContextCommitsFinder
attr_reader :project, :merge_request, :search, :limit, :offset
def init_collection
commits =
if search.present?
search_commits
else
project.repository.commits(merge_request.target_branch, { limit: limit, offset: offset })
end
commits
if search.present?
search_commits
else
project.repository.commits(merge_request.target_branch, { limit: limit, offset: offset })
end
end
def filter_existing_commits(commits)
commits.select! { |commit| already_included_ids.exclude?(commit.id) }
commits
end

View File

@ -33,9 +33,7 @@ class DeploymentsFinder
items = by_environment(items)
items = by_status(items)
items = preload_associations(items)
items = sort(items)
items
sort(items)
end
private

View File

@ -15,9 +15,7 @@ class EnvironmentsFinder
environments = by_search(environments)
# Raises InvalidStatesError if params[:states] contains invalid states.
environments = by_states(environments)
environments
by_states(environments)
end
private

View File

@ -48,8 +48,7 @@ class GroupProjectsFinder < ProjectsFinder
def filter_projects(collection)
projects = super
projects = by_feature_availability(projects)
projects
by_feature_availability(projects)
end
def limit(collection)

View File

@ -119,9 +119,7 @@ class IssuableFinder
# https://www.postgresql.org/docs/current/static/queries-with.html
items = by_search(items)
items = sort(items)
items
sort(items)
end
def filter_items(items)

View File

@ -81,8 +81,7 @@ class IssuesFinder < IssuableFinder
issues = super
issues = by_due_date(issues)
issues = by_confidential(issues)
issues = by_issue_types(issues)
issues
by_issue_types(issues)
end
def by_confidential(items)

View File

@ -14,9 +14,7 @@ class MergeRequest::MetricsFinder
items = init_collection
items = by_target_project(items)
items = by_merged_after(items)
items = by_merged_before(items)
items
by_merged_before(items)
end
private

View File

@ -39,8 +39,7 @@ module Namespaces
def filter_projects(collection)
collection = by_ids(collection)
collection = by_similarity(collection)
collection
by_similarity(collection)
end
def by_ids(items)

View File

@ -8,8 +8,7 @@ module Packages
def package_files
files = super
files = by_conan_file_type(files)
files = by_conan_package_reference(files)
files
by_conan_package_reference(files)
end
def by_conan_file_type(files)

View File

@ -11,8 +11,7 @@ module Packages
collection = relation.with_container(container)
collection = by_codename(collection)
collection = by_suite(collection)
collection = by_codename_or_suite(collection)
collection
by_codename_or_suite(collection)
end
private

View File

@ -32,8 +32,7 @@ module Packages
packages = filter_with_version(packages)
packages = filter_by_package_type(packages)
packages = filter_by_package_name(packages)
packages = filter_by_status(packages)
packages
filter_by_status(packages)
end
def group_projects_visible_to_current_user

View File

@ -21,9 +21,7 @@ class Packages::PackageFileFinder
def package_files
files = package.package_files
files = by_file_name(files)
files
by_file_name(files)
end
def by_file_name(files)

View File

@ -22,8 +22,7 @@ module Packages
packages = filter_by_package_type(packages)
packages = filter_by_package_name(packages)
packages = filter_by_status(packages)
packages = order_packages(packages)
packages
order_packages(packages)
end
private

View File

@ -26,9 +26,7 @@ class PendingTodosFinder
todos = by_project(todos)
todos = by_target_id(todos)
todos = by_target_type(todos)
todos = by_commit_id(todos)
todos
by_commit_id(todos)
end
def by_project(todos)

View File

@ -12,9 +12,7 @@ module Projects
def execute
export_jobs = project.export_jobs
export_jobs = by_status(export_jobs)
export_jobs
by_status(export_jobs)
end
private

View File

@ -30,9 +30,7 @@ module Projects
relation = by_environment(relation)
relation = by_metric(relation)
relation = by_id(relation)
relation = ordered(relation)
relation
ordered(relation)
end
private

View File

@ -83,8 +83,7 @@ class ProjectsFinder < UnionFinder
collection = by_deleted_status(collection)
collection = by_last_activity_after(collection)
collection = by_last_activity_before(collection)
collection = by_repository_storage(collection)
collection
by_repository_storage(collection)
end
def collection_with_user

View File

@ -36,9 +36,7 @@ class PrometheusMetricsFinder
metrics = by_common(metrics)
metrics = by_ordered(metrics)
metrics = by_identifier(metrics)
metrics = by_id(metrics)
metrics
by_id(metrics)
end
private

View File

@ -20,9 +20,7 @@ class ProtectedBranchesFinder
def execute
protected_branches = project.limited_protected_branches(LIMIT)
protected_branches = by_name(protected_branches)
protected_branches
by_name(protected_branches)
end
private

View File

@ -20,8 +20,7 @@ class ReleasesFinder
releases = get_releases
releases = by_tag(releases)
releases = releases.preloaded if preload
releases = order_releases(releases)
releases
order_releases(releases)
end
private

View File

@ -7,7 +7,6 @@ class TagsFinder < GitRefsFinder
def execute
tags = repository.tags_sorted_by(sort)
tags = by_search(tags)
tags
by_search(tags)
end
end

View File

@ -15,9 +15,7 @@ class UsersStarProjectsFinder
stars = UsersStarProject.all
stars = by_project(stars)
stars = by_search(stars)
stars = filter_visible_profiles(stars)
stars
filter_visible_profiles(stars)
end
private

View File

@ -165,7 +165,7 @@ module ApplicationHelper
css_classes = [short_format ? 'js-short-timeago' : 'js-timeago']
css_classes << html_class unless html_class.blank?
element = content_tag :time, l(time, format: "%b %d, %Y"),
content_tag :time, l(time, format: "%b %d, %Y"),
class: css_classes.join(' '),
title: l(time.to_time.in_time_zone, format: :timeago_tooltip),
datetime: time.to_time.getutc.iso8601,
@ -174,8 +174,6 @@ module ApplicationHelper
placement: placement,
container: 'body'
}
element
end
def edited_time_ago_with_tooltip(object, placement: 'top', html_class: 'time_ago', exclude_author: false)

View File

@ -4,8 +4,7 @@ module GitHelper
def strip_signature(text)
text = text.gsub(/-----BEGIN PGP SIGNATURE-----(.*)-----END PGP SIGNATURE-----/m, "")
text = text.gsub(/-----BEGIN PGP MESSAGE-----(.*)-----END PGP MESSAGE-----/m, "")
text = text.gsub(/-----BEGIN SIGNED MESSAGE-----(.*)-----END SIGNED MESSAGE-----/m, "")
text
text.gsub(/-----BEGIN SIGNED MESSAGE-----(.*)-----END SIGNED MESSAGE-----/m, "")
end
def short_sha(text)

View File

@ -21,8 +21,7 @@ module Ci
deps = model_class.where(pipeline_id: processable.pipeline_id).latest
deps = from_previous_stages(deps)
deps = from_needs(deps)
deps = from_dependencies(deps)
deps
from_dependencies(deps)
end
# Dependencies from the same parent-pipeline hierarchy excluding

View File

@ -51,16 +51,15 @@ class TeamcityService < CiService
end
def title
'JetBrains TeamCity CI'
'JetBrains TeamCity'
end
def description
'A continuous integration and build server'
s_('ProjectService|Run CI/CD pipelines with JetBrains TeamCity.')
end
def help
'You will want to configure monitoring of all branches so merge '\
'requests build, that setting is in the vsc root advanced settings.'
s_('To run CI/CD pipelines with JetBrains TeamCity, input the GitLab project details in the TeamCity project Version Control Settings.')
end
def fields
@ -68,20 +67,20 @@ class TeamcityService < CiService
{
type: 'text',
name: 'teamcity_url',
title: s_('ProjectService|TeamCity URL'),
placeholder: 'TeamCity root URL like https://teamcity.example.com',
title: s_('ProjectService|TeamCity server URL'),
placeholder: 'https://teamcity.example.com',
required: true
},
{
type: 'text',
name: 'build_type',
placeholder: 'Build configuration ID',
help: s_('ProjectService|The build configuration ID of the TeamCity project.'),
required: true
},
{
type: 'text',
name: 'username',
placeholder: 'A user with permissions to trigger a manual build'
help: s_('ProjectService|Must have permission to trigger a manual build in TeamCity.')
},
{
type: 'password',

View File

@ -35,5 +35,15 @@ module Users
.where(in_product_marketing_emails: { id: nil })
.select(Arel.sql("DISTINCT ON(#{users.table_name}.id) #{users.table_name}.*"))
end
scope :for_user_with_track_and_series, -> (user, track, series) do
where(user: user, track: track, series: series)
end
def self.save_cta_click(user, track, series)
email = for_user_with_track_and_series(user, track, series).take
email.update(cta_clicked_at: Time.zone.now) if email && email.cta_clicked_at.blank?
end
end
end
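For context, the scope and class method added here are what the controller change earlier in this commit calls on self-managed instances; a short usage sketch with illustrative arguments:

```ruby
# Illustrative call, mirroring Groups::EmailCampaignsController#track_click
# when Gitlab.com? is false; the :create track and series 0 are example values.
Users::InProductMarketingEmail.save_cta_click(current_user, :create, 0)
# Finds the record for that user/track/series and stamps cta_clicked_at once;
# it is a no-op when no record exists or the timestamp is already set.
```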

View File

@ -49,11 +49,9 @@ module MergeRequests
def duplicates
existing_oids = merge_request.merge_request_context_commits.map { |commit| commit.sha.to_s }
duplicate_oids = existing_oids.select do |existing_oid|
existing_oids.select do |existing_oid|
commit_ids.select { |commit_id| existing_oid.start_with?(commit_id) }.count > 0
end
duplicate_oids
end
def build_context_commit_rows(merge_request_id, commits)

View File

@ -58,21 +58,22 @@ module Packages
file_type == :dsc || file_type == :buildinfo || file_type == :changes
end
def extracted_fields
if file_type_debian?
package_file.file.use_file do |file_path|
::Packages::Debian::ExtractDebMetadataService.new(file_path).execute
end
elsif file_type_meta?
package_file.file.use_file do |file_path|
::Packages::Debian::ParseDebian822Service.new(File.read(file_path)).execute.each_value.first
def fields
strong_memoize(:fields) do
if file_type_debian?
package_file.file.use_file do |file_path|
::Packages::Debian::ExtractDebMetadataService.new(file_path).execute
end
elsif file_type_meta?
package_file.file.use_file do |file_path|
::Packages::Debian::ParseDebian822Service.new(File.read(file_path)).execute.each_value.first
end
end
end
end
def extract_metadata
fields = extracted_fields
architecture = fields.delete('Architecture') if file_type_debian?
architecture = fields['Architecture'] if file_type_debian?
{
file_type: file_type,

View File

@ -0,0 +1,101 @@
# frozen_string_literal: true
module Packages
module Debian
class ProcessChangesService
include ExclusiveLeaseGuard
include Gitlab::Utils::StrongMemoize
# used by ExclusiveLeaseGuard
DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze
def initialize(package_file, creator)
@package_file, @creator = package_file, creator
end
def execute
try_obtain_lease do
# return if changes file has already been processed
break if package_file.debian_file_metadatum&.changes?
validate!
package_file.transaction do
update_files_metadata
update_changes_metadata
end
end
end
private
attr_reader :package_file, :creator
def validate!
raise ArgumentError, 'invalid package file' unless package_file.debian_file_metadatum
raise ArgumentError, 'invalid package file' unless package_file.debian_file_metadatum.unknown?
raise ArgumentError, 'invalid package file' unless metadata[:file_type] == :changes
end
def update_files_metadata
files.each do |filename, entry|
entry.package_file.package = package
file_metadata = ::Packages::Debian::ExtractMetadataService.new(entry.package_file).execute
entry.package_file.debian_file_metadatum.update!(
file_type: file_metadata[:file_type],
component: files[filename].component,
architecture: file_metadata[:architecture],
fields: file_metadata[:fields]
)
entry.package_file.save!
end
end
def update_changes_metadata
package_file.update!(package: package)
package_file.debian_file_metadatum.update!(
file_type: metadata[:file_type],
fields: metadata[:fields]
)
end
def metadata
strong_memoize(:metadata) do
::Packages::Debian::ExtractChangesMetadataService.new(package_file).execute
end
end
def files
metadata[:files]
end
def project
package_file.package.project
end
def package
strong_memoize(:package) do
params = {
'name': metadata[:fields]['Source'],
'version': metadata[:fields]['Version'],
'distribution_name': metadata[:fields]['Distribution']
}
response = Packages::Debian::FindOrCreatePackageService.new(project, creator, params).execute
response.payload[:package]
end
end
# used by ExclusiveLeaseGuard
def lease_key
"packages:debian:process_changes_service:package_file:#{package_file.id}"
end
# used by ExclusiveLeaseGuard
def lease_timeout
DEFAULT_LEASE_TIMEOUT
end
end
end
end
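The service above leans on the `ExclusiveLeaseGuard` concern: `try_obtain_lease` only yields when a Redis-backed lease identified by `lease_key` can be taken, so two workers cannot process the same `.changes` file concurrently. A stripped-down sketch of that contract with an illustrative class:

```ruby
class ExampleExclusiveService
  include ExclusiveLeaseGuard

  def initialize(record_id)
    @record_id = record_id
  end

  def execute
    try_obtain_lease do
      # Only the process holding the lease reaches this block;
      # any other caller returns without doing the work.
      do_the_work
    end
  end

  private

  # used by ExclusiveLeaseGuard to name the lease
  def lease_key
    "example:exclusive_service:#{@record_id}"
  end

  # used by ExclusiveLeaseGuard as the maximum lease duration
  def lease_timeout
    1.hour.to_i
  end

  def do_the_work
    # ...
  end
end
```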

View File

@ -25,9 +25,7 @@ module ResourceEvents
def apply_common_filters(events)
events = apply_last_fetched_at(events)
events = apply_fetch_until(events)
events
apply_fetch_until(events)
end
def apply_last_fetched_at(events)

View File

@ -147,8 +147,7 @@ class IrkerWorker # rubocop:disable Scalability/IdempotentWorker
def files_count(commit)
diff_size = commit.raw_deltas.size
files = "#{diff_size} file".pluralize(diff_size)
files
"#{diff_size} file".pluralize(diff_size)
end
def colorize_sha(sha)

View File

@ -0,0 +1,5 @@
---
title: Add documentation about Pages deployment migration
merge_request: 59475
author:
type: added

View File

@ -0,0 +1,5 @@
---
title: Resolves offenses Style/RedundantAssignment
merge_request: 58013
author: Shubham Kumar (@imskr)
type: fixed

View File

@ -0,0 +1,5 @@
---
title: Update UI text for TeamCity integration
merge_request: 59493
author:
type: other

config/bullet.yml Normal file
View File

@ -0,0 +1,44 @@
---
exclusions:
# See https://github.com/flyerhzm/bullet#configuration for exclusion formats
# Example usage:
#
# paths with method name(recommended use):
#
# example_path_with_method_exclusion_name:
# merge_request: 'some merge request link for context'
# exact_file_name: true
# exclude:
# - 'some_ruby_file_name.rb'
# - 'method_name_inside_the_file_above'
#
# path or pattern only to file(fuzzy, not recommended):
#
# example_path_with_exact_file_name:
# merge_request: 'some merge request link for context'
# exact_file_name: true
# exclude:
# - 'some_ruby_file_name.rb'
#
# example_path_with_pattern:
# merge_request: 'some merge request link for context'
# exact_file_name: false
# exclude:
# - 'file_pattern'
#
# path with line numbers(extremely fragile, not recommended):
#
# example_path_with_line_range:
# merge_request: 'some merge request link for context'
# exact_file_name: true
# exclude:
# - 'some_ruby_file_name.rb'
# - 5..10
#
group_member_presenter_managing_group:
merge_request: 'https://gitlab.com/gitlab-org/gitlab/-/merge_requests/58927'
path_with_method: true
exclude:
- 'ee/app/presenters/ee/group_member_presenter.rb'
- 'group_managed_account?'
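Reading this entry together with `Gitlab::Bullet::Exclusions` and the reworked initializer added later in this commit, each entry's `exclude` array ends up in `Bullet.stacktrace_excludes`. A rough sketch of that wiring, under the assumption that the default config path is used:

```ruby
# Rough illustration only; the real wiring lives in config/initializers/bullet.rb.
exclusions = Gitlab::Bullet::Exclusions.new.execute
# => [['ee/app/presenters/ee/group_member_presenter.rb', 'group_managed_account?']]

Bullet.stacktrace_excludes = exclusions if Gitlab::Bullet.configure_bullet?
```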

View File

@ -1,18 +1,18 @@
# frozen_string_literal: true
def bullet_enabled?
Gitlab::Utils.to_boolean(ENV['ENABLE_BULLET'].to_s)
end
if defined?(Bullet) && (bullet_enabled? || Rails.env.development?)
if Gitlab::Bullet.configure_bullet?
Rails.application.configure do
config.after_initialize do
Bullet.enable = true
Bullet.bullet_logger = bullet_enabled?
Bullet.console = bullet_enabled?
if Gitlab::Bullet.extra_logging_enabled?
Bullet.bullet_logger = true
Bullet.console = true
end
Bullet.raise = Rails.env.test?
Bullet.stacktrace_excludes = Gitlab::Bullet::Exclusions.new.execute
end
end
end

View File

@ -17,9 +17,8 @@ module HangoutsChat
headers: { 'Content-Type' => 'application/json' },
parse: nil # disables automatic response parsing
)
net_http_response = httparty_response.response
httparty_response.response
# The rest of the integration expects a Net::HTTP response
net_http_response
end
end

View File

@ -33,8 +33,9 @@ verification methods:
| Git | Project wiki repository | Geo with Gitaly | Gitaly Checksum |
| Git | Project designs repository | Geo with Gitaly | Gitaly Checksum |
| Git | Object pools for forked project deduplication | Geo with Gitaly | _Not implemented_ |
| Git | Project Snippets | Geo with Gitaly | _Not implemented_ |
| Git | Personal Snippets | Geo with Gitaly | _Not implemented_ |
| Git | Project Snippets | Geo with Gitaly | Gitaly Checksum |
| Git | Personal Snippets | Geo with Gitaly | Gitaly Checksum |
| Git | Group wiki repository | Geo with Gitaly | _Not implemented_ |
| Blobs | User uploads _(file system)_ | Geo with API | _Not implemented_ |
| Blobs | User uploads _(object storage)_ | Geo with API/Managed (*2*) | _Not implemented_ |
| Blobs | LFS objects _(file system)_ | Geo with API | _Not implemented_ |
@ -51,6 +52,8 @@ verification methods:
| Blobs | Versioned Terraform State _(object storage)_ | Geo with API/Managed (*2*) | _Not implemented_ |
| Blobs | External Merge Request Diffs _(file system)_ | Geo with API | _Not implemented_ |
| Blobs | External Merge Request Diffs _(object storage)_ | Geo with API/Managed (*2*) | _Not implemented_ |
| Blobs | Pipeline artifacts _(file system)_ | Geo with API | SHA256 checksum |
| Blobs | Pipeline artifacts _(object storage)_ | Geo with API/Managed (*2*) | SHA256 checksum |
- (*1*): Redis replication can be used as part of HA with Redis sentinel. It's not used between Geo sites.
- (*2*): Object storage replication can be performed by Geo or by your object storage provider/appliance

View File

@ -99,7 +99,7 @@ pair for all the sites. The **secondary** site will use this key to
generate a short-lived JWT that is pull-only-capable to access the
**primary** site Container Registry.
For each application node on the **secondary** site:
For each application and Sidekiq node on the **secondary** site:
1. SSH into the node and log in as the `root` user:

View File

@ -999,6 +999,28 @@ sudo gitlab-rake gitlab:pages:clean_migrated_zip_storage
This will not remove any data from the legacy disk storage, and the GitLab Pages daemon will automatically fall back to using it.
### Migrate Pages deployments to object storage
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/325285) in GitLab 13.11.
Existing Pages deployment objects (which store [ZIP archives](#zip-storage)) can similarly be
migrated to [object storage](#using-object-storage) if you have been storing them locally.
Migrate your existing Pages deployments from local storage to object storage:
```shell
sudo gitlab-rails gitlab:pages:deployments:migrate_to_object_storage
```
### Rolling Pages deployments back to local storage
After the migration to object storage is performed, you can choose to revert your Pages deployments to local storage:
```shell
sudo gitlab-rails gitlab:pages:deployments:migrate_to_local
```
## Backup
GitLab Pages are part of the [regular backup](../../raketasks/backup_restore.md), so there is no separate backup to configure.

View File

@ -254,10 +254,10 @@ def up
t.bigint :project_id, null: false
t.bigint :user_id, null: false
t.string :jid, limit: 255
end
add_index :imports, :project_id
add_index :imports, :user_id
t.index :project_id
t.index :user_id
end
end
def down

View File

@ -87,7 +87,7 @@ it completes, refreshes every 60 minutes:
1. In the **Host URL** field, enter the URI appropriate for your version of GitLab,
replacing `<gitlab.example.com>` with your GitLab instance domain:
- *For GitLab versions 11.3 and later,* use `https://<gitlab.example.com>/`.
- *For GitLab versions 11.3 and later,* use `https://<gitlab.example.com>`.
- *For GitLab versions 11.2 and earlier,* use
`https://<gitlab.example.com>/-/jira`.

View File

@ -7,22 +7,17 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Groups **(FREE)**
In GitLab, you can put related projects together in a group.
In GitLab, you use groups to manage one or more related projects at the same time.
For example, you might create a group for your company members and a subgroup for each individual team.
You can name the group `company-team`, and the subgroups `backend-team`, `frontend-team`, and `production-team`.
You can use groups to manage permissions for your projects. If someone has access to
the group, they get access to all the projects in the group.
Then you can:
You can also view all of the issues and merge requests for the projects in the group,
and view analytics that show the group's activity.
- Grant members access to multiple projects at once.
- Add to-do items for all of the group members at once.
- View the [issues](../project/issues/index.md) and
[merge requests](../project/merge_requests/reviewing_and_managing_merge_requests.md#view-merge-requests-for-all-projects-in-a-group)
for all projects in the group, together in a single list view.
- [Bulk edit](../group/bulk_editing/index.md) issues, epics, and merge requests.
- [Create a wiki](../project/wiki/index.md) for the group.
You can use groups to communicate with all of the members of the group at once.
You can also create [subgroups](subgroups/index.md).
For larger organizations, you can also create [subgroups](subgroups/index.md).
## View groups

View File

@ -52,9 +52,7 @@ module API
groups = groups.where.not(id: params[:skip_groups]) if params[:skip_groups].present?
order_options = { params[:order_by] => params[:sort] }
order_options["id"] ||= "asc"
groups = groups.reorder(order_options)
groups
groups.reorder(order_options)
end
# rubocop: enable CodeReuse/ActiveRecord

View File

@ -209,10 +209,8 @@ module API
end
def find_personal_access_token
personal_access_token = find_personal_access_token_from_conan_jwt ||
find_personal_access_token_from_conan_jwt ||
find_personal_access_token_from_http_basic_auth
personal_access_token
end
def find_user_from_job_token

View File

@ -113,9 +113,7 @@ module ErrorTracking
uri = URI(url)
uri.path.squeeze!('/')
# Remove trailing slash
uri = uri.to_s.delete_suffix('/')
uri
uri.to_s.delete_suffix('/')
end
def map_to_errors(issues)

lib/gitlab/bullet.rb Normal file
View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
module Gitlab
module Bullet
extend self
def enabled?
Gitlab::Utils.to_boolean(ENV['ENABLE_BULLET'], default: false)
end
alias_method :extra_logging_enabled?, :enabled?
def configure_bullet?
defined?(::Bullet) && (enabled? || Rails.env.development?)
end
end
end

View File

@ -0,0 +1,37 @@
# frozen_string_literal: true
module Gitlab
module Bullet
class Exclusions
def initialize(config_file = Gitlab.root.join('config/bullet.yml'))
@config_file = config_file
end
def execute
exclusions.map { |v| v['exclude'] }
end
def validate_paths!
exclusions.each do |properties|
next unless properties['path_with_method']
file = properties['exclude'].first
raise "Bullet: File used by #{config_file} doesn't exist, validate the #{file} exclusion!" unless File.exist?(file)
end
end
private
attr_reader :config_file
def exclusions
@exclusions ||= if File.exist?(config_file)
YAML.load_file(config_file)['exclusions']&.values || []
else
[]
end
end
end
end
end

View File

@ -96,9 +96,7 @@ module Gitlab
initial_config = Config::External::Processor.new(initial_config, @context).perform
initial_config = Config::Extendable.new(initial_config).to_hash
initial_config = Config::Yaml::Tags::Resolver.new(initial_config).to_hash
initial_config = Config::EdgeStagesInjector.new(initial_config).to_hash
initial_config
Config::EdgeStagesInjector.new(initial_config).to_hash
end
def find_sha(project)

View File

@ -70,7 +70,7 @@ module Gitlab
def validate_service_request
headers = {
'X-Request-ID' => Labkit::Correlation::CorrelationId.current_id,
'X-Gitlab-Correlation-id' => Labkit::Correlation::CorrelationId.current_id,
'X-Gitlab-Token' => validation_service_token
}.compact

View File

@ -25,8 +25,7 @@ module Gitlab
return unless value
encrypted_token = Base64.decode64(value)
decrypted_token = Encryptor.decrypt(AES256_GCM_OPTIONS.merge(value: encrypted_token, iv: nonce))
decrypted_token
Encryptor.decrypt(AES256_GCM_OPTIONS.merge(value: encrypted_token, iv: nonce))
end
end
end

View File

@ -12,7 +12,7 @@ module Gitlab
author_url = build_author_url(build.commit, commit)
data = {
{
object_kind: 'build',
ref: build.ref,
@ -67,8 +67,6 @@ module Gitlab
environment: build_environment(build)
}
data
end
private

View File

@ -199,8 +199,7 @@ module Gitlab
def linkify_issues(str)
str = str.gsub(/([Ii]ssue) ([0-9]+)/, '\1 #\2')
str = str.gsub(/([Cc]ase) ([0-9]+)/, '\1 #\2')
str
str.gsub(/([Cc]ase) ([0-9]+)/, '\1 #\2')
end
def escape_for_markdown(str)
@ -208,8 +207,7 @@ module Gitlab
str = str.gsub(/^-/, "\\-")
str = str.gsub("`", "\\~")
str = str.delete("\r")
str = str.gsub("\n", " \n")
str
str.gsub("\n", " \n")
end
def format_content(raw_content)

View File

@ -127,10 +127,9 @@ module Gitlab
end
def decorate!
collection = each_with_index do |element, i|
each_with_index do |element, i|
@array[i] = yield(element)
end
collection
end
alias_method :to_ary, :to_a

View File

@ -30,15 +30,13 @@ module Gitlab
# ex: " OR (relative_position = 23 AND id > 500)"
def second_attribute_condition
condition = <<~SQL
<<~SQL
OR (
#{table_condition(order_list.first, values.first, '=').to_sql}
AND
#{table_condition(order_list[1], values[1], operators[1]).to_sql}
)
SQL
condition
end
# ex: " OR (relative_position IS NULL)"

View File

@ -14,15 +14,13 @@ module Gitlab
# ex: "(relative_position IS NULL AND id > 500)"
def first_attribute_condition
condition = <<~SQL
<<~SQL
(
#{table_condition(order_list.first, nil, 'is_null').to_sql}
AND
#{table_condition(order_list[1], values[1], operators[1]).to_sql}
)
SQL
condition
end
# ex: " OR (relative_position IS NOT NULL)"

View File

@ -224,11 +224,9 @@ module Gitlab
frag_path = frag_path.gsub(DOTS_RE) do |dots|
rel_dir(dots.split('/').count)
end
frag_path = frag_path.gsub(IMPLICIT_ROOT) do
frag_path.gsub(IMPLICIT_ROOT) do
(Rails.root / 'app').to_s + '/'
end
frag_path
end
def rel_dir(n_steps_up)

View File

@ -104,9 +104,7 @@ module Gitlab
def format_query(metric)
expression = remove_new_lines(metric[:expr])
expression = replace_variables(expression)
expression = replace_global_variables(expression, metric)
expression
replace_global_variables(expression, metric)
end
# Accomodates instance-defined Grafana variables.
@ -135,9 +133,7 @@ module Gitlab
def replace_global_variables(expression, metric)
expression = expression.gsub('$__interval', metric[:interval]) if metric[:interval]
expression = expression.gsub('$__from', query_params[:from])
expression = expression.gsub('$__to', query_params[:to])
expression
expression.gsub('$__to', query_params[:to])
end
# Removes new lines from expression.

View File

@ -26,16 +26,13 @@ module Gitlab
# Repeated dashes are invalid (OpenShift limitation)
slugified.squeeze!('-')
slugified =
if slugified.size > 24 || slugified != name
# Maximum length: 24 characters (OpenShift limitation)
shorten_and_add_suffix(slugified)
else
# Cannot end with a dash (Kubernetes label limitation)
slugified.chomp('-')
end
slugified
if slugified.size > 24 || slugified != name
# Maximum length: 24 characters (OpenShift limitation)
shorten_and_add_suffix(slugified)
else
# Cannot end with a dash (Kubernetes label limitation)
slugified.chomp('-')
end
end
private

View File

@ -24707,6 +24707,9 @@ msgstr ""
msgid "ProjectService|Mock service URL"
msgstr ""
msgid "ProjectService|Must have permission to trigger a manual build in TeamCity."
msgstr ""
msgid "ProjectService|New issue URL"
msgstr ""
@ -24716,7 +24719,13 @@ msgstr ""
msgid "ProjectService|Run CI/CD pipelines with Drone."
msgstr ""
msgid "ProjectService|TeamCity URL"
msgid "ProjectService|Run CI/CD pipelines with JetBrains TeamCity."
msgstr ""
msgid "ProjectService|TeamCity server URL"
msgstr ""
msgid "ProjectService|The build configuration ID of the TeamCity project."
msgstr ""
msgid "ProjectService|To configure this integration, you should:"
@ -32827,6 +32836,9 @@ msgstr ""
msgid "To receive alerts from manually configured Prometheus services, add the following URL and Authorization key to your Prometheus webhook config file. Learn more about %{linkStart}configuring Prometheus%{linkEnd} to send alerts to GitLab."
msgstr ""
msgid "To run CI/CD pipelines with JetBrains TeamCity, input the GitLab project details in the TeamCity project Version Control Settings."
msgstr ""
msgid "To see all the user's personal access tokens you must impersonate them first."
msgstr ""

View File

@ -53,12 +53,10 @@ module QA
end
def run_saml_idp_service(group_name)
service = Service::DockerRun::SamlIdp.new(Runtime::Scenario.gitlab_address, group_name).tap do |runner|
Service::DockerRun::SamlIdp.new(Runtime::Scenario.gitlab_address, group_name).tap do |runner|
runner.pull
runner.register!
end
service
end
def remove_saml_idp_service(saml_idp_service)

View File

@ -1,13 +0,0 @@
# frozen_string_literal: true
module QA
module Scenario
module Test
module Integration
class ObjectStorage < Test::Instance::All
tags :object_storage
end
end
end
end
end

View File

@ -1,9 +0,0 @@
# frozen_string_literal: true
RSpec.describe QA::Scenario::Test::Integration::ObjectStorage do
describe '#perform' do
it_behaves_like 'a QA scenario class' do
let(:tags) { [:object_storage] }
end
end
end

View File

@ -43,7 +43,7 @@ module GitalyTest
end
def env
env_hash = {
{
'HOME' => File.expand_path('tmp/tests'),
'GEM_PATH' => Gem.path.join(':'),
'BUNDLE_APP_CONFIG' => File.join(gemfile_dir, '.bundle'),
@ -54,8 +54,6 @@ module GitalyTest
# Git hooks can't run during tests as the internal API is not running.
'GITALY_TESTING_NO_GIT_HOOKS' => "1"
}
env_hash
end
# rubocop:disable GitlabSecurity/SystemCommandInjection

View File

@ -10,16 +10,16 @@ RSpec.describe 'User activates JetBrains TeamCity CI' do
end
it 'activates service', :js do
visit_project_integration('JetBrains TeamCity CI')
visit_project_integration('JetBrains TeamCity')
check('Push')
check('Merge Request')
fill_in('TeamCity URL', with: 'http://teamcity.example.com')
fill_in('TeamCity server URL', with: 'http://teamcity.example.com')
fill_in('Build type', with: 'GitlabTest_Build')
fill_in('Username', with: 'user')
fill_in('Password', with: 'verySecret')
click_test_then_save_integration(expect_test_to_fail: false)
expect(page).to have_content('JetBrains TeamCity CI settings saved and active.')
expect(page).to have_content('JetBrains TeamCity settings saved and active.')
end
end

View File

@ -0,0 +1,155 @@
# frozen_string_literal: true
require 'fast_spec_helper'
RSpec.describe Gitlab::Bullet::Exclusions do
let(:config_file) do
file = Tempfile.new('bullet.yml')
File.basename(file)
end
let(:exclude) { [] }
let(:config) do
{
exclusions: {
abc: {
merge_request: '_mr_',
path_with_method: true,
exclude: exclude
}
}
}
end
before do
File.write(config_file, config.deep_stringify_keys.to_yaml)
end
after do
FileUtils.rm_f(config_file)
end
describe '#execute' do
subject(:executor) { described_class.new(config_file).execute }
shared_examples_for 'loads exclusion results' do
let(:config) { { exclusions: { abc: { exclude: exclude } } } }
let(:results) { [exclude] }
specify do
expect(executor).to match(results)
end
end
context 'with preferred method of path and method name' do
it_behaves_like 'loads exclusion results' do
let(:exclude) { %w[_path_ _method_] }
end
end
context 'with file pattern' do
it_behaves_like 'loads exclusion results' do
let(:exclude) { ['_file_pattern_'] }
end
end
context 'with file name and line range' do
it_behaves_like 'loads exclusion results' do
let(:exclude) { ['file_name.rb', 5..10] }
end
end
context 'without exclusions' do
it_behaves_like 'loads exclusion results' do
let(:exclude) { [] }
end
end
context 'without exclusions key in config' do
it_behaves_like 'loads exclusion results' do
let(:config) { {} }
let(:results) { [] }
end
end
context 'when config file does not exist' do
it 'provides an empty array for exclusions' do
expect(described_class.new('_some_bogus_file_').execute).to match([])
end
end
end
describe '#validate_paths!' do
context 'when validating scenarios' do
let(:source_file) do
file = Tempfile.new('bullet_test_source_file.rb')
File.basename(file)
end
subject { described_class.new(config_file).validate_paths! }
before do
FileUtils.touch(source_file)
end
after do
FileUtils.rm_f(source_file)
end
context 'when using paths with method name' do
let(:exclude) { [source_file, '_method_'] }
context 'when source file for exclusion exists' do
specify do
expect { subject }.not_to raise_error
end
end
context 'when source file for exclusion does not exist' do
let(:exclude) { %w[_bogus_file_ _method_] }
specify do
expect { subject }.to raise_error(RuntimeError)
end
end
end
context 'when using path only' do
let(:exclude) { [source_file] }
context 'when source file for exclusion exists' do
specify do
expect { subject }.not_to raise_error
end
end
context 'when source file for exclusion does not exist' do
let(:exclude) { '_bogus_file_' }
specify do
expect { subject }.to raise_error(RuntimeError)
end
end
end
context 'when path_with_method is false for a file pattern' do
let(:exclude) { ['_file_pattern_'] }
let(:config) do
{
exclusions: {
abc: {
merge_request: '_mr_',
path_with_method: false,
exclude: exclude
}
}
}
end
specify do
expect { subject }.not_to raise_error
end
end
end
end
end

View File

@ -0,0 +1,51 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Bullet do
describe '#enabled?' do
it 'is enabled' do
stub_env('ENABLE_BULLET', true)
expect(described_class.enabled?).to be(true)
end
it 'is not enabled' do
stub_env('ENABLE_BULLET', nil)
expect(described_class.enabled?).to be(false)
end
it 'is correctly aliased for #extra_logging_enabled?' do
expect(described_class.method(:extra_logging_enabled?).original_name).to eq(:enabled?)
end
end
describe '#configure_bullet?' do
context 'with ENABLE_BULLET true' do
before do
stub_env('ENABLE_BULLET', true)
end
it 'is configurable' do
expect(described_class.configure_bullet?).to be(true)
end
end
context 'with ENABLE_BULLET falsey' do
before do
stub_env('ENABLE_BULLET', nil)
end
it 'is not configurable' do
expect(described_class.configure_bullet?).to be(false)
end
it 'is configurable in development' do
allow(Rails).to receive_message_chain(:env, :development?).and_return(true)
expect(described_class.configure_bullet?).to be(true)
end
end
end
end

View File

@ -88,7 +88,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
expect(params[:body]).to match_schema('/external_validation')
expect(params[:timeout]).to eq(described_class::DEFAULT_VALIDATION_REQUEST_TIMEOUT)
expect(params[:headers]).to eq({ 'X-Request-ID' => 'correlation-id' })
expect(params[:headers]).to eq({ 'X-Gitlab-Correlation-id' => 'correlation-id' })
end
perform!

View File

@ -2016,9 +2016,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
project
projects.create!(namespace_id: namespace.id)
end
it 'generates iids properly for models created after the migration' do

View File

@ -10,9 +10,7 @@ RSpec.describe BackfillOperationsFeatureFlagsActive do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
project
projects.create!(namespace_id: namespace.id)
end
it 'executes successfully when there are no flags in the table' do

View File

@ -10,9 +10,7 @@ RSpec.describe BackfillOperationsFeatureFlagsIid do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
project
projects.create!(namespace_id: namespace.id)
end
it 'migrates successfully when there are no flags in the database' do

View File

@ -10,9 +10,7 @@ RSpec.describe DeleteInternalIdsWhereFeatureFlagsUsage do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
project
projects.create!(namespace_id: namespace.id)
end
it 'deletes feature flag rows from the internal_ids table' do

View File

@ -12,9 +12,7 @@ RSpec.describe MigrateOpsFeatureFlagsScopesTargetUserIds do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
flag = flags.create!(project_id: project.id, active: true, name: 'test_flag')
flag
flags.create!(project_id: project.id, active: true, name: 'test_flag')
end
it 'migrates successfully when there are no scopes in the database' do

View File

@ -5816,7 +5816,7 @@ RSpec.describe Project, factory_default: :keep do
services = subject.find_or_initialize_services
expect(services.size).to eq(2)
expect(services.map(&:title)).to eq(['JetBrains TeamCity CI', 'Pushover'])
expect(services.map(&:title)).to eq(['JetBrains TeamCity', 'Pushover'])
end
end
end

View File

@ -3,6 +3,9 @@
require 'spec_helper'
RSpec.describe Users::InProductMarketingEmail, type: :model do
let(:track) { :create }
let(:series) { 0 }
describe 'associations' do
it { is_expected.to belong_to(:user) }
end
@ -17,9 +20,6 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
end
describe '.without_track_and_series' do
let(:track) { :create }
let(:series) { 0 }
let_it_be(:user) { create(:user) }
subject(:without_track_and_series) { User.merge(described_class.without_track_and_series(track, series)) }
@ -57,4 +57,75 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
it { expect(without_track_and_series).to eq [@other_user] }
end
end
describe '.for_user_with_track_and_series' do
let_it_be(:user) { create(:user) }
let_it_be(:in_product_marketing_email) { create(:in_product_marketing_email, series: 0, track: 0, user: user) }
subject(:for_user_with_track_and_series) { described_class.for_user_with_track_and_series(user, track, series).first }
context 'when record for user with given track and series exists' do
it { is_expected.to eq(in_product_marketing_email) }
end
context 'when user is different' do
let(:user) { build_stubbed(:user) }
it { is_expected.to be_nil }
end
context 'when track is different' do
let(:track) { 1 }
it { is_expected.to be_nil }
end
context 'when series is different' do
let(:series) { 1 }
it { is_expected.to be_nil }
end
end
describe '.save_cta_click' do
let(:user) { create(:user) }
subject(:save_cta_click) { described_class.save_cta_click(user, track, series) }
context 'when there is no record' do
it 'does not error' do
expect { save_cta_click }.not_to raise_error
end
end
context 'when there is no record for the track and series' do
it 'does not perform an update' do
other_email = create(:in_product_marketing_email, user: user, track: :verify, series: 2, cta_clicked_at: nil)
expect { save_cta_click }.not_to change { other_email.reload }
end
end
context 'when there is a record for the track and series' do
it 'saves the cta click date' do
email = create(:in_product_marketing_email, user: user, track: track, series: series, cta_clicked_at: nil)
freeze_time do
expect { save_cta_click }.to change { email.reload.cta_clicked_at }.from(nil).to(Time.zone.now)
end
end
context 'cta_clicked_at is already set' do
it 'does not update' do
create(:in_product_marketing_email, user: user, track: track, series: series, cta_clicked_at: Time.zone.now)
expect_next_found_instance_of(described_class) do |record|
expect(record).not_to receive(:update)
end
save_cta_click
end
end
end
end
end

View File

@ -38,17 +38,43 @@ RSpec.describe Groups::EmailCampaignsController do
expect(subject).to have_gitlab_http_status(:redirect)
end
it 'emits a snowplow event', :snowplow do
subject
context 'on .com' do
before do
allow(Gitlab).to receive(:com?).and_return(true)
end
expect_snowplow_event(
category: described_class.name,
action: 'click',
context: [{
schema: described_class::EMAIL_CAMPAIGNS_SCHEMA_URL,
data: { namespace_id: group.id, series: series.to_i, subject_line: subject_line_text, track: track.to_s }
}]
)
it 'emits a snowplow event', :snowplow do
subject
expect_snowplow_event(
category: described_class.name,
action: 'click',
context: [{
schema: described_class::EMAIL_CAMPAIGNS_SCHEMA_URL,
data: { namespace_id: group.id, series: series.to_i, subject_line: subject_line_text, track: track.to_s }
}]
)
end
it 'does not save the cta_click' do
expect(Users::InProductMarketingEmail).not_to receive(:save_cta_click)
subject
end
end
context 'when not on.com' do
it 'saves the cta_click' do
expect(Users::InProductMarketingEmail).to receive(:save_cta_click)
subject
end
it 'does not track snowplow events' do
subject
expect_no_snowplow_event
end
end
end

View File

@ -0,0 +1,58 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Packages::Debian::ProcessChangesService do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file, codename: 'unstable') }
let_it_be(:incoming) { create(:debian_incoming, project: distribution.project) }
let(:package_file) { incoming.package_files.last }
subject { described_class.new(package_file, user) }
context 'with valid package file' do
it 'updates package and package file', :aggregate_failures do
expect { subject.execute }
.to change { Packages::Package.count }.from(1).to(2)
.and not_change { Packages::PackageFile.count }
.and change { incoming.package_files.count }.from(7).to(0)
created_package = Packages::Package.last
expect(created_package.name).to eq 'sample'
expect(created_package.version).to eq '1.2.3~alpha2'
expect(created_package.creator).to eq user
end
end
context 'with invalid package file' do
let(:package_file) { incoming.package_files.first }
it 'raise ExtractionError', :aggregate_failures do
expect { subject.execute }
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and not_change { incoming.package_files.count }
.and not_change { distribution.reload.needs_update? }
.and raise_error(Packages::Debian::ExtractChangesMetadataService::ExtractionError, 'is not a changes file')
end
end
context 'when creating package fails' do
before do
allow_next_instance_of(::Packages::Debian::FindOrCreatePackageService) do |find_or_create_package_service|
expect(find_or_create_package_service).to receive(:execute).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
end
end
it 'remove the package file', :aggregate_failures do
expect { subject.execute }
.to not_change { Packages::Package.count }
.and not_change { Packages::PackageFile.count }
.and not_change { incoming.package_files.count }
.and not_change { distribution.reload.needs_update? }
.and raise_error(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
end
end
end
end

View File

@ -3,37 +3,53 @@
module ActiveRecord
class QueryRecorder
attr_reader :log, :skip_cached, :cached, :data
UNKNOWN = %w(unknown unknown).freeze
def initialize(skip_cached: true, query_recorder_debug: false, &block)
@data = Hash.new { |h, k| h[k] = { count: 0, occurrences: [], backtrace: [] } }
UNKNOWN = %w[unknown unknown].freeze
def initialize(skip_cached: true, log_file: nil, query_recorder_debug: false, &block)
@data = Hash.new { |h, k| h[k] = { count: 0, occurrences: [], backtrace: [], durations: [] } }
@log = []
@cached = []
@skip_cached = skip_cached
@query_recorder_debug = query_recorder_debug
@query_recorder_debug = ENV['QUERY_RECORDER_DEBUG'] || query_recorder_debug
@log_file = log_file
# force replacement of bind parameters to give tests the ability to check for ids
ActiveRecord::Base.connection.unprepared_statement do
ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
end
end
def show_backtrace(values)
Rails.logger.debug("QueryRecorder SQL: #{values[:sql]}")
def show_backtrace(values, duration)
values[:sql].lines.each do |line|
print_to_log(:SQL, line)
end
print_to_log(:DURATION, duration)
Gitlab::BacktraceCleaner.clean_backtrace(caller).each do |line|
Rails.logger.debug("QueryRecorder backtrace: --> #{line}")
print_to_log(:backtrace, line)
end
end
def print_to_log(label, line)
msg = "QueryRecorder #{label}: --> #{line}"
if @log_file
@log_file.puts(msg)
else
Rails.logger.debug(msg)
end
end
def get_sql_source(sql)
matches = sql.match(/,line:(?<line>.*):in\s+`(?<method>.*)'\*\//)
matches = sql.match(%r{,line:(?<line>.*):in\s+`(?<method>.*)'\*/})
matches ? [matches[:line], matches[:method]] : UNKNOWN
end
def store_sql_by_source(values: {}, backtrace: nil)
def store_sql_by_source(values: {}, duration: nil, backtrace: nil)
full_name = get_sql_source(values[:sql]).join(':')
@data[full_name][:count] += 1
@data[full_name][:occurrences] << values[:sql]
@data[full_name][:backtrace] << backtrace
@data[full_name][:durations] << duration
end
def find_query(query_regexp, limit, first_only: false)
@ -55,14 +71,14 @@ module ActiveRecord
end
def callback(name, start, finish, message_id, values)
store_backtrace = ENV['QUERY_RECORDER_DEBUG'] || @query_recorder_debug
backtrace = store_backtrace ? show_backtrace(values) : nil
duration = finish - start
if values[:cached] && skip_cached
@cached << values[:sql]
elsif !values[:name]&.include?("SCHEMA")
backtrace = @query_recorder_debug ? show_backtrace(values, duration) : nil
@log << values[:sql]
store_sql_by_source(values: values, backtrace: backtrace)
store_sql_by_source(values: values, duration: duration, backtrace: backtrace)
end
end

View File

@ -11,6 +11,72 @@ RSpec.describe ActiveRecord::QueryRecorder do
end
end
describe 'printing to the log' do
let(:backtrace) { %r{QueryRecorder backtrace: --> (\w+/)*\w+\.rb:\d+:in `.*'} }
let(:duration_line) { %r{QueryRecorder DURATION: --> \d+\.\d+} }
def expect_section(query, lines)
query_lines = lines.take(query.size)
# the query comes first
expect(query_lines).to match(query)
# followed by the duration
expect(lines[query.size]).to match(duration_line)
# and then one or more lines of backtrace
backtrace_lines = lines.drop(query.size + 1).take_while { |line| line.match(backtrace) }
expect(backtrace_lines).not_to be_empty
# Advance to the next section
lines.drop(query.size + 1 + backtrace_lines.size)
end
it 'prints SQL, duration and backtrace, all prefixed with QueryRecorder', :aggregate_failures do
io = StringIO.new
control = ActiveRecord::QueryRecorder.new(log_file: io, query_recorder_debug: true) do
TestQueries.count
TestQueries.first
TestQueries.where(<<~FRAGMENT).to_a # tests multi-line SQL
version = 'foo'
OR
version = 'bar'
FRAGMENT
end
query_a = start_with(%q[QueryRecorder SQL: --> SELECT COUNT(*) FROM "schema_migrations"])
query_b = start_with(%q[QueryRecorder SQL: --> SELECT "schema_migrations".* FROM "schema_migrations" ORDER BY "schema_migrations"."version" ASC LIMIT 1])
query_c_a = eq(%q[QueryRecorder SQL: --> SELECT "schema_migrations".* FROM "schema_migrations" WHERE (version = 'foo'])
query_c_b = eq(%q(QueryRecorder SQL: --> OR))
query_c_c = eq(%q(QueryRecorder SQL: --> version = 'bar'))
query_c_d = start_with("QueryRecorder SQL: --> )")
expect(control.count).to eq(3)
lines = io.string.lines.map(&:chomp)
expect(lines).to all(start_with('QueryRecorder'))
lines = expect_section([query_a], lines)
lines = expect_section([query_b], lines)
lines = expect_section([query_c_a, query_c_b, query_c_c, query_c_d], lines)
expect(lines).to be_empty
end
end
it 'includes duration information' do
control = ActiveRecord::QueryRecorder.new do
TestQueries.count
TestQueries.first
end
expect(control.count).to eq(2)
expect(control.data.values.flat_map { _1[:durations] }).to match([be > 0, be > 0])
end
describe 'detecting the right number of calls and their origin' do
it 'detects two separate queries' do
control = ActiveRecord::QueryRecorder.new query_recorder_debug: true do
@ -23,10 +89,10 @@ RSpec.describe ActiveRecord::QueryRecorder do
.to eq(control.data.keys.size)
# Ensure exactly 2 COUNT queries were detected
expect(control.occurrences_by_line_method.last[1][:occurrences]
.find_all {|i| i.match(/SELECT COUNT/) }.count).to eq(2)
.count { |str| str.start_with?('SELECT COUNT') }).to eq(2)
# Ensure exactly 1 LIMIT 1 (#first)
expect(control.occurrences_by_line_method.first[1][:occurrences]
.find_all { |i| i.match(/ORDER BY.*#{TestQueries.table_name}.*LIMIT 1/) }.count).to eq(1)
.count { |str| str.match(/ORDER BY.*#{TestQueries.table_name}.*LIMIT 1/) }).to eq(1)
# Ensure 3 DB calls overall were executed
expect(control.log.size).to eq(3)