Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-01-23 12:07:23 +00:00
parent 5831f05b4c
commit 84b507d17b
74 changed files with 910 additions and 87 deletions

View File

@@ -67,7 +67,6 @@ Layout/FirstHashElementIndentation:
    - 'ee/spec/controllers/projects_controller_spec.rb'
    - 'ee/spec/elastic/migrate/migration_shared_examples.rb'
    - 'ee/spec/factories/dependencies.rb'
    - 'ee/spec/factories/licenses.rb'
    - 'ee/spec/finders/epics_finder_spec.rb'
    - 'ee/spec/finders/namespaces/free_user_cap/users_finder_spec.rb'
    - 'ee/spec/frontend/fixtures/oncall_schedule.rb'

View File

@@ -520,14 +520,6 @@ RSpec/FeatureCategory:
    - 'ee/spec/lib/banzai/reference_parser/epic_parser_spec.rb'
    - 'ee/spec/lib/banzai/reference_parser/iteration_parser_spec.rb'
    - 'ee/spec/lib/banzai/reference_parser/vulnerability_parser_spec.rb'
    - 'ee/spec/lib/bulk_imports/common/pipelines/boards_pipeline_spec.rb'
    - 'ee/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb'
    - 'ee/spec/lib/bulk_imports/groups/graphql/get_iterations_query_spec.rb'
    - 'ee/spec/lib/bulk_imports/groups/pipelines/epics_pipeline_spec.rb'
    - 'ee/spec/lib/bulk_imports/groups/pipelines/iterations_pipeline_spec.rb'
    - 'ee/spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb'
    - 'ee/spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb'
    - 'ee/spec/lib/bulk_imports/projects/pipelines/push_rule_pipeline_spec.rb'
    - 'ee/spec/lib/compliance_management/merge_request_approval_settings/resolver_spec.rb'
    - 'ee/spec/lib/container_registry/client_spec.rb'
    - 'ee/spec/lib/ee/api/entities/analytics/code_review/merge_request_spec.rb'
@@ -2614,9 +2606,6 @@ RSpec/FeatureCategory:
    - 'spec/lib/bulk_imports/common/extractors/json_extractor_spec.rb'
    - 'spec/lib/bulk_imports/common/extractors/ndjson_extractor_spec.rb'
    - 'spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb'
    - 'spec/lib/bulk_imports/common/pipelines/badges_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/common/pipelines/members_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/common/rest/get_badges_query_spec.rb'
    - 'spec/lib/bulk_imports/common/transformers/prohibited_attributes_transformer_spec.rb'
    - 'spec/lib/bulk_imports/file_downloads/filename_fetch_spec.rb'
@@ -2624,10 +2613,6 @@ RSpec/FeatureCategory:
    - 'spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb'
    - 'spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb'
    - 'spec/lib/bulk_imports/groups/graphql/get_projects_query_spec.rb'
    - 'spec/lib/bulk_imports/groups/pipelines/group_attributes_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/groups/pipelines/namespace_settings_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/groups/transformers/subgroup_to_entity_transformer_spec.rb'
    - 'spec/lib/bulk_imports/pipeline/context_spec.rb'
    - 'spec/lib/bulk_imports/pipeline/extracted_data_spec.rb'
@@ -2635,19 +2620,6 @@ RSpec/FeatureCategory:
    - 'spec/lib/bulk_imports/projects/graphql/get_project_query_spec.rb'
    - 'spec/lib/bulk_imports/projects/graphql/get_repository_query_spec.rb'
    - 'spec/lib/bulk_imports/projects/graphql/get_snippet_repository_query_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/auto_devops_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/ci_pipelines_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/design_bundle_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/issues_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/project_feature_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/repository_bundle_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/service_desk_setting_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/snippets_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb'
    - 'spec/lib/bulk_imports/retry_pipeline_error_spec.rb'
    - 'spec/lib/bulk_imports/users_mapper_spec.rb'
    - 'spec/lib/constraints/admin_constrainer_spec.rb'

View File

@@ -122,6 +122,12 @@ export const MERGE_REQUEST_METRICS = {
  THROUGHPUT: MERGE_REQUEST_THROUGHPUT_TYPE,
};

export const CONTRIBUTOR_COUNT_TYPE = 'contributor_count';

export const CONTRIBUTOR_METRICS = {
  COUNT: CONTRIBUTOR_COUNT_TYPE,
};

export const METRIC_TOOLTIPS = {
  [DORA_METRICS.DEPLOYMENT_FREQUENCY]: {
    description: s__(
@@ -193,6 +199,15 @@ export const METRIC_TOOLTIPS = {
    projectLink: '-/analytics/merge_request_analytics',
    docsLink: helpPagePath('user/analytics/merge_request_analytics'),
  },
  [CONTRIBUTOR_METRICS.COUNT]: {
    description: s__(
      'ValueStreamAnalytics|Number of monthly unique users with contributions in the group.',
    ),
    groupLink: '-/contribution_analytics',
    docsLink: helpPagePath('user/profile/contributions_calendar.html', {
      anchor: 'user-contribution-events',
    }),
  },
  [VULNERABILITY_METRICS.CRITICAL]: {
    description: s__('ValueStreamAnalytics|Critical vulnerabilities over time.'),
    groupLink: '-/security/vulnerabilities?severity=CRITICAL',

View File

@@ -219,7 +219,7 @@ class GfmAutoComplete {
        let tpl = '/${name} ';
        let referencePrefix = null;
        if (value.params.length > 0) {
          const regexp = /\[[a-z]+:/;
          const regexp = /^<\[[a-z]+:/;
          const match = regexp.exec(value.params);
          if (match) {
            [referencePrefix] = match;
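
This anchors the match to the start of the params string: the `<[timecategory:category-name]>` token that the updated `/spend` params append (see the quick actions change below) sits mid-string, so it is no longer picked up as a reference prefix; only commands whose params open with `<[...:` still get one.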

View File

@@ -207,6 +207,16 @@ class BulkImports::Entity < ApplicationRecord
    @source_version ||= bulk_import.source_version_info
  end

  def checksums
    trackers.each_with_object({}) do |tracker, checksums|
      next unless tracker.file_extraction_pipeline?
      next if tracker.skipped?
      next if tracker.checksums_empty?

      checksums.merge!(tracker.checksums)
    end
  end

  private

  def validate_parent_is_a_group
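
For context, `checksums` folds each eligible tracker's per-relation counters into a single hash. A sketch of the return value, with hypothetical relations and counts:

```ruby
# Illustrative shape of BulkImports::Entity#checksums; relation names and
# counts are hypothetical.
{
  labels: { source: 10, fetched: 10, imported: 10 },
  milestones: { source: 10, fetched: 10, imported: 10 }
}
```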

View File

@@ -46,6 +46,10 @@ module BulkImports
      status['batches'].find { |item| item['batch_number'] == batch_number }
    end

    def total_objects_count
      status['total_objects_count']
    end

    private

    attr_reader :client, :entity, :relation, :pipeline_tracker

View File

@@ -86,5 +86,42 @@ class BulkImports::Tracker < ApplicationRecord
    event :cleanup_stale do
      transition [:created, :started] => :timeout
    end

    after_transition any => [:finished, :failed] do |tracker|
      BulkImports::ObjectCounter.persist!(tracker)
    end
  end

  def checksums
    return unless file_extraction_pipeline?

    # Return cached counters until they expire
    { importing_relation => cached_checksums || persisted_checksums }
  end

  def checksums_empty?
    return true unless checksums

    sums = checksums[importing_relation]

    sums[:source] == 0 && sums[:fetched] == 0 && sums[:imported] == 0
  end

  def importing_relation
    pipeline_class.relation.to_sym
  end

  private

  def cached_checksums
    BulkImports::ObjectCounter.summary(self)
  end

  def persisted_checksums
    {
      source: source_objects_count,
      fetched: fetched_objects_count,
      imported: imported_objects_count
    }
  end
end
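
While a migration is running, `checksums` serves live counters from Redis via `ObjectCounter.summary`; once those cached values expire, or after the `after_transition` hook above has persisted them, it falls back to the `*_objects_count` columns written by `ObjectCounter.persist!`.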

View File

@@ -32,6 +32,7 @@ module TimeTrackable
    @spent_at = options[:spent_at]
    @summary = options[:summary]
    @original_total_time_spent = nil
    @category_id = category_id(options[:category])

    return if @time_spent == 0
@@ -94,7 +95,8 @@ module TimeTrackable
      note_id: @time_spent_note_id,
      user: @time_spent_user,
      spent_at: @spent_at,
      summary: @summary,
      timelog_category_id: @category_id
    )
  end
  # rubocop:enable Gitlab/ModuleWithInstanceVariables
@@ -119,4 +121,8 @@ module TimeTrackable
    errors.add(:time_estimate, _('must have a valid format and be greater than or equal to zero.'))
  end

  def category_id(category)
    TimeTracking::TimelogCategory.find_by_name(project.root_namespace, category).first&.id
  end
end

View File

@@ -82,7 +82,12 @@ module BulkImports
      end

      def finish_export!(export)
        export.update!(
          status_event: 'finish',
          batched: false,
          error: nil,
          total_objects_count: export_service.exported_objects_count
        )
      end

      def exported_filepath

View File

@@ -25,7 +25,7 @@ Gitlab::HTTP_V2.configure do |config|
  end
end

if Gitlab.config.gitlab['http_client']
if Gitlab.config.gitlab['http_client'].present?
  pem = File.read(Gitlab.config.gitlab['http_client']['tls_client_cert_file'])
  password = Gitlab.config.gitlab['http_client']['tls_client_cert_password']

View File

@@ -7,4 +7,12 @@ feature_categories:
description: Represents a Terraform state backend
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/26619
milestone: '13.0'
gitlab_schema: gitlab_main
gitlab_schema: gitlab_main_cell
allow_cross_joins:
- gitlab_main_clusterwide
allow_cross_transactions:
- gitlab_main_clusterwide
allow_cross_foreign_keys:
- gitlab_main_clusterwide
sharding_key:
  project_id: projects

View File

@@ -0,0 +1,11 @@
# frozen_string_literal: true

class AddObjectCountFieldsToBulkImportTrackers < Gitlab::Database::Migration[2.2]
  milestone '16.8'

  def change
    add_column :bulk_import_trackers, :source_objects_count, :bigint, null: false, default: 0
    add_column :bulk_import_trackers, :fetched_objects_count, :bigint, null: false, default: 0
    add_column :bulk_import_trackers, :imported_objects_count, :bigint, null: false, default: 0
  end
end

View File

@@ -0,0 +1 @@
01a7d610bdf3c5d8e5f98f2c479a7cf83b7591e791b6f8e18f836b6bf6f52833

View File

@@ -13953,6 +13953,9 @@ CREATE TABLE bulk_import_trackers (
    created_at timestamp with time zone,
    updated_at timestamp with time zone,
    batched boolean DEFAULT false,
    source_objects_count bigint DEFAULT 0 NOT NULL,
    fetched_objects_count bigint DEFAULT 0 NOT NULL,
    imported_objects_count bigint DEFAULT 0 NOT NULL,
    CONSTRAINT check_2d45cae629 CHECK ((char_length(relation) <= 255)),
    CONSTRAINT check_40aeaa600b CHECK ((char_length(next_page) <= 255)),
    CONSTRAINT check_603f91cb06 CHECK ((char_length(jid) <= 255)),

View File

@@ -152,7 +152,19 @@ curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab
      "project_id": null,
      "created_at": "2021-06-18T09:47:37.390Z",
      "updated_at": "2021-06-18T09:47:51.867Z",
      "failures": [],
      "stats": {
        "labels": {
          "source": 10,
          "fetched": 10,
          "imported": 10
        },
        "milestones": {
          "source": 10,
          "fetched": 10,
          "imported": 10
        }
      }
    },
    {
      "id": 2,
@@ -233,7 +245,19 @@ curl --request GET --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab
    "status": "finished",
    "source_type": "gitlab",
    "created_at": "2021-06-18T09:45:55.358Z",
    "updated_at": "2021-06-18T09:46:27.003Z",
    "stats": {
      "labels": {
        "source": 10,
        "fetched": 10,
        "imported": 10
      },
      "milestones": {
        "source": 10,
        "fetched": 10,
        "imported": 10
      }
    }
  }
]
```
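
For each relation in `stats`, `source` is the number of objects the source instance reported for export, `fetched` is how many the destination retrieved, and `imported` is how many were successfully loaded; matching `source` and `imported` counts suggest the relation migrated completely.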

View File

@@ -2,6 +2,7 @@
stage: core platform
group: Tenant Scale
description: 'Cells: Routing Service'
status: accepted
---

# Cells: Routing Service

View File

@@ -35,6 +35,8 @@ The Value Streams Dashboard panels have a default configuration, but you can also

### DevSecOps metrics comparison panel

> Contributor count metric [added](https://gitlab.com/gitlab-org/gitlab/-/issues/433353) in GitLab 16.9.

The DevSecOps metrics comparison displays DORA4, vulnerability, and flow metrics for a group or project in the
month-to-date, last month, the month before, and the past 180 days.
@@ -48,6 +50,9 @@ that are the largest value contributors, overperforming, or underperforming.

You can also drill down the metrics for further analysis.
When you hover over a metric, a tooltip displays an explanation of the metric and a link to the related documentation page.

NOTE:
The contributor count metric is available only on GitLab.com at the group level. To view this metric in the comparison panel, you must [set up ClickHouse](../../integration/clickhouse.md), and enable the [feature flags](../../administration/feature_flags.md) `clickhouse_data_collection` and `event_sync_worker_for_click_house`.

### DORA Performers score panel

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/386843) in GitLab 16.2 [with a flag](../../administration/feature_flags.md) named `dora_performers_score_panel`. Disabled by default.
@@ -242,6 +247,7 @@ If the comparison panel from the configuration file is enabled with `filter_labels`
| Issues closed | Number of issues closed by month. | [Value Stream Analytics](https://gitlab.com/groups/gitlab-org/-/analytics/value_stream_analytics) | [Value Stream Analytics](../group/value_stream_analytics/index.md) | `issues_completed` |
| Number of deploys | Total number of deploys to production. | [Merge Request Analytics](https://gitlab.com/gitlab-org/gitlab/-/analytics/merge_request_analytics) | [Merge request analytics](merge_request_analytics.md) | `deploys` |
| Merge request throughput | The number of merge requests merged by month. | [Groups Productivity analytics](productivity_analytics.md), [Projects Merge Request Analytics](https://gitlab.com/gitlab-org/gitlab/-/analytics/merge_request_analytics) | [Groups Productivity analytics](productivity_analytics.md) [Projects Merge request analytics](merge_request_analytics.md) | `merge_request_throughput` |
| Contributor count | Number of monthly unique users with contributions in the group. | [Contribution Analytics](https://gitlab.com/groups/gitlab-org/-/contribution_analytics) | [User contribution events](../profile/contributions_calendar.md#user-contribution-events) | `contributor_count` |
| Critical vulnerabilities over time | Critical vulnerabilities over time in project or group | [Vulnerability report](https://gitlab.com/gitlab-org/gitlab/-/security/vulnerability_report) | [Vulnerability report](../application_security/vulnerability_report/index.md) | `vulnerability_critical` |
| High vulnerabilities over time | High vulnerabilities over time in project or group | [Vulnerability report](https://gitlab.com/gitlab-org/gitlab/-/security/vulnerability_report) | [Vulnerability report](../application_security/vulnerability_report/index.md) | `vulnerability_high` |

View File

@@ -1047,6 +1047,8 @@ the note content.
Regardless of the tag names, the relative order of the reference tags determines the rendered
numbering.

Regardless of where you put the note, it's always shown at the bottom of the file.

<!--
The following codeblock uses HTML to skip the Vale ReferenceLinks test.
Do not change it back to a markdown codeblock.

View File

@@ -78,7 +78,7 @@ If you use [Slack slash commands](slack_slash_commands.md) or
- Replace `/gitlab` with the trigger name you've configured for these integrations.
- Remove `<project>`.

The following slash commands are available in GitLab:
The following slash commands are available for GitLab:

| Command | Description |
| ------- | ----------- |
@@ -157,7 +157,7 @@ To receive notifications to a private Slack channel, you must add the GitLab for
### Notification events

The following GitLab events are available for Slack notifications:
The following GitLab events can trigger notifications in Slack:

| Event | Description |
|----------------------------------------------------------------------|---------------------------------------------------------------|

View File

@@ -25,6 +25,7 @@ module API
          expose :failures, using: EntityFailure, documentation: { is_array: true }
          expose :migrate_projects, documentation: { type: 'boolean', example: true }
          expose :has_failures, documentation: { type: 'boolean', example: false }
          expose :checksums, as: :stats, documentation: { type: 'object' }
        end
      end
    end

View File

@@ -0,0 +1,69 @@
# frozen_string_literal: true

module BulkImports
  class ObjectCounter
    SOURCE_COUNTER = :source
    FETCHED_COUNTER = :fetched
    IMPORTED_COUNTER = :imported
    COUNTER_TYPES = [SOURCE_COUNTER, FETCHED_COUNTER, IMPORTED_COUNTER].freeze
    CACHE_KEY = 'bulk_imports/object_counter/%{tracker_id}'

    class << self
      def increment(tracker, counter_type, value = 1)
        return unless valid_input?(counter_type, value)

        Gitlab::Cache::Import::Caching.hash_increment(counter_key(tracker), counter_type, value)
      end

      def set(tracker, counter_type, value = 1)
        return unless valid_input?(counter_type, value)

        Gitlab::Cache::Import::Caching.hash_add(counter_key(tracker), counter_type, value)
      end

      def summary(tracker)
        object_counters = Gitlab::Cache::Import::Caching.values_from_hash(counter_key(tracker))

        return unless object_counters.is_a?(Hash)
        return if object_counters.empty?

        empty_response.merge(object_counters.symbolize_keys.transform_values(&:to_i))
      end

      # Commits counters from redis to the database
      def persist!(tracker)
        counters = summary(tracker)

        return unless counters

        tracker.update!(
          source_objects_count: counters[SOURCE_COUNTER],
          fetched_objects_count: counters[FETCHED_COUNTER],
          imported_objects_count: counters[IMPORTED_COUNTER]
        )
      end

      private

      def counter_key(tracker)
        Kernel.format(CACHE_KEY, tracker_id: tracker.id)
      end

      def valid_input?(counter_type, value)
        return false unless value.is_a?(Integer)
        return false if value <= 0
        return false unless COUNTER_TYPES.include?(counter_type)

        true
      end

      def empty_response
        {
          SOURCE_COUNTER => 0,
          FETCHED_COUNTER => 0,
          IMPORTED_COUNTER => 0
        }
      end
    end
  end
end
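
A minimal usage sketch of the counter API above; the tracker lookup and all values are hypothetical:

```ruby
tracker = BulkImports::Tracker.find(42) # hypothetical tracker

# Seed the source total once, then count objects as they move through.
BulkImports::ObjectCounter.set(tracker, BulkImports::ObjectCounter::SOURCE_COUNTER, 10)
BulkImports::ObjectCounter.increment(tracker, BulkImports::ObjectCounter::FETCHED_COUNTER)
BulkImports::ObjectCounter.increment(tracker, BulkImports::ObjectCounter::IMPORTED_COUNTER)

BulkImports::ObjectCounter.summary(tracker)
# => { source: 10, fetched: 1, imported: 1 }

# Flush the Redis counters into the tracker's *_objects_count columns.
BulkImports::ObjectCounter.persist!(tracker)
```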

View File

@@ -12,6 +12,7 @@ module BulkImports
        info(message: 'Pipeline started')

        set_source_objects_counter

        extracted_data = extracted_data_from

        if extracted_data
@@ -21,6 +22,8 @@ module BulkImports
            raw_entry = entry.dup
            next if already_processed?(raw_entry, index)

            increment_fetched_objects_counter

            transformers.each do |transformer|
              entry = run_pipeline_step(:transformer, transformer.class.name) do
                transformer.transform(context, entry)
@@ -29,6 +32,8 @@ module BulkImports
            run_pipeline_step(:loader, loader.class.name, entry) do
              loader.load(context, entry)

              increment_imported_objects_counter
            end

            save_processed_entry(raw_entry, index)
@@ -196,6 +201,21 @@ module BulkImports
        context.entity.touch
        context.bulk_import.touch
      end

      def set_source_objects_counter
        # Export status is cached for 24h and read from Redis at this point
        export_status = ExportStatus.new(tracker, tracker.importing_relation)

        ObjectCounter.set(tracker, ObjectCounter::SOURCE_COUNTER, export_status.total_objects_count)
      end

      def increment_fetched_objects_counter
        ObjectCounter.increment(tracker, ObjectCounter::FETCHED_COUNTER)
      end

      def increment_imported_objects_counter
        ObjectCounter.increment(tracker, ObjectCounter::IMPORTED_COUNTER)
      end
    end
  end
end
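
End to end: the runner seeds the `source` counter from the cached `ExportStatus` when a pipeline starts, bumps `fetched` for every extracted entry and `imported` for every loaded entry, and the tracker's `after_transition` hook persists the Redis totals into the new `*_objects_count` columns once the tracker finishes or fails.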

View File

@@ -0,0 +1,11 @@
# frozen_string_literal: true

module CloudConnector
  module Config
    extend self

    def base_url
      Gitlab.config.cloud_connector.base_url
    end
  end
end

View File

@@ -239,6 +239,25 @@ module Gitlab
          end
        end

        # Increments value of a field in a hash
        #
        # raw_key - The key of the hash to add to.
        # field - The field to increment.
        # value - The field value to add to the hash.
        # timeout - The new timeout of the key.
        def self.hash_increment(raw_key, field, value, timeout: TIMEOUT)
          return if value.to_i <= 0

          key = cache_key_for(raw_key)

          with_redis do |redis|
            redis.multi do |m|
              m.hincrby(key, field, value.to_i)
              m.expire(key, timeout)
            end
          end
        end

        # Adds a value to a list.
        #
        # raw_key - The key of the list to add to.
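
A sketch of the helper's behaviour; the key and field names are hypothetical, and note that Redis returns hash values as strings:

```ruby
Gitlab::Cache::Import::Caching.hash_increment('example-key', 'fetched', 1)
Gitlab::Cache::Import::Caching.hash_increment('example-key', 'fetched', 5)

# HINCRBY and EXPIRE run in a single MULTI, so the counter hash always
# carries a fresh TTL.
Gitlab::Cache::Import::Caching.values_from_hash('example-key')
# => { 'fetched' => '6' }
```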

View File

@@ -181,7 +181,14 @@ module Gitlab
          spend_time_message(time_spent, time_spent_date, true)
        end

        params '<time(1h30m | -1h30m)> <date(YYYY-MM-DD)>'
        params do
          base_params = 'time(1h30m | -1h30m) <date(YYYY-MM-DD)>'
          if Feature.enabled?(:timelog_categories, quick_action_target.project)
            "#{base_params} <[timecategory:category-name]>"
          else
            base_params
          end
        end
        types Issue, MergeRequest
        condition do
          quick_action_target.supports_time_tracking? &&
@@ -190,12 +197,13 @@ module Gitlab
        parse_params do |raw_time_date|
          Gitlab::QuickActions::SpendTimeAndDateSeparator.new(raw_time_date).execute
        end
        command :spend, :spent, :spend_time do |time_spent, time_spent_date|
        command :spend, :spent, :spend_time do |time_spent, time_spent_date, category|
          if time_spent
            @updates[:spend_time] = {
              duration: time_spent,
              user_id: current_user.id,
              spent_at: time_spent_date,
              category: category
            }
          end
        end
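
With the `timelog_categories` flag enabled, a quick action such as `/spend 1h30m 2024-01-15 [timecategory:billable]` records the duration, the spent-at date, and a category (the name `billable` is hypothetical; it is looked up by name on the project's root namespace).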

View File

@@ -12,6 +12,7 @@ module Gitlab
    # in other cases return nil
    class SpendTimeAndDateSeparator
      DATE_REGEX = %r{(\d{2,4}[/\-.]\d{1,2}[/\-.]\d{1,2})}
      CATEGORY_REGEX = %r{\[timecategory:(.*)\]}

      def initialize(spend_command_arg)
        @spend_arg = spend_command_arg
@@ -19,20 +20,23 @@ module Gitlab
      def execute
        return if @spend_arg.blank?

        return [get_time, DateTime.current] unless date_present?
        return unless valid_date?
        return if date_present? && !valid_date?

        [get_time, get_date]
        [time_spent, spent_at, category]
      end

      private

      def get_time
      def time_spent
        raw_time = @spend_arg.gsub(DATE_REGEX, '')
        raw_time = raw_time.gsub(CATEGORY_REGEX, '')

        Gitlab::TimeTrackingFormatter.parse(raw_time)
      end

      def get_date
      def spent_at
        return DateTime.current unless date_present?

        string_date = @spend_arg.match(DATE_REGEX)[0]
        Date.parse(string_date)
      end
@@ -55,6 +59,12 @@ module Gitlab
      def date_past_or_today?(date)
        date&.past? || date&.today?
      end

      def category
        return unless @spend_arg.match?(CATEGORY_REGEX)

        @spend_arg.match(CATEGORY_REGEX)[1]
      end
    end
  end
end
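
The reworked separator always returns a three-element array. A sketch, assuming `Gitlab::TimeTrackingFormatter.parse` returns seconds and using a hypothetical category name:

```ruby
Gitlab::QuickActions::SpendTimeAndDateSeparator
  .new('1h30m 2024-01-15 [timecategory:billable]')
  .execute
# => [5400, #<Date: 2024-01-15>, 'billable']

# Without a date, spent_at falls back to DateTime.current; without a
# [timecategory:...] token, category is nil.
```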

View File

@@ -15437,6 +15437,9 @@ msgstr ""
msgid "DORA4Metrics|Change failure rate (percentage)"
msgstr ""

msgid "DORA4Metrics|Contributor count"
msgstr ""

msgid "DORA4Metrics|Critical Vulnerabilities over time"
msgstr ""

@@ -33505,6 +33508,9 @@ msgstr ""
msgid "ObservabilityMetrics|is not like"
msgstr ""

msgid "ObservabilityMetrics|multiple"
msgstr ""

msgid "Observability|Enable"
msgstr ""

@@ -53962,6 +53968,9 @@ msgstr ""
msgid "ValueStreamAnalytics|Number of issues closed by month."
msgstr ""

msgid "ValueStreamAnalytics|Number of monthly unique users with contributions in the group."
msgstr ""

msgid "ValueStreamAnalytics|Number of new issues created."
msgstr ""

View File

@@ -9,6 +9,10 @@ FactoryBot.define do
    sequence(:pipeline_name) { |n| "pipeline_name_#{n}" }
    sequence(:jid) { |n| "bulk_import_entity_#{n}" }

    source_objects_count { 1 }
    fetched_objects_count { 1 }
    imported_objects_count { 1 }

    trait :started do
      status { 1 }
    end

View File

@@ -23,7 +23,8 @@ RSpec.describe API::Entities::BulkImports::Entity, feature_category: :importers
        :updated_at,
        :failures,
        :migrate_projects,
        :has_failures,
        :stats
      )
    end
  end

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Common::Pipelines::BadgesPipeline, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project) }
@@ -21,6 +21,8 @@ RSpec.describe BulkImports::Common::Pipelines::BadgesPipeline do
    allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor|
      allow(extractor).to receive(:extract).and_return(first_page, last_page)
    end

    allow(subject).to receive(:set_source_objects_counter)
  end

  it 'imports a group badge' do

View File

@@ -42,6 +42,7 @@ RSpec.describe BulkImports::Common::Pipelines::BoardsPipeline, feature_category:
      allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: board_data))
      end

      allow(subject).to receive(:set_source_objects_counter)

      group.add_owner(user)
    end

View File

@@ -24,6 +24,7 @@ RSpec.describe BulkImports::Common::Pipelines::LabelsPipeline, feature_category:
  let(:tmpdir) { Dir.mktmpdir }

  before do
    allow(subject).to receive(:set_source_objects_counter)

    FileUtils.copy_file(filepath, File.join(tmpdir, 'labels.ndjson.gz'))
    group.add_owner(user)
  end

View File

@@ -23,6 +23,7 @@ RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline, feature_categ
    File.write(lfs_json_file_path, { oid => [0, 1, 2, nil] }.to_json )

    allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
    allow(pipeline).to receive(:set_source_objects_counter)
  end

  after do
View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Common::Pipelines::MembersPipeline, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
  let_it_be(:member_user1) { create(:user, email: 'email1@email.com') }
@@ -25,6 +25,10 @@ RSpec.describe BulkImports::Common::Pipelines::MembersPipeline do
  subject(:pipeline) { described_class.new(context) }

  before do
    allow(pipeline).to receive(:set_source_objects_counter)
  end

  def extracted_data(email:, has_next_page: false)
    data = {
      'created_at' => '2020-01-01T00:00:00Z',

View File

@@ -48,6 +48,8 @@ RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline, feature_categ
    allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: exported_milestones))
    end

    allow(subject).to receive(:set_source_objects_counter)
  end

  subject { described_class.new(context) }

View File

@@ -19,6 +19,8 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline, feature_category
      FileUtils.mkdir_p(uploads_dir_path)
      FileUtils.touch(upload_file_path)

      allow(pipeline).to receive(:set_source_objects_counter)
    end

    after do

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Groups::Pipelines::GroupAttributesPipeline, feature_category: :importers do
  subject(:pipeline) { described_class.new(context) }

  let_it_be(:user) { create(:user) }
@@ -35,6 +35,8 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupAttributesPipeline do
        BulkImports::Pipeline::ExtractedData.new(data: group_attributes)
      )
    end

    allow(pipeline).to receive(:set_source_objects_counter)
  end

  it 'imports allowed group attributes' do

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline, feature_category: :importers do
  describe '#run', :clean_gitlab_redis_cache do
    let_it_be(:user) { create(:user) }
    let_it_be(:parent) { create(:group) }
@@ -42,6 +42,8 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: group_data))
      end

      allow(subject).to receive(:set_source_objects_counter)

      parent.add_owner(user)
    end

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Groups::Pipelines::NamespaceSettingsPipeline, feature_category: :importers do
  subject(:pipeline) { described_class.new(context) }

  let_it_be(:user) { create(:user) }
@@ -14,6 +14,8 @@ RSpec.describe BulkImports::Groups::Pipelines::NamespaceSettingsPipeline do
  before do
    group.add_owner(user)

    allow(pipeline).to receive(:set_source_objects_counter)
  end

  describe '#run' do

View File

@@ -34,6 +34,8 @@ RSpec.describe BulkImports::Groups::Pipelines::ProjectEntitiesPipeline, feature_
      allow(extractor).to receive(:extract).and_return(extracted_data)
    end

    allow(subject).to receive(:set_source_objects_counter)

    destination_group.add_owner(user)
  end

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group, path: 'group') }
  let_it_be(:parent) { create(:group, name: 'Imported Group', path: 'imported-group') }
@@ -25,6 +25,8 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
      allow(extractor).to receive(:extract).and_return(extracted_data)
    end

    allow(subject).to receive(:set_source_objects_counter)

    parent.add_owner(user)
  end

View File

@@ -0,0 +1,132 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe BulkImports::ObjectCounter, :clean_gitlab_redis_cache, feature_category: :importers do
  let_it_be(:tracker) { build(:bulk_import_tracker, id: non_existing_record_id) }
  let_it_be(:cache_key) { "bulk_imports/object_counter/#{tracker.id}" }

  describe '.increment' do
    it 'increments counter by 1' do
      expect(Gitlab::Cache::Import::Caching)
        .to receive(:hash_increment)
        .with(cache_key, described_class::SOURCE_COUNTER, 1)

      described_class.increment(tracker, described_class::SOURCE_COUNTER)
    end

    it 'increments counter by given value' do
      expect(Gitlab::Cache::Import::Caching)
        .to receive(:hash_increment)
        .with(cache_key, described_class::SOURCE_COUNTER, 10)

      described_class.increment(tracker, described_class::SOURCE_COUNTER, 10)
    end

    context 'when value is not an integer' do
      it 'does not increment counter' do
        expect(Gitlab::Cache::Import::Caching).not_to receive(:hash_increment)

        described_class.increment(tracker, described_class::SOURCE_COUNTER, 'foo')
      end
    end

    context 'when value is less than 1' do
      it 'does not increment counter' do
        expect(Gitlab::Cache::Import::Caching).not_to receive(:hash_increment)

        described_class.increment(tracker, described_class::SOURCE_COUNTER, 0)
      end
    end

    context 'when counter type is invalid' do
      it 'does not increment counter' do
        expect(Gitlab::Cache::Import::Caching).not_to receive(:hash_increment)

        described_class.increment(tracker, 'foo')
      end
    end
  end

  describe '.set' do
    it 'sets counter to given value' do
      expect(Gitlab::Cache::Import::Caching).to receive(:hash_add).with(cache_key, described_class::SOURCE_COUNTER, 10)

      described_class.set(tracker, described_class::SOURCE_COUNTER, 10)
    end

    context 'when value is not an integer' do
      it 'does not set counter' do
        expect(Gitlab::Cache::Import::Caching).not_to receive(:hash_add)

        described_class.set(tracker, described_class::SOURCE_COUNTER, 'foo')
      end
    end

    context 'when value is less than 1' do
      it 'does not set counter' do
        expect(Gitlab::Cache::Import::Caching).not_to receive(:hash_add)

        described_class.set(tracker, described_class::SOURCE_COUNTER, 0)
      end
    end

    context 'when counter type is invalid' do
      it 'does not set counter' do
        expect(Gitlab::Cache::Import::Caching).not_to receive(:hash_add)

        described_class.set(tracker, 'foo')
      end
    end
  end

  describe '.summary' do
    it 'returns symbolized hash' do
      expect(Gitlab::Cache::Import::Caching)
        .to receive(:values_from_hash)
        .with(cache_key).and_return({ 'source' => 10 })

      expect(described_class.summary(tracker)).to eq(source: 10, fetched: 0, imported: 0)
    end

    context 'when hash is empty' do
      it 'returns nil' do
        expect(Gitlab::Cache::Import::Caching).to receive(:values_from_hash).with(cache_key).and_return({})

        expect(described_class.summary(tracker)).to be_nil
      end
    end

    context 'when return value is not a hash' do
      it 'returns nil' do
        expect(Gitlab::Cache::Import::Caching).to receive(:values_from_hash).with(cache_key).and_return('foo')

        expect(described_class.summary(tracker)).to be_nil
      end
    end
  end

  describe '.persist!' do
    it 'updates tracker with summary' do
      tracker = create(
        :bulk_import_tracker,
        source_objects_count: 0,
        fetched_objects_count: 0,
        imported_objects_count: 0
      )

      expect(Gitlab::Cache::Import::Caching)
        .to receive(:values_from_hash)
        .with("bulk_imports/object_counter/#{tracker.id}")
        .and_return('source' => 10, 'fetched' => 20, 'imported' => 30)

      described_class.persist!(tracker)

      tracker.reload

      expect(tracker.source_objects_count).to eq(10)
      expect(tracker.fetched_objects_count).to eq(20)
      expect(tracker.imported_objects_count).to eq(30)
    end
  end
end

View File

@@ -41,6 +41,12 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
    end

    stub_const('BulkImports::MyPipeline', pipeline)

    allow_next_instance_of(BulkImports::ExportStatus) do |export_status|
      allow(export_status).to receive(:total_objects_count).and_return(1)
    end

    allow(tracker).to receive_message_chain(:pipeline_class, :relation).and_return('relation')
  end

  let_it_be(:bulk_import) { create(:bulk_import) }
@@ -433,6 +439,35 @@ RSpec.describe BulkImports::Pipeline::Runner, feature_category: :importers do
      end
    end

    describe 'object counting' do
      it 'increments object counters' do
        allow_next_instance_of(BulkImports::Extractor) do |extractor|
          allow(extractor).to receive(:extract).with(context).and_return(extracted_data)
        end

        allow_next_instance_of(BulkImports::Transformer) do |transformer|
          allow(transformer)
            .to receive(:transform)
            .with(context, extracted_data.data.first)
            .and_return(extracted_data.data.first)
        end

        allow_next_instance_of(BulkImports::Loader) do |loader|
          expect(loader).to receive(:load).with(context, extracted_data.data.first)
        end

        expect(BulkImports::ObjectCounter).to receive(:set).with(tracker, :source, 1)
        expect(BulkImports::ObjectCounter).to receive(:increment).with(tracker, :fetched)
        expect(BulkImports::ObjectCounter).to receive(:increment).with(tracker, :imported)

        subject.run

        expect(tracker.source_objects_count).to eq(1)
        expect(tracker.fetched_objects_count).to eq(1)
        expect(tracker.imported_objects_count).to eq(1)
      end
    end

    def log_params(context, extra = {})
      {
        bulk_import_id: context.bulk_import_id,

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::AutoDevopsPipeline, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, group: group) }
@@ -41,6 +41,8 @@ RSpec.describe BulkImports::Projects::Pipelines::AutoDevopsPipeline do
        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [auto_devops]))
      end

      allow(pipeline).to receive(:set_source_objects_counter)

      pipeline.run

      expect(project.auto_devops.enabled).to be_truthy

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, group: group) }
@@ -43,6 +43,10 @@ RSpec.describe BulkImports::Projects::Pipelines::CiPipelinesPipeline do
  subject(:pipeline) { described_class.new(context) }

  before do
    allow(pipeline).to receive(:set_source_objects_counter)
  end

  describe '#run', :clean_gitlab_redis_cache do
    before do
      group.add_owner(user)

View File

@@ -52,7 +52,7 @@ RSpec.describe BulkImports::Projects::Pipelines::CommitNotesPipeline, feature_ca
  subject(:pipeline) { described_class.new(context) }

  describe '#run' do
    before do
    it 'imports ci pipeline notes into destination project' do
      group.add_owner(user)

      allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
@@ -60,9 +60,9 @@ RSpec.describe BulkImports::Projects::Pipelines::CommitNotesPipeline, feature_ca
          BulkImports::Pipeline::ExtractedData.new(data: [ci_pipeline_note])
        )
      end
    end

    it 'imports ci pipeline notes into destination project' do
      allow(pipeline).to receive(:set_source_objects_counter)

      expect { pipeline.run }.to change { project.notes.for_commit_id("sha-notes").count }.from(0).to(1)
    end
  end

View File

@@ -30,6 +30,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ContainerExpirationPolicyPipeli
        allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[policy, 0]]))
      end

      allow(pipeline).to receive(:set_source_objects_counter)

      pipeline.run

      policy.each_pair do |key, value|

View File

@@ -22,6 +22,7 @@ RSpec.describe BulkImports::Projects::Pipelines::DesignBundlePipeline, feature_c
    allow(portable).to receive(:lfs_enabled?).and_return(true)

    allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
    allow(pipeline).to receive(:set_source_objects_counter)
  end

  after do

View File

@@ -35,6 +35,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline, f
      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[external_pull_request, 0]]))
    end

    allow(pipeline).to receive(:set_source_objects_counter)

    pipeline.run
  end

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project, group: group) }
@@ -45,6 +45,8 @@ RSpec.describe BulkImports::Projects::Pipelines::IssuesPipeline do
      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [issue_with_index]))
    end

    allow(pipeline).to receive(:set_source_objects_counter)

    pipeline.run
  end

View File

@@ -101,6 +101,7 @@ RSpec.describe BulkImports::Projects::Pipelines::MergeRequestsPipeline, feature_
    allow(project.repository).to receive(:create_branch)

    allow(::Projects::ImportExport::AfterImportMergeRequestsWorker).to receive(:perform_async)
    allow(pipeline).to receive(:set_source_objects_counter)

    pipeline.run
  end

View File

@@ -43,6 +43,8 @@ RSpec.describe BulkImports::Projects::Pipelines::PipelineSchedulesPipeline, :cle
      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [schedule]))
    end

    allow(pipeline).to receive(:set_source_objects_counter)

    pipeline.run
  end

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline, :with_license, feature_category: :importers do
  let_it_be(:project) { create(:project) }
  let_it_be(:bulk_import) { create(:bulk_import) }
  let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
@@ -62,6 +62,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline, :wit
      )
    end

    allow(pipeline).to receive(:set_source_objects_counter)

    pipeline.run
  end

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::ProjectFeaturePipeline, feature_category: :importers do
  let_it_be(:project) { create(:project) }
  let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
  let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
@@ -35,6 +35,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectFeaturePipeline do
      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[project_feature, 0]]))
    end

    allow(pipeline).to receive(:set_source_objects_counter)

    pipeline.run

    project_feature.each_pair do |key, value|

View File

@@ -36,6 +36,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ProjectPipeline, feature_catego
      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data))
    end

    allow(project_pipeline).to receive(:set_source_objects_counter)

    group.add_owner(user)
  end

View File

@@ -2,7 +2,7 @@

require 'spec_helper'

RSpec.describe BulkImports::Projects::Pipelines::ProtectedBranchesPipeline, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project) }
  let_it_be(:bulk_import) { create(:bulk_import, user: user) }
@@ -39,6 +39,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ProtectedBranchesPipeline do
      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [protected_branch, 0]))
    end

    allow(pipeline).to receive(:set_source_objects_counter)

    pipeline.run

    imported_protected_branch = project.protected_branches.last

View File

@@ -37,6 +37,10 @@ RSpec.describe BulkImports::Projects::Pipelines::ReferencesPipeline, feature_cat
  subject(:pipeline) { described_class.new(context) }

  before do
    allow(subject).to receive(:set_source_objects_counter)
  end

  describe '#run' do
    it "enqueues TransformReferencesWorker for the project's issues, mrs and their notes" do
      expect(BulkImports::TransformReferencesWorker).to receive(:perform_in)

View File

@@ -46,6 +46,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline, feature_categ
    allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor|
      allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [with_index]))
    end

    allow(pipeline).to receive(:set_source_objects_counter)
  end

  it 'imports release into destination project' do

View File

@@ -21,6 +21,7 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryBundlePipeline, featu
    source.repository.bundle_to_disk(bundle_path)

    allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir)
    allow(pipeline).to receive(:set_source_objects_counter)
  end

  after do

View File

@@ -32,6 +32,8 @@ RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline, feature_cat
    allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
      allow(extractor).to receive(:extract).and_return(extracted_data)
    end

    allow(pipeline).to receive(:set_source_objects_counter)
  end

  describe '#run' do

View File

@@ -2,7 +2,7 @@
 require 'spec_helper'
-RSpec.describe BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline, feature_category: :importers do
   let_it_be(:project) { create(:project) }
   let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project) }
   let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
@@ -17,6 +17,8 @@ RSpec.describe BulkImports::Projects::Pipelines::ServiceDeskSettingPipeline do
       allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[setting, 0]]))
     end
+    allow(pipeline).to receive(:set_source_objects_counter)
     pipeline.run
     setting.each_pair do |key, value|

View File

@@ -2,7 +2,7 @@
 require 'spec_helper'
-RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline, feature_category: :importers do
   let_it_be(:user) { create(:user) }
   let_it_be(:group) { create(:group) }
   let_it_be(:project) { create(:project, group: group) }
@@ -49,6 +49,8 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsPipeline do
       allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [snippet_with_index]))
     end
+    allow(pipeline).to receive(:set_source_objects_counter)
     pipeline.run
   end

View File

@@ -45,6 +45,10 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline, fea
   let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info) }
+  before do
+    allow(pipeline).to receive(:set_source_objects_counter)
+  end
   describe 'extractor' do
     it 'is a GraphqlExtractor with Graphql::GetSnippetRepositoryQuery' do
       expect(described_class.get_extractor).to eq(

View File

@@ -171,6 +171,40 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache, :clean
     end
   end
+  describe '.hash_increment' do
+    it 'increments a value in a hash' do
+      described_class.hash_increment('foo', 'field', 1)
+      described_class.hash_increment('foo', 'field', 5)
+      key = described_class.cache_key_for('foo')
+      values = Gitlab::Redis::Cache.with { |r| r.hgetall(key) }
+      expect(values).to eq({ 'field' => '6' })
+    end
+    context 'when the value is not an integer' do
+      it 'returns' do
+        described_class.hash_increment('another-foo', 'another-field', 'not-an-integer')
+        key = described_class.cache_key_for('foo')
+        values = Gitlab::Redis::Cache.with { |r| r.hgetall(key) }
+        expect(values).to eq({})
+      end
+    end
+    context 'when the value is less than 0' do
+      it 'returns' do
+        described_class.hash_increment('another-foo', 'another-field', -5)
+        key = described_class.cache_key_for('foo')
+        values = Gitlab::Redis::Cache.with { |r| r.hgetall(key) }
+        expect(values).to eq({})
+      end
+    end
+  end
   describe '.write_multiple' do
     it 'sets multiple keys when key_prefix not set' do
       mapping = { 'foo' => 10, 'bar' => 20 }
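The new .hash_increment examples pin down three behaviors: increments accumulate, non-integer values write nothing, and negative values write nothing. A sketch consistent with those expectations, assuming Redis HINCRBY underneath; the expiry handling the real helper likely has is omitted:

    # Guard, then delegate to Redis HINCRBY, which atomically adds an
    # integer to a hash field, creating the hash and field if missing.
    def self.hash_increment(raw_key, field, value)
      return unless value.is_a?(Integer) # 'not-an-integer' is ignored
      return if value < 0                # negative amounts are ignored

      key = cache_key_for(raw_key)

      Gitlab::Redis::Cache.with do |redis|
        redis.hincrby(key, field, value)
      end
    end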

View File

@@ -2,7 +2,7 @@
 require 'spec_helper'
-RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
+RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator, feature_category: :team_planning do
   subject { described_class }
   shared_examples 'arg line with invalid parameters' do
@@ -51,23 +51,38 @@ RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
     end
   end
-  context 'only time present in arg line' do
+  context 'time present in arg line' do
-    it_behaves_like 'arg line with valid parameters' do
+    let(:time_part) { '2m 3m 5m 1h' }
-      let(:valid_arg) { '2m 3m 5m 1h' }
+    let(:valid_arg) { time_part }
-      let(:time) { Gitlab::TimeTrackingFormatter.parse(valid_arg) }
+    let(:time) { Gitlab::TimeTrackingFormatter.parse(time_part) }
     let(:date) { DateTime.current }
-      let(:expected_response) { [time, date] }
+    let(:expected_response) { [time, date, nil] }
+    it_behaves_like 'arg line with valid parameters'
+    context 'timecategory present in arg line' do
+      let(:valid_arg) { "#{time_part} [timecategory:dev]" }
+      let(:expected_response) { [time, date, 'dev'] }
+      it_behaves_like 'arg line with valid parameters'
     end
   end
   context 'simple time with date in arg line' do
-    it_behaves_like 'arg line with valid parameters' do
     let(:raw_time) { '10m' }
     let(:raw_date) { '2016-02-02' }
     let(:valid_arg) { "#{raw_time} #{raw_date}" }
     let(:date) { Date.parse(raw_date) }
     let(:time) { Gitlab::TimeTrackingFormatter.parse(raw_time) }
-    let(:expected_response) { [time, date] }
+    let(:expected_response) { [time, date, nil] }
+    it_behaves_like 'arg line with valid parameters'
+    context 'timecategory present in arg line' do
+      let(:valid_arg) { "#{raw_time} #{raw_date} [timecategory:support]" }
+      let(:expected_response) { [time, date, 'support'] }
+      it_behaves_like 'arg line with valid parameters'
+    end
   end
@@ -78,7 +93,7 @@ RSpec.describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
       let(:valid_arg) { "#{raw_time} #{raw_date}" }
       let(:date) { Date.parse(raw_date) }
       let(:time) { Gitlab::TimeTrackingFormatter.parse(raw_time) }
-      let(:expected_response) { [time, date] }
+      let(:expected_response) { [time, date, nil] }
     end
   end
 end
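The third element added to expected_response shows the separator now yields [time, date, category]. One way to peel a [timecategory:...] tag off the arg line; the regex and helper name are illustrative, not the actual implementation:

    TIMECATEGORY_PATTERN = /\[timecategory:([^\]]+)\]/

    # Returns the arg line without the tag, plus the category name (or nil).
    def split_timecategory(arg_line)
      category = arg_line[TIMECATEGORY_PATTERN, 1]
      [arg_line.sub(TIMECATEGORY_PATTERN, '').strip, category]
    end

    split_timecategory('10m 2016-02-02 [timecategory:support]')
    # => ["10m 2016-02-02", "support"]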

View File

@@ -487,4 +487,71 @@ RSpec.describe BulkImports::Entity, type: :model, feature_category: :importers d
       expect(subject.source_version).to eq(subject.bulk_import.source_version_info)
     end
   end
+  describe '#checksums' do
+    let(:entity) { create(:bulk_import_entity) }
+    it 'returns checksums for all imported relations' do
+      create(:bulk_import_tracker,
+        entity: entity,
+        relation: 'BulkImports::Common::Pipelines::MilestonesPipeline',
+        source_objects_count: 7,
+        fetched_objects_count: 6,
+        imported_objects_count: 5
+      )
+      create(:bulk_import_tracker,
+        entity: entity,
+        relation: 'BulkImports::Common::Pipelines::LabelsPipeline',
+        source_objects_count: 10,
+        fetched_objects_count: 9,
+        imported_objects_count: 8
+      )
+      expect(entity.checksums).to eq(
+        {
+          milestones: {
+            source: 7,
+            fetched: 6,
+            imported: 5
+          },
+          labels: {
+            source: 10,
+            fetched: 9,
+            imported: 8
+          }
+        }
+      )
+    end
+    context 'when tracker should not be included' do
+      let(:tracker) { create(:bulk_import_tracker, entity: entity, relation: 'BulkImports::Common::Pipelines::MilestonesPipeline') }
+      context 'when tracker is for a file extraction pipeline' do
+        it 'does not include the tracker' do
+          expect(entity.checksums).to eq({})
+        end
+      end
+      context 'when tracker is skipped' do
+        it 'does not include the tracker' do
+          tracker.skip!
+          expect(entity.checksums).to eq({})
+        end
+      end
+      context 'when tracker checksums are zeros' do
+        it 'does not include the tracker' do
+          tracker.update!(
+            source_objects_count: 0,
+            fetched_objects_count: 0,
+            imported_objects_count: 0
+          )
+          expect(entity.checksums).to eq({})
+        end
+      end
+    end
+  end
 end
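These expectations imply Entity#checksums walks the entity's trackers, keys each result by relation (milestones, labels), and drops trackers that are skipped or whose counts are all zero. A sketch under those assumptions, delegating the per-tracker rules to the tracker itself; the real method may differ:

    def checksums
      trackers.each_with_object({}) do |tracker, result|
        next if tracker.skipped?         # skipped trackers are excluded
        next if tracker.checksums_empty? # missing or all-zero counts are excluded

        result.merge!(tracker.checksums) # e.g. { labels: { source: 10, ... } }
      end
    end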

View File

@@ -389,4 +389,12 @@ RSpec.describe BulkImports::ExportStatus, :clean_gitlab_redis_cache, feature_cat
       end
     end
   end
+  describe '#total_objects_count' do
+    let(:status) { BulkImports::Export::FINISHED }
+    it 'returns total objects count' do
+      expect(subject.total_objects_count).to eq(1)
+    end
+  end
 end

View File

@@ -110,4 +110,100 @@ RSpec.describe BulkImports::Tracker, type: :model, feature_category: :importers
       end
     end
   end
+  describe '#checksums' do
+    let(:tracker) { create(:bulk_import_tracker) }
+    let(:checksums) { { source: 1, fetched: 1, imported: 1 } }
+    before do
+      allow(tracker).to receive(:file_extraction_pipeline?).and_return(true)
+      allow(tracker).to receive_message_chain(:pipeline_class, :relation, :to_sym).and_return(:labels)
+    end
+    context 'when checksums are cached' do
+      it 'returns the cached checksums' do
+        allow(BulkImports::ObjectCounter).to receive(:summary).and_return(checksums)
+        expect(tracker.checksums).to eq({ labels: checksums })
+      end
+    end
+    context 'when checksums are persisted' do
+      it 'returns the persisted checksums' do
+        allow(BulkImports::ObjectCounter).to receive(:summary).and_return(nil)
+        tracker.update!(
+          source_objects_count: checksums[:source],
+          fetched_objects_count: checksums[:fetched],
+          imported_objects_count: checksums[:imported]
+        )
+        expect(tracker.checksums).to eq({ labels: checksums })
+      end
+    end
+    context 'when pipeline is not a file extraction pipeline' do
+      it 'returns nil' do
+        allow(tracker).to receive(:file_extraction_pipeline?).and_return(false)
+        expect(tracker.checksums).to be_nil
+      end
+    end
+  end
+  describe '#checksums_empty?' do
+    let(:tracker) { create(:bulk_import_tracker) }
+    before do
+      allow(tracker).to receive_message_chain(:pipeline_class, :relation, :to_sym).and_return(:labels)
+    end
+    context 'when checksums are missing' do
+      it 'returns true' do
+        allow(tracker).to receive(:checksums).and_return(nil)
+        expect(tracker.checksums_empty?).to eq(true)
+      end
+    end
+    context 'when checksums are present' do
+      it 'returns false' do
+        allow(tracker)
+          .to receive(:checksums)
+          .and_return({ labels: { source: 1, fetched: 1, imported: 1 } })
+        expect(tracker.checksums_empty?).to eq(false)
+      end
+    end
+    context 'when checksums are all zeros' do
+      it 'returns true' do
+        allow(tracker)
+          .to receive(:checksums)
+          .and_return({ labels: { source: 0, fetched: 0, imported: 0 } })
+        expect(tracker.checksums_empty?).to eq(true)
+      end
+    end
+  end
+  describe 'checksums persistence' do
+    let(:tracker) { create(:bulk_import_tracker, :started) }
+    context 'when transitioned to finished' do
+      it 'persists the checksums' do
+        expect(BulkImports::ObjectCounter).to receive(:persist!).with(tracker)
+        tracker.finish!
+      end
+    end
+    context 'when transitioned to failed' do
+      it 'persists the checksums' do
+        expect(BulkImports::ObjectCounter).to receive(:persist!).with(tracker)
+        tracker.fail_op!
+      end
+    end
+  end
 end
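Read together, these examples suggest Tracker#checksums prefers the live Redis summary and falls back to the persisted *_objects_count columns, while state transitions flush the counter to the database. A sketch under those assumptions only:

    # Assumed implementation consistent with the expectations above.
    def checksums
      return unless file_extraction_pipeline?

      counts = BulkImports::ObjectCounter.summary(self) || {
        source: source_objects_count,
        fetched: fetched_objects_count,
        imported: imported_objects_count
      }

      { pipeline_class.relation.to_sym => counts }
    end

    def checksums_empty?
      return true unless checksums

      checksums.each_value.all? { |counts| counts.each_value.all?(&:zero?) }
    end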

View File

@@ -10,6 +10,38 @@ RSpec.describe API::BulkImports, feature_category: :importers do
   let_it_be(:entity_2) { create(:bulk_import_entity, bulk_import: import_1) }
   let_it_be(:entity_3) { create(:bulk_import_entity, bulk_import: import_2) }
   let_it_be(:failure_3) { create(:bulk_import_failure, entity: entity_3) }
+  let_it_be(:tracker_1) do
+    create(
+      :bulk_import_tracker,
+      entity: entity_1,
+      relation: 'BulkImports::Common::Pipelines::LabelsPipeline',
+      source_objects_count: 3,
+      fetched_objects_count: 2,
+      imported_objects_count: 1
+    )
+  end
+  let_it_be(:tracker_2) do
+    create(
+      :bulk_import_tracker,
+      entity: entity_2,
+      relation: 'BulkImports::Common::Pipelines::MilestonesPipeline',
+      source_objects_count: 5,
+      fetched_objects_count: 4,
+      imported_objects_count: 3
+    )
+  end
+  let_it_be(:tracker_3) do
+    create(
+      :bulk_import_tracker,
+      entity: entity_3,
+      relation: 'BulkImports::Common::Pipelines::BoardsPipeline',
+      source_objects_count: 10,
+      fetched_objects_count: 9,
+      imported_objects_count: 8
+    )
+  end
   before do
     stub_application_setting(bulk_import_enabled: true)
@@ -370,6 +402,16 @@ RSpec.describe API::BulkImports, feature_category: :importers do
       expect(json_response.pluck('id')).to contain_exactly(entity_1.id, entity_2.id, entity_3.id)
     end
+    it 'includes entity stats' do
+      request
+      expect(json_response.pluck('stats')).to contain_exactly(
+        { 'labels' => { 'source' => 3, 'fetched' => 2, 'imported' => 1 } },
+        { 'milestones' => { 'source' => 5, 'fetched' => 4, 'imported' => 3 } },
+        { 'boards' => { 'source' => 10, 'fetched' => 9, 'imported' => 8 } }
+      )
+    end
     it_behaves_like 'disabled feature'
   end
@@ -397,6 +439,14 @@ RSpec.describe API::BulkImports, feature_category: :importers do
       expect(json_response.first['failures'].first['exception_message']).to eq(failure_3.exception_message)
     end
+    it 'includes entity stats' do
+      request
+      expect(json_response.pluck('stats')).to contain_exactly(
+        { 'boards' => { 'source' => 10, 'fetched' => 9, 'imported' => 8 } }
+      )
+    end
     it_behaves_like 'disabled feature'
   end
@@ -410,6 +460,12 @@ RSpec.describe API::BulkImports, feature_category: :importers do
       expect(json_response['id']).to eq(entity_2.id)
     end
+    it 'includes entity stats' do
+      request
+      expect(json_response['stats']).to eq({ 'milestones' => { 'source' => 5, 'fetched' => 4, 'imported' => 3 } })
+    end
     it_behaves_like 'disabled feature'
   end
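For API consumers, the visible change is a stats object on each entity payload, keyed by relation name. Based on the expectations above, a list response would carry something like the following; the id and endpoint path are illustrative, and other entity fields are elided:

    GET /bulk_imports/entities

    [
      {
        "id": 7,
        "stats": {
          "boards": { "source": 10, "fetched": 9, "imported": 8 }
        }
      }
    ]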

View File

@@ -40,7 +40,7 @@ RSpec.describe BulkImports::RelationExportService, feature_category: :importers
     expect(export.batched?).to eq(false)
     expect(export.batches_count).to eq(0)
     expect(export.batches.count).to eq(0)
-    expect(export.total_objects_count).to eq(0)
+    expect(export.total_objects_count).to eq(1)
   end
   it 'removes temp export files' do

View File

@@ -105,6 +105,19 @@ RSpec.describe Notes::QuickActionsService, feature_category: :team_planning do
       end
     end
+    context 'with a timecategory' do
+      let!(:timelog_category) { create(:timelog_category, name: 'bob', namespace: project.root_namespace) }
+      let(:note_text) { "a note \n/spend 1h [timecategory:bob]" }
+      it 'sets the category of the new timelog' do
+        new_content, update_params = service.execute(note)
+        note.update!(note: new_content)
+        service.apply_updates(update_params, note)
+        expect(Timelog.last.timelog_category_id).to eq(timelog_category.id)
+      end
+    end
    context 'adds a system note' do
      context 'when not specifying a date' do
        let(:note_text) { "/spend 1h" }

View File

@@ -3,6 +3,15 @@
 require 'spec_helper'
 RSpec.describe Projects::AutocompleteService, feature_category: :groups_and_projects do
+  let_it_be(:group) { create(:group, :crm_enabled) }
+  let_it_be(:project) { create(:project, :public, group: group) }
+  let_it_be(:owner) { create(:user) }
+  let_it_be(:issue) { create(:issue, project: project, title: 'Issue 1') }
+  before_all do
+    project.add_owner(owner)
+  end
   describe '#issues' do
     describe 'confidential issues' do
       let(:author) { create(:user) }
@@ -10,8 +19,6 @@ RSpec.describe Projects::AutocompleteService, feature_category: :groups_and_proj
       let(:non_member) { create(:user) }
       let(:member) { create(:user) }
       let(:admin) { create(:admin) }
-      let(:project) { create(:project, :public) }
-      let!(:issue) { create(:issue, project: project, title: 'Issue 1') }
       let!(:security_issue_1) { create(:issue, :confidential, project: project, title: 'Security issue 1', author: author) }
       let!(:security_issue_2) { create(:issue, :confidential, title: 'Security issue 2', project: project, assignees: [assignee]) }
@@ -107,8 +114,6 @@ RSpec.describe Projects::AutocompleteService, feature_category: :groups_and_proj
   describe '#milestones' do
     let(:user) { create(:user) }
-    let(:group) { create(:group) }
-    let(:project) { create(:project, group: group) }
     let!(:group_milestone1) { create(:milestone, group: group, due_date: '2017-01-01', title: 'Second Title') }
     let!(:group_milestone2) { create(:milestone, group: group, due_date: '2017-01-01', title: 'First Title') }
     let!(:project_milestone) { create(:milestone, project: project, due_date: '2016-01-01') }
@@ -150,8 +155,6 @@ RSpec.describe Projects::AutocompleteService, feature_category: :groups_and_proj
   describe '#contacts' do
     let_it_be(:user) { create(:user) }
-    let_it_be(:group) { create(:group, :crm_enabled) }
-    let_it_be(:project) { create(:project, group: group) }
     let_it_be(:contact_1) { create(:contact, group: group) }
     let_it_be(:contact_2) { create(:contact, group: group) }
     let_it_be(:contact_3) { create(:contact, :inactive, group: group) }
@@ -252,4 +255,30 @@ RSpec.describe Projects::AutocompleteService, feature_category: :groups_and_proj
       end
     end
   end
+  describe '#commands' do
+    subject(:commands) { described_class.new(project, owner).commands(issue) }
+    context 'spend' do
+      it 'params include timecategory' do
+        expect(commands).to include(a_hash_including(
+          name: :spend,
+          params: ['time(1h30m | -1h30m) <date(YYYY-MM-DD)> <[timecategory:category-name]>']
+        ))
+      end
+      context 'when timelog_category_quick_action feature flag is disabled' do
+        before do
+          stub_feature_flags(timelog_categories: false)
+        end
+        it 'params do not include timecategory' do
+          expect(commands).to include(a_hash_including(
+            name: :spend,
+            params: ['time(1h30m | -1h30m) <date(YYYY-MM-DD)>']
+          ))
+        end
+      end
+    end
+  end
 end

View File

@@ -384,6 +384,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
       _, updates, _ = service.execute(content, issuable)
       expect(updates).to eq(spend_time: {
+        category: nil,
         duration: 3600,
         user_id: developer.id,
         spent_at: DateTime.current
@@ -398,6 +399,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
       _, updates, _ = service.execute(content, issuable)
       expect(updates).to eq(spend_time: {
+        category: nil,
         duration: -7200,
         user_id: developer.id,
         spent_at: DateTime.current
@@ -417,6 +419,7 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
       _, updates, _ = service.execute(content, issuable)
       expect(updates).to eq(spend_time: {
+        category: nil,
         duration: 1800,
         user_id: developer.id,
         spent_at: Date.parse(date)
@@ -440,6 +443,14 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
     end
   end
+  shared_examples 'spend command with category' do
+    it 'populates spend_time with expected attributes' do
+      _, updates, _ = service.execute(content, issuable)
+      expect(updates).to match(spend_time: a_hash_including(category: 'pm'))
+    end
+  end
   shared_examples 'remove_estimate command' do
     it 'populates time_estimate: 0 if content contains /remove_estimate' do
       _, updates, _ = service.execute(content, issuable)
@@ -1544,6 +1555,11 @@ RSpec.describe QuickActions::InterpretService, feature_category: :team_planning
       let(:issuable) { issue }
     end
+    it_behaves_like 'spend command with category' do
+      let(:content) { '/spent 30m [timecategory:pm]' }
+      let(:issuable) { issue }
+    end
     it_behaves_like 'failed command' do
       let(:content) { '/spend' }
       let(:issuable) { issue }

View File

@@ -16,6 +16,8 @@ RSpec.shared_examples 'wiki pipeline imports a wiki for an entity' do
     allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
       allow(extractor).to receive(:extract).and_return(extracted_data)
     end
+    allow(subject).to receive(:set_source_objects_counter)
   end
   context 'when wiki exists' do