Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-12-03 21:34:01 +00:00
parent 0eded9955f
commit bf865669fc
107 changed files with 2590 additions and 1492 deletions

View File

@ -442,9 +442,9 @@ jest vue3 check quarantined:
expire_in: 31d
when: always
paths:
- jest_stdout
- jest_stderr
- junit_jest.xml
- tmp/tests/frontend/jest_stdout
- tmp/tests/frontend/jest_stderr
- tmp/tests/frontend/jest_results.json
jest-with-fixtures vue3 check quarantined:
extends:
@ -461,9 +461,9 @@ jest-with-fixtures vue3 check quarantined:
expire_in: 31d
when: always
paths:
- jest_stdout
- jest_stderr
- junit_jest.xml
- tmp/tests/frontend/jest_stdout
- tmp/tests/frontend/jest_stderr
- tmp/tests/frontend/jest_results.json
jest-integration:
extends:

View File

@ -1,7 +1,7 @@
**Please note:** if the incident relates to sensitive data or is security-related, consider
labeling this issue with ~security and mark it confidential, or create it in a private repository.
There is now a separate internal-only RCA template for SIRT issues referenced https://about.gitlab.com/handbook/security/root-cause-analysis.html
There is now a separate internal-only RCA template for SIRT issues referenced https://handbook.gitlab.com/handbook/security/root-cause-analysis/
***
## Summary

View File

@ -21,7 +21,7 @@ export default {
return this.member.user?.lastActivityOn;
},
accessGranted() {
return this.member.requestAcceptedAt || this.member.createdAt;
return this.member.inviteAcceptedAt || this.member.requestAcceptedAt || this.member.createdAt;
},
},
};
@ -47,7 +47,7 @@ export default {
name="check"
:title="s__('Members|Access granted')"
/>
<user-date :date="accessGranted" />
<user-date data-testid="access-granted-date" :date="accessGranted" />
</div>
<div v-if="lastActivity" class="gl-flex gl-gap-3">
<gl-icon

View File

@ -76,7 +76,7 @@ export default {
return { fullPath: this.currentPath };
},
skip() {
return !this.isAsyncSidebarCountsFlagEnabled || !this.currentPath;
return !this.currentPath;
},
update(data) {
return data?.namespace?.sidebar ?? {};
@ -87,9 +87,6 @@ export default {
},
},
computed: {
isAsyncSidebarCountsFlagEnabled() {
return this.glFeatures.asyncSidebarCounts;
},
// Returns the list of items that we want to have static at the top.
// Only sidebars that support pins also support a static section.
staticItems() {

View File

@ -22,7 +22,7 @@ export default {
</h1>
<div
v-if="$scopedSlots.actions"
class="page-heading-actions gl-flex gl-w-full gl-shrink-0 gl-flex-wrap gl-items-start gl-gap-3 sm:gl-w-auto md:gl-mt-1 lg:gl-mt-2"
class="page-heading-actions gl-flex gl-w-full gl-shrink-0 gl-flex-wrap gl-items-center gl-gap-3 sm:gl-w-auto md:gl-mt-1 lg:gl-mt-2"
data-testid="page-heading-actions"
>
<slot name="actions"></slot>

View File

@ -4,7 +4,7 @@
= heading || @heading
- if actions?
.page-heading-actions.gl-self-start.md:gl-mt-1.lg:gl-mt-2.gl-flex.gl-flex-wrap.gl-items-start.gl-gap-3.gl-w-full.sm:gl-w-auto.gl-shrink-0{ data: { testid: 'page-heading-actions' } }
.page-heading-actions.gl-self-start.md:gl-mt-1.lg:gl-mt-2.gl-flex.gl-flex-wrap.gl-items-center.gl-gap-3.gl-w-full.sm:gl-w-auto.gl-shrink-0{ data: { testid: 'page-heading-actions' } }
= actions
- if description? || @description

View File

@ -15,7 +15,6 @@ class Groups::ApplicationController < ApplicationController
before_action do
push_namespace_setting(:math_rendering_limits_enabled, @group)
push_frontend_feature_flag(:async_sidebar_counts, @group&.root_ancestor)
end
private

View File

@ -12,7 +12,6 @@ class Projects::ApplicationController < ApplicationController
before_action do
push_namespace_setting(:math_rendering_limits_enabled, @project&.parent)
push_frontend_feature_flag(:async_sidebar_counts, @project&.root_ancestor)
end
helper_method :repository, :can_collaborate_with_project?, :user_access

View File

@ -277,13 +277,13 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
if can_current_user_push_to_default_branch?
new_file_path = empty_repo? ? ide_edit_path(project, default_branch_or_main) : project_new_blob_path(project, default_branch_or_main)
AnchorData.new(false, statistic_icon('plus', '!gl-text-blue-500 gl-mr-3') + _('New file'), new_file_path)
AnchorData.new(false, statistic_icon('plus', 'gl-mr-3') + _('New file'), new_file_path)
end
end
def readme_anchor_data
if can_current_user_push_to_default_branch? && readme_path.nil?
icon = statistic_icon('plus', '!gl-text-blue-500 gl-mr-3')
icon = statistic_icon('plus', 'gl-mr-3')
label = icon + _('Add README')
AnchorData.new(false, label, empty_repo? ? add_readme_ide_path : add_readme_path)
elsif readme_path
@ -329,7 +329,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
'license'
)
elsif can_current_user_push_to_default_branch?
icon = statistic_icon('plus', '!gl-text-blue-500 gl-mr-3')
icon = statistic_icon('plus', 'gl-mr-3')
label = icon + _('Add LICENSE')
AnchorData.new(
false,
@ -341,7 +341,7 @@ class ProjectPresenter < Gitlab::View::Presenter::Delegated
def contribution_guide_anchor_data
if can_current_user_push_to_default_branch? && repository.contribution_guide.blank?
icon = statistic_icon('plus', '!gl-text-blue-500 gl-mr-3')
icon = statistic_icon('plus', 'gl-mr-3')
label = icon + _('Add CONTRIBUTING')
AnchorData.new(
false,

View File

@ -11,6 +11,7 @@ class MemberEntity < Grape::Entity
end
expose :requested_at
expose :request_accepted_at
expose :invite_accepted_at
expose :created_by,
if: ->(member) { member.created_by.present? && member.is_source_accessible_to_current_user } do |member|

View File

@ -31,6 +31,21 @@
"joining_project": {
"description": "Setting to understand if a user is joining a project or not during onboarding",
"type": "boolean"
},
"role": {
"description": "User persona collected during onboarding",
"type": "integer",
"enum": [
0,
1,
2,
3,
4,
5,
6,
7,
8
]
}
},
"additionalProperties": false

View File

@ -1,9 +1,9 @@
---
name: async_sidebar_counts
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/498901
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/171405
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/502206
milestone: '17.6'
group: group::project management
name: bitbucket_server_user_mapping
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/466356
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/165855
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/tbd
milestone: '17.7'
group: group::import and integrate
type: wip
default_enabled: false

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillIssuableMetricImagesNamespaceId
description: Backfills sharding key `issuable_metric_images.namespace_id` from `issues`.
feature_category: observability
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/174508
milestone: '17.7'
queued_migration_version: 20241203081756
finalized_by: # version of the migration that finalized this BBM

View File

@ -0,0 +1,8 @@
---
migration_job_name: BackfillResourceLinkEventsNamespaceId
description: Backfills sharding key `resource_link_events.namespace_id` from `issues`.
feature_category: team_planning
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/174402
milestone: '17.7'
queued_migration_version: 20241202141411
finalized_by: # version of the migration that finalized this BBM

View File

@ -18,3 +18,4 @@ desired_sharding_key:
sharding_key: namespace_id
belongs_to: issue
table_size: small
desired_sharding_key_migration_job_name: BackfillIssuableMetricImagesNamespaceId

View File

@ -18,3 +18,4 @@ desired_sharding_key:
sharding_key: namespace_id
belongs_to: issue
table_size: small
desired_sharding_key_migration_job_name: BackfillResourceLinkEventsNamespaceId

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddNamespaceIdToResourceLinkEvents < Gitlab::Database::Migration[2.2]
milestone '17.7'
def change
add_column :resource_link_events, :namespace_id, :bigint
end
end

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddNamespaceIdToIssuableMetricImages < Gitlab::Database::Migration[2.2]
milestone '17.7'
def change
add_column :issuable_metric_images, :namespace_id, :bigint
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexResourceLinkEventsOnNamespaceId < Gitlab::Database::Migration[2.2]
milestone '17.7'
disable_ddl_transaction!
INDEX_NAME = 'index_resource_link_events_on_namespace_id'
def up
add_concurrent_index :resource_link_events, :namespace_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :resource_link_events, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddResourceLinkEventsNamespaceIdFk < Gitlab::Database::Migration[2.2]
milestone '17.7'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :resource_link_events, :namespaces, column: :namespace_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :resource_link_events, column: :namespace_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddResourceLinkEventsNamespaceIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.7'
def up
install_sharding_key_assignment_trigger(
table: :resource_link_events,
sharding_key: :namespace_id,
parent_table: :issues,
parent_sharding_key: :namespace_id,
foreign_key: :issue_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :resource_link_events,
sharding_key: :namespace_id,
parent_table: :issues,
parent_sharding_key: :namespace_id,
foreign_key: :issue_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillResourceLinkEventsNamespaceId < Gitlab::Database::Migration[2.2]
milestone '17.7'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillResourceLinkEventsNamespaceId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:resource_link_events,
:id,
:namespace_id,
:issues,
:namespace_id,
:issue_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:resource_link_events,
:id,
[
:namespace_id,
:issues,
:namespace_id,
:issue_id
]
)
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class IndexIssuableMetricImagesOnNamespaceId < Gitlab::Database::Migration[2.2]
milestone '17.7'
disable_ddl_transaction!
INDEX_NAME = 'index_issuable_metric_images_on_namespace_id'
def up
add_concurrent_index :issuable_metric_images, :namespace_id, name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :issuable_metric_images, INDEX_NAME
end
end

View File

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddIssuableMetricImagesNamespaceIdFk < Gitlab::Database::Migration[2.2]
milestone '17.7'
disable_ddl_transaction!
def up
add_concurrent_foreign_key :issuable_metric_images, :namespaces, column: :namespace_id, on_delete: :cascade
end
def down
with_lock_retries do
remove_foreign_key :issuable_metric_images, column: :namespace_id
end
end
end

View File

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AddIssuableMetricImagesNamespaceIdTrigger < Gitlab::Database::Migration[2.2]
milestone '17.7'
def up
install_sharding_key_assignment_trigger(
table: :issuable_metric_images,
sharding_key: :namespace_id,
parent_table: :issues,
parent_sharding_key: :namespace_id,
foreign_key: :issue_id
)
end
def down
remove_sharding_key_assignment_trigger(
table: :issuable_metric_images,
sharding_key: :namespace_id,
parent_table: :issues,
parent_sharding_key: :namespace_id,
foreign_key: :issue_id
)
end
end

View File

@ -0,0 +1,40 @@
# frozen_string_literal: true
class QueueBackfillIssuableMetricImagesNamespaceId < Gitlab::Database::Migration[2.2]
milestone '17.7'
restrict_gitlab_migration gitlab_schema: :gitlab_main_cell
MIGRATION = "BackfillIssuableMetricImagesNamespaceId"
DELAY_INTERVAL = 2.minutes
BATCH_SIZE = 1000
SUB_BATCH_SIZE = 100
def up
queue_batched_background_migration(
MIGRATION,
:issuable_metric_images,
:id,
:namespace_id,
:issues,
:namespace_id,
:issue_id,
job_interval: DELAY_INTERVAL,
batch_size: BATCH_SIZE,
sub_batch_size: SUB_BATCH_SIZE
)
end
def down
delete_batched_background_migration(
MIGRATION,
:issuable_metric_images,
:id,
[
:namespace_id,
:issues,
:namespace_id,
:issue_id
]
)
end
end

View File

@ -0,0 +1 @@
78389292d70f52ad0b6103481ffeced191010541190736788dc0f806a4617a8f

View File

@ -0,0 +1 @@
f0cafbe8bad0b7c86865306e63d50a7a3a23b226c9f8dd2da78a0ae4b54d3886

View File

@ -0,0 +1 @@
e218d674abaee1771822d3eccb3489f6c73472bb25611fe071223f3ecfc3c8a8

View File

@ -0,0 +1 @@
9d71690c631de041db2899ed82263340819e76304a4e60aa3891547eb93ae3dc

View File

@ -0,0 +1 @@
8c61555eeea37bc1c64d81b2281322a64ca258e0da94fd9caaebeb30ebbbc61b

View File

@ -0,0 +1 @@
d9484a87ee12cd6b97fcf0b2fc62adf2efe4f4516ecccdb776f08f107cd66093

View File

@ -0,0 +1 @@
4a0ec8fcc840c8d90ef3f7e327738492a62c328dc3292c1cce5de03443c46b44

View File

@ -0,0 +1 @@
64ce1b126cea404c01011731b2c4acd410df9c5a28e3fb7abc03fb593b5f14fd

View File

@ -0,0 +1 @@
55aa6dcb29a41b91c03618002adb45d17980283295f118b0730ebe13deddefd3

View File

@ -0,0 +1 @@
936d67bdefcf3bffec6fddbb9ce6bee50723d47b89f5da1cd053e937147e3151

View File

@ -1988,6 +1988,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_7943cb549289() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."namespace_id" IS NULL THEN
SELECT "namespace_id"
INTO NEW."namespace_id"
FROM "issues"
WHERE "issues"."id" = NEW."issue_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_7a8b08eed782() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -2833,6 +2849,22 @@ RETURN NEW;
END
$$;
CREATE FUNCTION trigger_e815625b59fa() RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
IF NEW."namespace_id" IS NULL THEN
SELECT "namespace_id"
INTO NEW."namespace_id"
FROM "issues"
WHERE "issues"."id" = NEW."issue_id";
END IF;
RETURN NEW;
END
$$;
CREATE FUNCTION trigger_ebab34f83f1d() RETURNS trigger
LANGUAGE plpgsql
AS $$
@ -13564,6 +13596,7 @@ CREATE TABLE issuable_metric_images (
file text NOT NULL,
url text,
url_text text,
namespace_id bigint,
CONSTRAINT check_3bc6d47661 CHECK ((char_length(url_text) <= 128)),
CONSTRAINT check_5b3011e234 CHECK ((char_length(url) <= 255)),
CONSTRAINT check_7ed527062f CHECK ((char_length(file) <= 255))
@ -19004,7 +19037,8 @@ CREATE TABLE resource_link_events (
issue_id bigint NOT NULL,
child_work_item_id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
system_note_metadata_id bigint
system_note_metadata_id bigint,
namespace_id bigint
);
CREATE SEQUENCE resource_link_events_id_seq
@ -30710,6 +30744,8 @@ CREATE INDEX index_ip_restrictions_on_group_id ON ip_restrictions USING btree (g
CREATE INDEX index_issuable_metric_images_on_issue_id ON issuable_metric_images USING btree (issue_id);
CREATE INDEX index_issuable_metric_images_on_namespace_id ON issuable_metric_images USING btree (namespace_id);
CREATE INDEX index_issuable_resource_links_on_issue_id ON issuable_resource_links USING btree (issue_id);
CREATE INDEX index_issuable_resource_links_on_namespace_id ON issuable_resource_links USING btree (namespace_id);
@ -32168,6 +32204,8 @@ CREATE INDEX index_resource_link_events_on_child_work_item_id ON resource_link_e
CREATE INDEX index_resource_link_events_on_issue_id ON resource_link_events USING btree (issue_id);
CREATE INDEX index_resource_link_events_on_namespace_id ON resource_link_events USING btree (namespace_id);
CREATE INDEX index_resource_link_events_on_user_id ON resource_link_events USING btree (user_id);
CREATE INDEX index_resource_milestone_events_created_at ON resource_milestone_events USING btree (created_at);
@ -35450,6 +35488,8 @@ CREATE TRIGGER trigger_740afa9807b8 BEFORE INSERT OR UPDATE ON subscription_user
CREATE TRIGGER trigger_77d9fbad5b12 BEFORE INSERT OR UPDATE ON packages_debian_project_distribution_keys FOR EACH ROW EXECUTE FUNCTION trigger_77d9fbad5b12();
CREATE TRIGGER trigger_7943cb549289 BEFORE INSERT OR UPDATE ON issuable_metric_images FOR EACH ROW EXECUTE FUNCTION trigger_7943cb549289();
CREATE TRIGGER trigger_7a8b08eed782 BEFORE INSERT OR UPDATE ON boards_epic_board_positions FOR EACH ROW EXECUTE FUNCTION trigger_7a8b08eed782();
CREATE TRIGGER trigger_7de792ddbc05 BEFORE INSERT OR UPDATE ON dast_site_validations FOR EACH ROW EXECUTE FUNCTION trigger_7de792ddbc05();
@ -35560,6 +35600,8 @@ CREATE TRIGGER trigger_e1da4a738230 BEFORE INSERT OR UPDATE ON vulnerability_ext
CREATE TRIGGER trigger_e49ab4d904a0 BEFORE INSERT OR UPDATE ON vulnerability_finding_links FOR EACH ROW EXECUTE FUNCTION trigger_e49ab4d904a0();
CREATE TRIGGER trigger_e815625b59fa BEFORE INSERT OR UPDATE ON resource_link_events FOR EACH ROW EXECUTE FUNCTION trigger_e815625b59fa();
CREATE TRIGGER trigger_ebab34f83f1d BEFORE INSERT OR UPDATE ON packages_debian_publications FOR EACH ROW EXECUTE FUNCTION trigger_ebab34f83f1d();
CREATE TRIGGER trigger_ec1934755627 BEFORE INSERT OR UPDATE ON alert_management_alert_metric_images FOR EACH ROW EXECUTE FUNCTION trigger_ec1934755627();
@ -36361,6 +36403,9 @@ ALTER TABLE ONLY cluster_agent_tokens
ALTER TABLE ONLY protected_tag_create_access_levels
ADD CONSTRAINT fk_7537413f9d FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
ALTER TABLE ONLY resource_link_events
ADD CONSTRAINT fk_75961aea6b FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY environments
ADD CONSTRAINT fk_75c2098045 FOREIGN KEY (cluster_agent_id) REFERENCES cluster_agents(id) ON DELETE SET NULL;
@ -36682,6 +36727,9 @@ ALTER TABLE ONLY abuse_report_user_mentions
ALTER TABLE ONLY security_orchestration_policy_configurations
ADD CONSTRAINT fk_a50430b375 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY issuable_metric_images
ADD CONSTRAINT fk_a53e03ca65 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY operations_strategies
ADD CONSTRAINT fk_a542e10c31 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;

View File

@ -214,6 +214,8 @@ To avoid having a breaking change affect your integrations, you should:
For more information, see [Deprecating GitLab features](../../development/deprecation_guidelines/index.md).
For GitLab self-managed instances, [downgrading](../../downgrade_ee_to_ce/index.md) from an EE instance to CE causes breaking changes.
### Breaking change exemptions
Schema items labeled as experiments in the [GraphQL API reference](reference/index.md)

View File

@ -461,3 +461,5 @@ notice:
- Elements labeled in the [REST API resources](../api_resources.md) as
[experimental or beta](../../policy/development_stages_support.md).
- Fields behind a feature flag and disabled by default.
For GitLab self-managed instances, [downgrading](../../downgrade_ee_to_ce/index.md) from an EE instance to CE causes breaking changes.

View File

@ -11,7 +11,7 @@ When adding new columns to store strings or other textual information:
1. We always use the `text` data type instead of the `string` data type.
1. `text` columns should always have a limit set, either by using the `create_table` with
the `#text ... limit: 100` helper (see below) when creating a table, or by using the `add_text_limit`
when altering an existing table.
when altering an existing table. Without a limit, the longest possible [character string is about 1 GB](https://www.postgresql.org/docs/current/datatype-character.html).
The standard Rails `text` column type cannot be defined with a limit, but we extend `create_table` to
add a `limit: 255` option. Outside of `create_table`, `add_text_limit` can be used to add a [check constraint](https://www.postgresql.org/docs/11/ddl-constraints.html)

View File

@ -17,6 +17,8 @@ type: reference, howto
- [GraphQL Schema](enabling_the_analyzer.md#graphql-schema)
- [HTTP Archive (HAR)](enabling_the_analyzer.md#http-archive-har)
- [Postman Collection v2.0 or v2.1](enabling_the_analyzer.md#postman-collection)
Each scan supports exactly one specification. To scan more than one specification, use multiple scans.
- [GitLab Runner](../../../../ci/runners/index.md) available, with the
[`docker` executor](https://docs.gitlab.com/runner/executors/docker.html) on Linux/amd64.
- Target application deployed. For more details, read [Deployment options](#application-deployment-options).

View File

@ -1001,6 +1001,32 @@ the `secret-detection` job on.
The GitLab pipeline secret detection analyzer [only supports](#enable-the-analyzer) running on the `amd64` CPU architecture.
This message indicates that the job is being run on a different architecture, such as `arm`.
#### Error: `fatal: detected dubious ownership in repository at '/builds/<project dir>'`
Secret detection might fail with an exit status of 128. This can be caused by a change to the user on the Docker image.
For example:
```shell
$ /analyzer run
[INFO] [secrets] [2024-06-06T07:28:13Z] ▶ GitLab secrets analyzer v6.0.1
[INFO] [secrets] [2024-06-06T07:28:13Z] ▶ Detecting project
[INFO] [secrets] [2024-06-06T07:28:13Z] ▶ Analyzer will attempt to analyze all projects in the repository
[INFO] [secrets] [2024-06-06T07:28:13Z] ▶ Loading ruleset for /builds....
[WARN] [secrets] [2024-06-06T07:28:13Z] ▶ /builds/....secret-detection-ruleset.toml not found, ruleset support will be disabled.
[INFO] [secrets] [2024-06-06T07:28:13Z] ▶ Running analyzer
[FATA] [secrets] [2024-06-06T07:28:13Z] ▶ get commit count: exit status 128
```
To work around this issue, add a `before_script` with the following:
```yaml
before_script:
- git config --global --add safe.directory "$CI_PROJECT_DIR"
```
For more information about this issue, see [issue 465974](https://gitlab.com/gitlab-org/gitlab/-/issues/465974).
## Warnings
### Responding to a leaked secret

View File

@ -58,9 +58,9 @@ GitLab in a Kubernetes cluster, you might need a different version of Kubernetes
You can upgrade your
Kubernetes version to a supported version at any time:
- 1.31 (support ends when GitLab version 18.7 is released or when 1.34 becomes supported)
- 1.30 (support ends when GitLab version 18.2 is released or when 1.33 becomes supported)
- 1.29 (support ends when GitLab version 17.10 is released or when 1.32 becomes supported)
- 1.28 (support ends when GitLab version 17.5 is released or when 1.31 becomes supported)
GitLab aims to support a new minor Kubernetes version three months after its initial release. GitLab supports at least three production-ready Kubernetes minor
versions at any given time.

View File

@ -166,6 +166,40 @@ container_scanning:
repository: "your-custom-registry/your-image-path"
```
## Configure scan timeout
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/497460) in GitLab 17.7.
By default, the Trivy scan times out after five minutes. The agent itself provides an extra 15 minutes to read the chained configmaps and transmit the vulnerabilities.
To customize the Trivy timeout duration:
- Specify the duration in seconds with the `scanner_timeout` field.
For example:
```yaml
container_scanning:
scanner_timeout: "3600s" # 60 minutes
```
## Configure Trivy report size
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/497460) in GitLab 17.7.
By default, the Trivy report is limited to 100 MB, which is sufficient for most scans. However, if you have a lot of workloads, you might need to increase the limit.
To do this:
- Specify the limit in bytes with the `report_max_size` field.
For example:
```yaml
container_scanning:
report_max_size: "300000000" # 300MB
```
## View cluster vulnerabilities
To view vulnerability information in GitLab:
@ -200,4 +234,17 @@ In GitLab agent 16.9 and later, operational container scanning:
### `Error running Trivy scan. Container terminated reason: OOMKilled`
OCS might fail with an OOM error if there are too many resources to be scanned or if the images being scanned are large.
To resolve this, [configure the resource requirement](#configure-scanner-resource-requirements) to increase the amount of memory available.
### `Error running Trivy scan due to context timeout`
OCS might fail to complete a scan if it takes Trivy too long to complete the scan. The default scan timeout is 5 minutes, with an extra 15 minutes for the agent to read the results and transmit the vulnerabilities.
To resolve this, [configure the scanner timeout](#configure-scan-timeout) to increase the amount of memory available.
### `trivy report size limit exceeded`
OCS might fail with this error if the generated Trivy report size is larger than the default maximum limit.
To resolve this, [configure the max Trivy report size](#configure-trivy-report-size) to increase the maximum allowed size of the Trivy report.

View File

@ -13,13 +13,13 @@ It can help you troubleshoot your pipeline, write tests, address vulnerabilities
## Step 1: Ensure you have a subscription
Your organization has purchased a GitLab Duo add-on subscription: Either Duo Pro or Duo Enterprise.
Your organization has purchased a GitLab Duo add-on subscription: either Duo Pro or Duo Enterprise.
Each subscription includes a set of AI-powered features to help improve your workflow.
After your organization purchases a subscription, an administrator must assign seats to users.
You likely received an email that notified you of your seat.
The AI-powered features you have access to use large language models (LLMs) to help streamline
The AI-powered features you have access to use language models to help streamline
your workflow. If you're on self-managed GitLab, your administrator can choose to use
GitLab models, or self-host their own models.
@ -30,7 +30,7 @@ For more information, see:
- [Assign seats to users](../../subscriptions/subscription-add-ons.md#assign-gitlab-duo-seats).
- [Features included in Duo Pro and Duo Enterprise](https://about.gitlab.com/gitlab-duo/#pricing).
- [List of GitLab Duo features and their LLMs](../gitlab_duo/index.md).
- [List of GitLab Duo features and their language models](../gitlab_duo/index.md).
- [Self-hosted models](../../administration/self_hosted_models/index.md).
- [Health check details](../gitlab_duo/turn_on_off.md#run-a-health-check-for-gitlab-duo).

View File

@ -129,11 +129,12 @@ For use cases and best practices, follow the [GitLab Duo examples documentation]
## Open tabs as context
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/464767) in GitLab 17.2 [with a flag](../../../../administration/feature_flags.md) named `advanced_context_resolver`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/462750) in GitLab 17.2 [with a flag](../../../../administration/feature_flags.md) named `code_suggestions_context`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/464767) in GitLab 17.1 [with a flag](../../../../administration/feature_flags.md) named `advanced_context_resolver`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/462750) in GitLab 17.1 [with a flag](../../../../administration/feature_flags.md) named `code_suggestions_context`. Disabled by default.
> - [Introduced](https://gitlab.com/gitlab-org/editor-extensions/gitlab-lsp/-/issues/276) in GitLab Workflow for VS Code 4.20.0.
> - [Introduced](https://gitlab.com/gitlab-org/editor-extensions/gitlab-jetbrains-plugin/-/issues/462) in GitLab Duo for JetBrains 2.7.0.
> - [Added](https://gitlab.com/gitlab-org/editor-extensions/gitlab.vim/-/merge_requests/152) to the GitLab Neovim plugin on July 16, 2024.
> - Feature flags `advanced_context_resolver` and `code_suggestions_context` enabled on GitLab.com in GitLab 17.2.
> - Feature flags `advanced_context_resolver` and `code_suggestions_context` [enabled on self-managed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/161538) in GitLab 17.4.
FLAG:

View File

@ -356,7 +356,8 @@ module API
mount ::API::Users
mount ::API::UserCounts
mount ::API::UserRunners
mount ::API::VirtualRegistries::Packages::Maven
mount ::API::VirtualRegistries::Packages::Maven::Registries
mount ::API::VirtualRegistries::Packages::Maven::Endpoints
mount ::API::WebCommits
mount ::API::Wikis

View File

@ -1,115 +0,0 @@
# frozen_string_literal: true
module API
module Concerns
module VirtualRegistries
module Packages
module Maven
module RegistryEndpoints
extend ActiveSupport::Concern
included do
desc 'Get the list of all maven virtual registries' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in an experimental state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success ::API::Entities::VirtualRegistries::Packages::Maven::Registry
failure [
{ code: 400, message: 'Bad Request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not found' }
]
tags %w[maven_virtual_registries]
hidden true
end
params do
requires :group_id, type: Integer, desc: 'The ID of the group', allow_blank: false
end
get do
group = find_group!(declared_params[:group_id])
authorize! :read_virtual_registry, ::VirtualRegistries::Packages::Policies::Group.new(group)
registries = ::VirtualRegistries::Packages::Maven::Registry.for_group(group)
present registries, with: ::API::Entities::VirtualRegistries::Packages::Maven::Registry
end
desc 'Create a new maven virtual registry' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in an experimental state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success code: 201
failure [
{ code: 400, message: 'Bad request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not found' }
]
tags %w[maven_virtual_registries]
hidden true
end
params do
requires :group_id, type: Integer, desc: 'The ID of the group. Must be a top-level group',
allow_blank: false
end
post do
group = find_group!(declared_params[:group_id])
authorize! :create_virtual_registry, ::VirtualRegistries::Packages::Policies::Group.new(group)
new_reg = ::VirtualRegistries::Packages::Maven::Registry.new(declared_params(include_missing: false))
render_validation_error!(new_reg) unless new_reg.save
created!
end
route_param :id, type: Integer, desc: 'The ID of the maven virtual registry' do
desc 'Get a specific maven virtual registry' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in an experimental state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success ::API::Entities::VirtualRegistries::Packages::Maven::Registry
failure [
{ code: 400, message: 'Bad request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not found' }
]
tags %w[maven_virtual_registries]
hidden true
end
get do
authorize! :read_virtual_registry, registry
present registry, with: ::API::Entities::VirtualRegistries::Packages::Maven::Registry
end
desc 'Delete a specific maven virtual registry' do
detail 'This feature was introduced in GitLab 17.4. \
This feature is currently in an experimental state. \
This feature is behind the `virtual_registry_maven` feature flag.'
success code: 204
failure [
{ code: 400, message: 'Bad request' },
{ code: 401, message: 'Unauthorized' },
{ code: 403, message: 'Forbidden' },
{ code: 404, message: 'Not found' },
{ code: 412, message: 'Precondition Failed' }
]
tags %w[maven_virtual_registries]
hidden true
end
delete do
authorize! :destroy_virtual_registry, registry
destroy_conditionally!(registry)
end
end
end
end
end
end
end
end
end

View File

@ -1,172 +0,0 @@
# frozen_string_literal: true

module API
  module VirtualRegistries
    module Packages
      # Grape API class for the Maven virtual registry: registry/upstream
      # management sub-routes plus the package download and Workhorse upload
      # endpoints. Everything here is hidden and gated behind the
      # `virtual_registry_maven` feature flag (see `after_validation`).
      class Maven < ::API::Base
        include ::API::Helpers::Authentication

        feature_category :virtual_registry
        urgency :low

        # Response headers that carry file checksums back to Maven clients
        # on download (see download_file_extra_response_headers).
        SHA1_CHECKSUM_HEADER = 'x-checksum-sha1'
        MD5_CHECKSUM_HEADER = 'x-checksum-md5'

        # Accepted credential types and the transports they may be sent over.
        authenticate_with do |accept|
          accept.token_types(:personal_access_token).sent_through(:http_private_token_header)
          accept.token_types(:deploy_token).sent_through(:http_deploy_token_header)
          accept.token_types(:job_token).sent_through(:http_job_token_header)
          accept.token_types(
            :personal_access_token_with_username,
            :deploy_token_with_username,
            :job_token_with_username
          ).sent_through(:http_basic_auth)
        end

        helpers do
          include ::Gitlab::Utils::StrongMemoize

          delegate :group, :upstream, :registry_upstream, to: :registry

          # Renders 404 when the instance-wide dependency proxy is disabled.
          def require_dependency_proxy_enabled!
            not_found! unless ::Gitlab.config.dependency_proxy.enabled
          end

          # Registry looked up from the `:id` route param; `find` raises
          # (rendered as 404) when no such registry exists. Memoized below.
          def registry
            ::VirtualRegistries::Packages::Maven::Registry.find(params[:id])
          end
          strong_memoize_attr :registry

          # Shared param declaration: registry id + package file path.
          params :id_and_path do
            requires :id,
              type: Integer,
              desc: 'The ID of the Maven virtual registry'
            requires :path,
              type: String,
              file_path: true,
              desc: 'Package path',
              documentation: { example: 'foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar' }
          end
        end

        # Runs before every endpoint: feature-flag gate, dependency proxy
        # availability check, then authentication.
        after_validation do
          not_found! unless Feature.enabled?(:virtual_registry_maven, current_user)

          require_dependency_proxy_enabled!
          authenticate!
        end

        namespace 'virtual_registries/packages/maven' do
          # CRUD sub-routes are provided by shared concerns for registries,
          # their upstreams and the upstreams' cached responses.
          namespace :registries do
            include ::API::Concerns::VirtualRegistries::Packages::Maven::RegistryEndpoints

            route_param :id, type: Integer, desc: 'The ID of the maven virtual registry' do
              namespace :upstreams do
                include ::API::Concerns::VirtualRegistries::Packages::Maven::UpstreamEndpoints

                route_param :upstream_id, type: Integer, desc: 'The ID of the maven virtual registry upstream' do
                  namespace :cached_responses do
                    include ::API::Concerns::VirtualRegistries::Packages::Maven::CachedResponseEndpoints
                  end
                end
              end
            end
          end

          # File-level routes: GET <registry id>/<package path> (download)
          # and POST <registry id>/<package path>/upload (Workhorse only).
          namespace ':id/*path' do
            include ::API::Concerns::VirtualRegistries::Packages::Endpoint

            helpers do
              # Checksum headers added to a file download response.
              def download_file_extra_response_headers(action_params:)
                {
                  SHA1_CHECKSUM_HEADER => action_params[:file_sha1],
                  MD5_CHECKSUM_HEADER => action_params[:file_md5]
                }
              end
            end

            desc 'Download endpoint of the Maven virtual registry.' do
              detail 'This feature was introduced in GitLab 17.3. \
                      This feature is currently in experiment state. \
                      This feature is behind the `virtual_registry_maven` feature flag.'
              success [
                { code: 200 }
              ]
              failure [
                { code: 400, message: 'Bad request' },
                { code: 401, message: 'Unauthorized' },
                { code: 403, message: 'Forbidden' },
                { code: 404, message: 'Not Found' }
              ]
              tags %w[maven_virtual_registries]
              hidden true
            end
            params do
              use :id_and_path
            end
            # `format: false` keeps Grape from splitting a trailing ".ext"
            # off the package path.
            get format: false do
              service_response = ::VirtualRegistries::Packages::Maven::HandleFileRequestService.new(
                registry: registry,
                current_user: current_user,
                params: { path: declared_params[:path] }
              ).execute

              send_error_response_from!(service_response: service_response) if service_response.error?
              send_successful_response_from(service_response: service_response)
            end

            desc 'Workhorse upload endpoint of the Maven virtual registry. Only workhorse can access it.' do
              detail 'This feature was introduced in GitLab 17.4. \
                      This feature is currently in experiment state. \
                      This feature is behind the `virtual_registry_maven` feature flag.'
              success [
                { code: 200 }
              ]
              failure [
                { code: 400, message: 'Bad request' },
                { code: 401, message: 'Unauthorized' },
                { code: 403, message: 'Forbidden' },
                { code: 404, message: 'Not Found' }
              ]
              tags %w[maven_virtual_registries]
              hidden true
            end
            params do
              use :id_and_path
              requires :file,
                type: ::API::Validations::Types::WorkhorseFile,
                desc: 'The file being uploaded',
                documentation: { type: 'file' }
            end
            post 'upload' do
              require_gitlab_workhorse!
              authorize!(:read_virtual_registry, registry)

              # UPSTREAM_GID_HEADER is not defined in this class — presumably
              # provided by the included Packages::Endpoint concern; verify there.
              # The block form of fetch_values returns nil for any missing header.
              etag, content_type, upstream_gid = request.headers.fetch_values(
                'Etag',
                ::Gitlab::Workhorse::SEND_DEPENDENCY_CONTENT_TYPE_HEADER,
                UPSTREAM_GID_HEADER
              ) { nil }

              # TODO: revisit this part when multiple upstreams are supported
              # https://gitlab.com/gitlab-org/gitlab/-/issues/480461
              # coherence check
              not_found!('Upstream') unless upstream == GlobalID::Locator.locate(upstream_gid)

              service_response = ::VirtualRegistries::Packages::Maven::CachedResponses::CreateOrUpdateService.new(
                upstream: upstream,
                current_user: current_user,
                params: declared_params.merge(etag: etag, content_type: content_type)
              ).execute

              send_error_response_from!(service_response: service_response) if service_response.error?
              ok_empty_response
            end
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,172 @@
# frozen_string_literal: true

module API
  module VirtualRegistries
    module Packages
      module Maven
        # Grape API class for the Maven virtual registry file endpoints:
        # upstream/cached-response management sub-routes plus the package
        # download and Workhorse upload routes. Everything here is hidden and
        # gated behind the `virtual_registry_maven` feature flag
        # (see `after_validation`).
        class Endpoints < ::API::Base
          include ::API::Helpers::Authentication

          feature_category :virtual_registry
          urgency :low

          # Response headers that carry file checksums back to Maven clients
          # on download (see download_file_extra_response_headers).
          SHA1_CHECKSUM_HEADER = 'x-checksum-sha1'
          MD5_CHECKSUM_HEADER = 'x-checksum-md5'

          # Accepted credential types and the transports they may be sent over.
          authenticate_with do |accept|
            accept.token_types(:personal_access_token).sent_through(:http_private_token_header)
            accept.token_types(:deploy_token).sent_through(:http_deploy_token_header)
            accept.token_types(:job_token).sent_through(:http_job_token_header)
            accept.token_types(
              :personal_access_token_with_username,
              :deploy_token_with_username,
              :job_token_with_username
            ).sent_through(:http_basic_auth)
          end

          helpers do
            include ::Gitlab::Utils::StrongMemoize

            delegate :group, :upstream, :registry_upstream, to: :registry

            # Renders 404 when the instance-wide dependency proxy is disabled.
            def require_dependency_proxy_enabled!
              not_found! unless ::Gitlab.config.dependency_proxy.enabled
            end

            # Registry looked up from the `:id` route param; `find` raises
            # (rendered as 404) when no such registry exists. Memoized below.
            def registry
              ::VirtualRegistries::Packages::Maven::Registry.find(params[:id])
            end
            strong_memoize_attr :registry

            # Shared param declaration: registry id + package file path.
            params :id_and_path do
              requires :id,
                type: Integer,
                desc: 'The ID of the Maven virtual registry'
              requires :path,
                type: String,
                file_path: true,
                desc: 'Package path',
                documentation: { example: 'foo/bar/mypkg/1.0-SNAPSHOT/mypkg-1.0-SNAPSHOT.jar' }
            end
          end

          # Runs before every endpoint: feature-flag gate, dependency proxy
          # availability check, then authentication.
          after_validation do
            not_found! unless Feature.enabled?(:virtual_registry_maven, current_user)

            require_dependency_proxy_enabled!
            authenticate!
          end

          namespace 'virtual_registries/packages/maven' do
            # Upstream and cached-response sub-routes come from shared concerns.
            # Note: registry CRUD endpoints are not included here.
            namespace :registries do
              route_param :id, type: Integer, desc: 'The ID of the maven virtual registry' do
                namespace :upstreams do
                  include ::API::Concerns::VirtualRegistries::Packages::Maven::UpstreamEndpoints

                  route_param :upstream_id, type: Integer, desc: 'The ID of the maven virtual registry upstream' do
                    namespace :cached_responses do
                      include ::API::Concerns::VirtualRegistries::Packages::Maven::CachedResponseEndpoints
                    end
                  end
                end
              end
            end

            # File-level routes: GET <registry id>/<package path> (download)
            # and POST <registry id>/<package path>/upload (Workhorse only).
            namespace ':id/*path' do
              include ::API::Concerns::VirtualRegistries::Packages::Endpoint

              helpers do
                # Checksum headers added to a file download response.
                def download_file_extra_response_headers(action_params:)
                  {
                    SHA1_CHECKSUM_HEADER => action_params[:file_sha1],
                    MD5_CHECKSUM_HEADER => action_params[:file_md5]
                  }
                end
              end

              desc 'Download endpoint of the Maven virtual registry.' do
                detail 'This feature was introduced in GitLab 17.3. \
                        This feature is currently in experiment state. \
                        This feature is behind the `virtual_registry_maven` feature flag.'
                success [
                  { code: 200 }
                ]
                failure [
                  { code: 400, message: 'Bad request' },
                  { code: 401, message: 'Unauthorized' },
                  { code: 403, message: 'Forbidden' },
                  { code: 404, message: 'Not Found' }
                ]
                tags %w[maven_virtual_registries]
                hidden true
              end
              params do
                use :id_and_path
              end
              # `format: false` keeps Grape from splitting a trailing ".ext"
              # off the package path.
              get format: false do
                service_response = ::VirtualRegistries::Packages::Maven::HandleFileRequestService.new(
                  registry: registry,
                  current_user: current_user,
                  params: { path: declared_params[:path] }
                ).execute

                send_error_response_from!(service_response: service_response) if service_response.error?
                send_successful_response_from(service_response: service_response)
              end

              desc 'Workhorse upload endpoint of the Maven virtual registry. Only workhorse can access it.' do
                detail 'This feature was introduced in GitLab 17.4. \
                        This feature is currently in experiment state. \
                        This feature is behind the `virtual_registry_maven` feature flag.'
                success [
                  { code: 200 }
                ]
                failure [
                  { code: 400, message: 'Bad request' },
                  { code: 401, message: 'Unauthorized' },
                  { code: 403, message: 'Forbidden' },
                  { code: 404, message: 'Not Found' }
                ]
                tags %w[maven_virtual_registries]
                hidden true
              end
              params do
                use :id_and_path
                requires :file,
                  type: ::API::Validations::Types::WorkhorseFile,
                  desc: 'The file being uploaded',
                  documentation: { type: 'file' }
              end
              post 'upload' do
                require_gitlab_workhorse!
                authorize!(:read_virtual_registry, registry)

                # UPSTREAM_GID_HEADER is not defined in this class — presumably
                # provided by the included Packages::Endpoint concern; verify there.
                # The block form of fetch_values returns nil for any missing header.
                etag, content_type, upstream_gid = request.headers.fetch_values(
                  'Etag',
                  ::Gitlab::Workhorse::SEND_DEPENDENCY_CONTENT_TYPE_HEADER,
                  UPSTREAM_GID_HEADER
                ) { nil }

                # TODO: revisit this part when multiple upstreams are supported
                # https://gitlab.com/gitlab-org/gitlab/-/issues/480461
                # coherence check
                not_found!('Upstream') unless upstream == GlobalID::Locator.locate(upstream_gid)

                service_response = ::VirtualRegistries::Packages::Maven::CachedResponses::CreateOrUpdateService.new(
                  upstream: upstream,
                  current_user: current_user,
                  params: declared_params.merge(etag: etag, content_type: content_type)
                ).execute

                send_error_response_from!(service_response: service_response) if service_response.error?
                ok_empty_response
              end
            end
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,153 @@
# frozen_string_literal: true

module API
  module VirtualRegistries
    module Packages
      module Maven
        # Grape API class for managing Maven virtual registry records:
        # list/create scoped to a group, and show/delete by registry id.
        # All endpoints are hidden and gated behind the
        # `virtual_registry_maven` feature flag (see `after_validation`).
        class Registries < ::API::Base
          include ::API::Helpers::Authentication

          feature_category :virtual_registry
          urgency :low

          # Accepted credential types and the transports they may be sent over.
          authenticate_with do |accept|
            accept.token_types(:personal_access_token).sent_through(:http_private_token_header)
            accept.token_types(:deploy_token).sent_through(:http_deploy_token_header)
            accept.token_types(:job_token).sent_through(:http_job_token_header)
          end

          helpers do
            include ::Gitlab::Utils::StrongMemoize

            # Group from the `:id` route param — only meaningful inside the
            # `:groups` resource below, where `:id` is the group id/path.
            def group
              find_group!(params[:id])
            end
            strong_memoize_attr :group

            # Registry from the `:id` route param — only meaningful inside the
            # registry namespace below, where `:id` is the registry id.
            # `find` raises (rendered as 404) when no such registry exists.
            def registry
              ::VirtualRegistries::Packages::Maven::Registry.find(params[:id])
            end
            strong_memoize_attr :registry

            # Policy wrapper used for group-scoped authorization checks.
            def policy_subject
              ::VirtualRegistries::Packages::Policies::Group.new(group)
            end

            # Renders 404 when the instance-wide dependency proxy is disabled.
            def require_dependency_proxy_enabled!
              not_found! unless ::Gitlab.config.dependency_proxy.enabled
            end
          end

          # Runs before every endpoint: feature-flag gate, dependency proxy
          # availability check, then authentication.
          after_validation do
            not_found! unless Feature.enabled?(:virtual_registry_maven, current_user)

            require_dependency_proxy_enabled!
            authenticate!
          end

          resource :groups, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
            params do
              requires :id, types: [String, Integer], desc: 'The group ID or full group path. Must be a top-level group'
            end
            namespace ':id/-/virtual_registries/packages/maven/registries' do
              desc 'Get the list of all maven virtual registries' do
                detail 'This feature was introduced in GitLab 17.4. \
                        This feature is currently in an experimental state. \
                        This feature is behind the `virtual_registry_maven` feature flag.'
                success ::API::Entities::VirtualRegistries::Packages::Maven::Registry
                failure [
                  { code: 400, message: 'Bad Request' },
                  { code: 401, message: 'Unauthorized' },
                  { code: 403, message: 'Forbidden' },
                  { code: 404, message: 'Not found' }
                ]
                tags %w[maven_virtual_registries]
                hidden true
              end
              get do
                authorize! :read_virtual_registry, policy_subject

                registries = ::VirtualRegistries::Packages::Maven::Registry.for_group(group)

                present registries, with: ::API::Entities::VirtualRegistries::Packages::Maven::Registry
              end

              desc 'Create a new maven virtual registry' do
                detail 'This feature was introduced in GitLab 17.4. \
                        This feature is currently in an experimental state. \
                        This feature is behind the `virtual_registry_maven` feature flag.'
                success ::API::Entities::VirtualRegistries::Packages::Maven::Registry
                failure [
                  { code: 400, message: 'Bad request' },
                  { code: 401, message: 'Unauthorized' },
                  { code: 403, message: 'Forbidden' },
                  { code: 404, message: 'Not found' }
                ]
                tags %w[maven_virtual_registries]
                hidden true
              end
              post do
                authorize! :create_virtual_registry, policy_subject

                # The new registry is built from the group alone; no other
                # attributes are accepted by this endpoint.
                new_reg = ::VirtualRegistries::Packages::Maven::Registry.new(group:)

                render_validation_error!(new_reg) unless new_reg.save

                present new_reg, with: ::API::Entities::VirtualRegistries::Packages::Maven::Registry
              end
            end
          end

          namespace 'virtual_registries/packages/maven/registries' do
            route_param :id, type: Integer, desc: 'The ID of the maven virtual registry' do
              desc 'Get a specific maven virtual registry' do
                detail 'This feature was introduced in GitLab 17.4. \
                        This feature is currently in an experimental state. \
                        This feature is behind the `virtual_registry_maven` feature flag.'
                success ::API::Entities::VirtualRegistries::Packages::Maven::Registry
                failure [
                  { code: 400, message: 'Bad request' },
                  { code: 401, message: 'Unauthorized' },
                  { code: 403, message: 'Forbidden' },
                  { code: 404, message: 'Not found' }
                ]
                tags %w[maven_virtual_registries]
                hidden true
              end
              get do
                authorize! :read_virtual_registry, registry

                present registry, with: ::API::Entities::VirtualRegistries::Packages::Maven::Registry
              end

              desc 'Delete a specific maven virtual registry' do
                detail 'This feature was introduced in GitLab 17.4. \
                        This feature is currently in an experimental state. \
                        This feature is behind the `virtual_registry_maven` feature flag.'
                success code: 204
                failure [
                  { code: 400, message: 'Bad request' },
                  { code: 401, message: 'Unauthorized' },
                  { code: 403, message: 'Forbidden' },
                  { code: 404, message: 'Not found' },
                  { code: 412, message: 'Precondition Failed' }
                ]
                tags %w[maven_virtual_registries]
                hidden true
              end
              delete do
                authorize! :destroy_virtual_registry, registry

                # Honors If-Unmodified-Since preconditions (412 on mismatch).
                destroy_conditionally!(registry)
              end
            end
          end
        end
      end
    end
  end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Batched background migration that backfills `namespace_id` on
    # `issuable_metric_images` rows. All batching/backfill logic lives in
    # the generic BackfillDesiredShardingKeyJob parent; this subclass only
    # supplies the operation name and feature category.
    class BackfillIssuableMetricImagesNamespaceId < BackfillDesiredShardingKeyJob
      operation_name :backfill_issuable_metric_images_namespace_id
      feature_category :observability
    end
  end
end

View File

@ -0,0 +1,10 @@
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Batched background migration that backfills `namespace_id` on
    # `resource_link_events` rows. All batching/backfill logic lives in
    # the generic BackfillDesiredShardingKeyJob parent; this subclass only
    # supplies the operation name and feature category.
    class BackfillResourceLinkEventsNamespaceId < BackfillDesiredShardingKeyJob
      operation_name :backfill_resource_link_events_namespace_id
      feature_category :team_planning
    end
  end
end

View File

@ -5,6 +5,7 @@ module Gitlab
module Importers
class PullRequestImporter
include Loggable
include ::Import::PlaceholderReferences::Pusher
def initialize(project, hash)
@project = project
@ -15,6 +16,8 @@ module Gitlab
# Object should behave as a object so we can remove object.is_a?(Hash) check
# This will be fixed in https://gitlab.com/gitlab-org/gitlab/-/issues/412328
@object = hash.with_indifferent_access
@reviewer_references = {}
end
def execute
@ -32,7 +35,7 @@ module Gitlab
target_branch: Gitlab::Git.ref_name(object[:target_branch_name]),
target_branch_sha: object[:target_branch_sha],
state_id: MergeRequest.available_states[object[:state]],
author_id: user_finder.author_id(object),
author_id: author_id(object),
created_at: object[:created_at],
updated_at: object[:updated_at],
imported_from: ::Import::HasImportSource::IMPORT_SOURCES[:bitbucket_server]
@ -41,6 +44,8 @@ module Gitlab
creator = Gitlab::Import::MergeRequestCreator.new(project)
merge_request = creator.execute(attributes)
push_reference(project, merge_request, :author_id, object[:author_username])
push_reviewer_references(merge_request)
# Create refs/merge-requests/iid/head reference for the merge request
merge_request.fetch_ref!
@ -57,27 +62,50 @@ module Gitlab
description += author_line
description += object[:description] if object[:description]
if Feature.enabled?(:bitbucket_server_convert_mentions_to_users, project.creator)
description = mentions_converter.convert(description)
end
description = mentions_converter.convert(description) if convert_mentions?
description
end
def convert_mentions?
Feature.enabled?(:bitbucket_server_convert_mentions_to_users, project.creator) &&
!user_mapping_enabled?(project)
end
def author_line
return '' if user_finder.uid(object)
return '' if user_mapping_enabled?(project) || user_finder.uid(object)
formatter.author_line(object[:author])
end
def author_id(pull_request_data)
if user_mapping_enabled?(project)
user_finder.author_id(
username: pull_request_data['author_username'],
display_name: pull_request_data['author']
)
else
user_finder.author_id(pull_request_data)
end
end
def reviewers
return [] unless object[:reviewers].present?
object[:reviewers].filter_map do |reviewer|
if Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
user_finder.find_user_id(by: :username, value: reviewer.dig('user', 'slug'))
object[:reviewers].filter_map do |reviewer_data|
if user_mapping_enabled?(project)
uid = user_finder.uid(
username: reviewer_data.dig('user', 'slug'),
display_name: reviewer_data.dig('user', 'displayName')
)
@reviewer_references[uid] = reviewer_data.dig('user', 'slug')
uid
elsif Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
user_finder.find_user_id(by: :username, value: reviewer_data.dig('user', 'slug'))
else
user_finder.find_user_id(by: :email, value: reviewer.dig('user', 'emailAddress'))
user_finder.find_user_id(by: :email, value: reviewer_data.dig('user', 'emailAddress'))
end
end
end
@ -89,6 +117,13 @@ module Gitlab
project.repository.find_commits_by_message(object[:source_branch_sha])&.first&.sha
end
def push_reviewer_references(merge_request)
mr_reviewers = merge_request.merge_request_reviewers
mr_reviewers.each do |mr_reviewer|
push_reference(project, mr_reviewer, :user_id, @reviewer_references[mr_reviewer.user_id])
end
end
end
end
end

View File

@ -15,7 +15,12 @@ module Gitlab
event_id: approved_event[:id]
)
user_id = if Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
user_id = if user_mapping_enabled?(project)
user_finder.uid(
username: approved_event[:approver_username],
display_name: approved_event[:approver_name]
)
elsif Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
user_finder.find_user_id(by: :username, value: approved_event[:approver_username])
else
user_finder.find_user_id(by: :email, value: approved_event[:approver_email])
@ -34,8 +39,12 @@ module Gitlab
submitted_at = approved_event[:created_at] || merge_request[:updated_at]
create_approval!(project.id, merge_request.id, user_id, submitted_at)
create_reviewer!(merge_request.id, user_id, submitted_at)
approval, approval_note = create_approval!(project.id, merge_request.id, user_id, submitted_at)
push_reference(project, approval, :user_id, approved_event[:approver_username])
push_reference(project, approval_note, :author_id, approved_event[:approver_username])
reviewer = create_reviewer!(merge_request.id, user_id, submitted_at)
push_reference(project, reviewer, :user_id, approved_event[:approver_username]) if reviewer
log_info(
import_stage: 'import_approved_event',

View File

@ -7,6 +7,7 @@ module Gitlab
# Base class for importing pull request notes during project import from Bitbucket Server
class BaseImporter
include Loggable
include ::Import::PlaceholderReferences::Pusher
# @param project [Project]
# @param merge_request [MergeRequest]

View File

@ -28,6 +28,8 @@ module Gitlab
if note.valid?
note.save
push_reference(project, note, :author_id, comment[:author_username])
return note
end
@ -52,7 +54,7 @@ module Gitlab
end
def pull_request_comment_attributes(comment)
author = user_finder.uid(comment)
author = author(comment)
note = ''
unless author
@ -79,16 +81,30 @@ module Gitlab
}
end
def author(comment)
if user_mapping_enabled?(project)
user_finder.uid(
username: comment[:author_username],
display_name: comment[:author_name]
)
else
user_finder.uid(comment)
end
end
def create_basic_fallback_note(merge_request, comment, position)
attributes = pull_request_comment_attributes(comment)
note = "*Comment on"
note_text = "*Comment on"
note += " #{position.old_path}:#{position.old_line} -->" if position.old_line
note += " #{position.new_path}:#{position.new_line}" if position.new_line
note += "*\n\n#{comment[:note]}"
note_text += " #{position.old_path}:#{position.old_line} -->" if position.old_line
note_text += " #{position.new_path}:#{position.new_line}" if position.new_line
note_text += "*\n\n#{comment[:note]}"
attributes[:note] = note
merge_request.notes.create!(attributes)
attributes[:note] = note_text
note = merge_request.notes.create!(attributes)
push_reference(project, note, :author_id, comment[:author_username])
note
end
end
end

View File

@ -15,7 +15,12 @@ module Gitlab
event_id: declined_event[:id]
)
user_id = if Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
user_id = if user_mapping_enabled?(project)
user_finder.uid(
username: declined_event[:decliner_username],
display_name: declined_event[:decliner_name]
)
elsif Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
user_finder.find_user_id(by: :username, value: declined_event[:decliner_username])
else
user_finder.find_user_id(by: :email, value: declined_event[:decliner_email])
@ -37,8 +42,15 @@ module Gitlab
user = User.new(id: user_id)
SystemNoteService.change_status(merge_request, merge_request.target_project, user, 'closed', nil)
EventCreateService.new.close_mr(merge_request, user)
create_merge_request_metrics(latest_closed_by_id: user_id, latest_closed_at: declined_event[:created_at])
event = record_event(user_id)
push_reference(project, event, :author_id, declined_event[:decliner_username])
metric = create_merge_request_metrics(
latest_closed_by_id: user_id,
latest_closed_at: declined_event[:created_at]
)
push_reference(project, metric, :latest_closed_by_id, declined_event[:decliner_username])
log_info(
import_stage: 'import_declined_event',
@ -47,6 +59,19 @@ module Gitlab
event_id: declined_event[:id]
)
end
private
def record_event(user_id)
Event.create!(
project_id: project.id,
author_id: user_id,
action: 'closed',
target_type: 'MergeRequest',
target_id: merge_request.id,
imported_from: ::Import::HasImportSource::IMPORT_SOURCES[:bitbucket_server]
)
end
end
end
end

View File

@ -15,12 +15,21 @@ module Gitlab
event_id: merge_event[:id]
)
committer = merge_event[:committer_email]
user_id = if user_mapping_enabled?(project)
user_finder.uid(
username: merge_event[:committer_username],
display_name: merge_event[:committer_user]
)
else
user_finder.find_user_id(by: :email, value: merge_event[:committer_email])
end
user_id ||= project.creator_id
user_id = user_finder.find_user_id(by: :email, value: committer) || project.creator_id
timestamp = merge_event[:merge_timestamp]
merge_request.update({ merge_commit_sha: merge_event[:merge_commit] })
create_merge_request_metrics(merged_by_id: user_id, merged_at: timestamp)
metric = create_merge_request_metrics(merged_by_id: user_id, merged_at: timestamp)
push_reference(project, metric, :merged_by_id, merge_event[:committer_username])
log_info(
import_stage: 'import_merge_event',

View File

@ -13,10 +13,12 @@ module Gitlab
comment_id: comment[:id]
)
merge_request.notes.create!(pull_request_comment_attributes(comment))
note = merge_request.notes.create!(pull_request_comment_attributes(comment))
push_reference(note.project, note, :author_id, comment[:author_username])
comment[:comments].each do |reply|
merge_request.notes.create!(pull_request_comment_attributes(reply))
note = merge_request.notes.create!(pull_request_comment_attributes(reply))
push_reference(note.project, note, :author_id, reply[:author_username])
end
rescue StandardError => e
Gitlab::ErrorTracking.log_exception(

View File

@ -6,6 +6,7 @@ module Gitlab
class PullRequestNotesImporter
include ::Gitlab::Import::MergeRequestHelpers
include Loggable
include ::Import::PlaceholderReferences::Pusher
def initialize(project, hash)
@project = project
@ -56,13 +57,23 @@ module Gitlab
def import_merge_event(merge_request, merge_event)
log_info(import_stage: 'import_merge_event', message: 'starting', iid: merge_request.iid)
committer = merge_event.committer_email
user_id = if user_mapping_enabled?(project)
user_finder.uid(
username: merge_event.committer_username,
display_name: merge_event.committer_name
)
else
user_finder.find_user_id(by: :email, value: merge_event.committer_email)
end
user_id ||= project.creator_id
user_id = user_finder.find_user_id(by: :email, value: committer) || project.creator_id
timestamp = merge_event.merge_timestamp
merge_request.update({ merge_commit_sha: merge_event.merge_commit })
metric = MergeRequest::Metrics.find_or_initialize_by(merge_request: merge_request)
metric.update(merged_by_id: user_id, merged_at: timestamp)
push_reference(project, metric, :merged_by_id, merge_event.committer_username)
log_info(import_stage: 'import_merge_event', message: 'finished', iid: merge_request.iid)
end
@ -76,7 +87,12 @@ module Gitlab
event_id: approved_event.id
)
user_id = if Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
user_id = if user_mapping_enabled?(project)
user_finder.uid(
username: approved_event.approver_username,
display_name: approved_event.approver_name
)
elsif Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
user_finder.find_user_id(by: :username, value: approved_event.approver_username)
else
user_finder.find_user_id(by: :email, value: approved_event.approver_email)
@ -86,8 +102,12 @@ module Gitlab
submitted_at = approved_event.created_at || merge_request.updated_at
create_approval!(project.id, merge_request.id, user_id, submitted_at)
create_reviewer!(merge_request.id, user_id, submitted_at)
approval, approval_note = create_approval!(project.id, merge_request.id, user_id, submitted_at)
push_reference(project, approval, :user_id, approved_event.approver_username)
push_reference(project, approval_note, :author_id, approved_event.approver_username)
reviewer = create_reviewer!(merge_request.id, user_id, submitted_at)
push_reference(project, reviewer, :user_id, approved_event.approver_username) if reviewer
log_info(
import_stage: 'import_approved_event',
@ -125,6 +145,7 @@ module Gitlab
if note.valid?
note.save
push_reference(project, note, :author_id, comment.author_username)
return note
end
@ -152,7 +173,9 @@ module Gitlab
note += "*\n\n#{comment.note}"
attributes[:note] = note
merge_request.notes.create!(attributes)
note = merge_request.notes.create!(attributes)
push_reference(project, note, :author_id, comment.author_username)
note
end
def build_position(merge_request, pr_comment)
@ -171,10 +194,12 @@ module Gitlab
log_info(import_stage: 'import_standalone_pr_comments', message: 'starting', iid: merge_request.iid)
pr_comments.each do |comment|
merge_request.notes.create!(pull_request_comment_attributes(comment))
note = merge_request.notes.create!(pull_request_comment_attributes(comment))
push_reference(project, note, :author_id, comment.author_username)
comment.comments.each do |replies|
merge_request.notes.create!(pull_request_comment_attributes(replies))
note = merge_request.notes.create!(pull_request_comment_attributes(replies))
push_reference(project, note, :author_id, comment.author_username)
end
rescue StandardError => e
Gitlab::ErrorTracking.log_exception(
@ -190,7 +215,7 @@ module Gitlab
end
def pull_request_comment_attributes(comment)
author = user_finder.uid(comment)
author = author(comment)
note = ''
unless author
@ -198,7 +223,7 @@ module Gitlab
note = "*By #{comment.author_username} (#{comment.author_email})*\n\n"
end
comment_note = if Feature.enabled?(:bitbucket_server_convert_mentions_to_users, project.creator)
comment_note = if convert_mentions?
mentions_converter.convert(comment.note)
else
comment.note
@ -222,6 +247,22 @@ module Gitlab
}
end
def convert_mentions?
Feature.enabled?(:bitbucket_server_convert_mentions_to_users, project.creator) &&
!user_mapping_enabled?(project)
end
def author(comment)
if user_mapping_enabled?(project)
user_finder.uid(
username: comment.author_username,
display_name: comment.author_name
)
else
user_finder.uid(comment)
end
end
def client
BitbucketServer::Client.new(project.import_data.credentials)
end

View File

@ -20,6 +20,10 @@ module Gitlab
bitbucket_server_notes_separate_worker_enabled =
Feature.enabled?(:bitbucket_server_notes_separate_worker, current_user)
user_contribution_mapping_enabled =
Feature.enabled?(:importer_user_mapping, current_user) &&
Feature.enabled?(:bitbucket_server_user_mapping, current_user)
::Projects::CreateService.new(
current_user,
name: name,
@ -37,7 +41,8 @@ module Gitlab
project_key: project_key,
repo_slug: repo_slug,
timeout_strategy: timeout_strategy,
bitbucket_server_notes_separate_worker: bitbucket_server_notes_separate_worker_enabled
bitbucket_server_notes_separate_worker: bitbucket_server_notes_separate_worker_enabled,
user_contribution_mapping_enabled: user_contribution_mapping_enabled
}
},
skip_wiki: true

View File

@ -22,9 +22,11 @@ module Gitlab
# Object should behave as a object so we can remove object.is_a?(Hash) check
# This will be fixed in https://gitlab.com/gitlab-org/gitlab/-/issues/412328
def uid(object)
# We want this to only match either username or email depending on the flag state.
# There should be no fall-through.
if Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
# We want this to only match either placeholder, username, or email
# depending on the flag state. There should be no fall-through.
if user_mapping_enabled?(project)
source_user_for_author(object).mapped_user_id
elsif Feature.enabled?(:bitbucket_server_user_mapping_by_username, project, type: :ops)
find_user_id(by: :username, value: object.is_a?(Hash) ? object[:author_username] : object.author_username)
else
find_user_id(by: :email, value: object.is_a?(Hash) ? object[:author_email] : object.author_email)
@ -60,6 +62,26 @@ module Gitlab
def build_cache_key(by, value)
format(CACHE_KEY, project_id: project.id, by: by, value: value)
end
def user_mapping_enabled?(project)
!!project.import_data.user_mapping_enabled?
end
def source_user_for_author(user_data)
source_user_mapper.find_or_create_source_user(
source_user_identifier: user_data[:username],
source_name: user_data[:display_name],
source_username: user_data[:username]
)
end
def source_user_mapper
@source_user_mapper ||= Gitlab::Import::SourceUserMapper.new(
namespace: project.root_ancestor,
import_type: ::Import::SOURCE_BITBUCKET_SERVER,
source_hostname: project.import_url
)
end
end
end
end

View File

@ -6,9 +6,11 @@ module Gitlab
include DatabaseHelpers
# @param attributes [Hash]
# @return MergeRequest::Metrics
def create_merge_request_metrics(attributes)
metric = MergeRequest::Metrics.find_or_initialize_by(merge_request: merge_request) # rubocop: disable CodeReuse/ActiveRecord -- no need to move this to ActiveRecord model
metric.update(attributes)
metric
end
# rubocop: disable CodeReuse/ActiveRecord

View File

@ -0,0 +1,42 @@
# frozen_string_literal: true

module Import
  module PlaceholderReferences
    # Mixin for Bitbucket Server importers that records placeholder user
    # references, so user-authored records can be re-assigned once the
    # placeholder source user is mapped to a real user.
    module Pusher
      # Records a placeholder reference for `record[attribute]`, unless user
      # contribution mapping is disabled for the project or the record is
      # already associated with the accepted real user.
      def push_reference(project, record, attribute, source_user_identifier)
        return unless user_mapping_enabled?(project)

        source_user = source_user_mapper(project).find_source_user(source_user_identifier)

        # Skip when the object already points at a real (accepted) user.
        return if source_user_mapped_to_human?(record, attribute, source_user)

        ::Import::PlaceholderReferences::PushService.from_record(
          import_source: ::Import::SOURCE_BITBUCKET_SERVER,
          import_uid: project.import_state.id,
          record: record,
          source_user: source_user,
          user_reference_column: attribute
        ).execute
      end

      # True when no source user was found, or when the source user mapping
      # was accepted and the record already carries the reassigned user id.
      def source_user_mapped_to_human?(record, attribute, source_user)
        return true if source_user.nil?

        source_user.accepted_status? && record[attribute] == source_user.reassign_to_user_id
      end

      # Mapper used to look up source users for this import.
      # NOTE(review): memoized per includer instance, ignoring the `project`
      # argument — assumes one project per importer instance; confirm callers.
      def source_user_mapper(project)
        @user_mapper ||= ::Gitlab::Import::SourceUserMapper.new(
          namespace: project.root_ancestor,
          source_hostname: project.import_url,
          import_type: ::Import::SOURCE_BITBUCKET_SERVER
        )
      end

      # Whether user contribution mapping is turned on for this project's
      # import, coerced to a strict boolean.
      def user_mapping_enabled?(project)
        !!project.import_data.user_mapping_enabled?
      end
    end
  end
end

View File

@ -32,27 +32,9 @@ module Sidebars
true
end
override :pill_count
def pill_count
return if Feature.enabled?(:async_sidebar_counts, context.group.root_ancestor)
strong_memoize(:pill_count) do
count_service = ::Groups::OpenIssuesCountService
format_cached_count(
count_service::CACHED_COUNT_THRESHOLD,
count_service.new(context.group, context.current_user, fast_timeout: true).count
)
end
rescue ActiveRecord::QueryCanceled => e # rubocop:disable Database/RescueQueryCanceled -- used with fast_read_statement_timeout to prevent counts from slowing down the request
Gitlab::ErrorTracking.log_exception(e, group_id: context.group.id, query: 'group_sidebar_issues_count')
nil
end
override :pill_count_field
def pill_count_field
'openIssuesCount' if Feature.enabled?(:async_sidebar_counts, context.group.root_ancestor)
'openIssuesCount'
end
override :pill_html_options

View File

@ -31,21 +31,9 @@ module Sidebars
true
end
override :pill_count
def pill_count
return if Feature.enabled?(:async_sidebar_counts, context.group.root_ancestor)
strong_memoize(:pill_count) do
count_service = ::Groups::MergeRequestsCountService
count = count_service.new(context.group, context.current_user).count
format_cached_count(count_service::CACHED_COUNT_THRESHOLD, count)
end
end
override :pill_count_field
def pill_count_field
'openMergeRequestsCount' if Feature.enabled?(:async_sidebar_counts, context.group.root_ancestor)
'openMergeRequestsCount'
end
override :pill_html_options

View File

@ -47,19 +47,9 @@ module Sidebars
end
end
override :pill_count
def pill_count
return if Feature.enabled?(:async_sidebar_counts, context.project.root_ancestor)
strong_memoize(:pill_count) do
count = context.project.open_issues_count(context.current_user)
format_cached_count(1000, count)
end
end
override :pill_count_field
def pill_count_field
'openIssuesCount' if Feature.enabled?(:async_sidebar_counts, context.project.root_ancestor)
'openIssuesCount'
end
override :pill_html_options

View File

@ -37,17 +37,9 @@ module Sidebars
true
end
override :pill_count
def pill_count
return if Feature.enabled?(:async_sidebar_counts, context.project.root_ancestor)
count = @pill_count ||= context.project.open_merge_requests_count
format_cached_count(1000, count)
end
override :pill_count_field
def pill_count_field
'openMergeRequestsCount' if Feature.enabled?(:async_sidebar_counts, context.project.root_ancestor)
'openMergeRequestsCount'
end
override :pill_html_options

View File

@ -24,8 +24,8 @@
"jest:ci:vue3-mr:with-fixtures": "JEST_FIXTURE_JOBS_ONLY=1 jest --config jest.config.js --ci --testSequencer ./scripts/frontend/skip_specs_broken_in_vue_compat_fixture_ci_sequencer.js --shard \"${CI_NODE_INDEX:-1}/${CI_NODE_TOTAL:-1}\" --logHeapUsage",
"jest:ci:vue3-mr:predictive-without-fixtures": "jest --config jest.config.js --ci --findRelatedTests $(cat $RSPEC_CHANGED_FILES_PATH) $(cat $RSPEC_MATCHING_JS_FILES_PATH) --passWithNoTests --testSequencer ./scripts/frontend/skip_specs_broken_in_vue_compat_fixture_ci_sequencer.js --shard \"${CI_NODE_INDEX:-1}/${CI_NODE_TOTAL:-1}\" --logHeapUsage",
"jest:ci:vue3-mr:predictive-with-fixtures": "JEST_FIXTURE_JOBS_ONLY=1 jest --config jest.config.js --ci --findRelatedTests $(cat $RSPEC_CHANGED_FILES_PATH) $(cat $RSPEC_MATCHING_JS_FILES_PATH) --passWithNoTests --testSequencer ./scripts/frontend/skip_specs_broken_in_vue_compat_fixture_ci_sequencer.js --shard \"${CI_NODE_INDEX:-1}/${CI_NODE_TOTAL:-1}\" --logHeapUsage",
"jest:ci:vue3-mr:check-quarantined-without-fixtures": "node ./scripts/frontend/check_jest_vue3_quarantine.js",
"jest:ci:vue3-mr:check-quarantined-with-fixtures": "JEST_FIXTURE_JOBS_ONLY=1 node ./scripts/frontend/check_jest_vue3_quarantine.js",
"jest:ci:vue3-mr:check-quarantined-without-fixtures": "./scripts/frontend/check_jest_vue3_quarantine.js",
"jest:ci:vue3-mr:check-quarantined-with-fixtures": "JEST_FIXTURE_JOBS_ONLY=1 ./scripts/frontend/check_jest_vue3_quarantine.js",
"jest:contract": "PACT_DO_NOT_TRACK=true jest --config jest.config.contract.js --runInBand",
"jest:integration": "jest --config jest.config.integration.js",
"jest:scripts": "jest --config jest.config.scripts.js",
@ -75,7 +75,7 @@
"@gitlab/fonts": "^1.3.0",
"@gitlab/query-language-rust": "0.1.2",
"@gitlab/svgs": "3.121.0",
"@gitlab/ui": "104.0.0",
"@gitlab/ui": "104.1.0",
"@gitlab/vue-router-vue3": "npm:vue-router@4.1.6",
"@gitlab/vuex-vue3": "npm:vuex@4.0.0",
"@gitlab/web-ide": "^0.0.1-dev-20241112063543",
@ -291,7 +291,6 @@
"eslint-plugin-no-jquery": "3.1.0",
"eslint-plugin-no-unsanitized": "^4.1.2",
"fake-indexeddb": "^4.0.1",
"fast-xml-parser": "^3.21.1",
"gettext-extractor": "^3.7.0",
"gettext-extractor-vue": "^5.1.0",
"glob": "^7.1.6",

222
scripts/frontend/check_jest_vue3_quarantine.js Normal file → Executable file
View File

@ -1,25 +1,98 @@
#!/usr/bin/env node
const { spawnSync } = require('node:child_process');
const { readFile, open, stat } = require('node:fs/promises');
const parser = require('fast-xml-parser');
const { readFile, open, stat, mkdir } = require('node:fs/promises');
const { join, relative, dirname } = require('node:path');
const defaultChalk = require('chalk');
const program = require('commander');
const { getLocalQuarantinedFiles } = require('./jest_vue3_quarantine_utils');
// Always use basic color output
const chalk = new defaultChalk.constructor({ level: 1 });
const ROOT = join(__dirname, '..', '..');
const IS_CI = Boolean(process.env.CI);
const FIXTURES_HELP_URL =
// eslint-disable-next-line no-restricted-syntax
'https://docs.gitlab.com/ee/development/testing_guide/frontend_testing.html#download-fixtures';
const DIR = join(ROOT, 'tmp/tests/frontend');
const JEST_JSON_OUTPUT = join(DIR, 'jest_results.json');
const JEST_STDOUT = join(DIR, 'jest_stdout');
const JEST_STDERR = join(DIR, 'jest_stderr');
// Force basic color output in CI
const chalk = new defaultChalk.constructor({ level: IS_CI ? 1 : undefined });
let quarantinedFiles;
let filesThatChanged;
async function parseJUnitReport() {
let junit;
// Parse and validate CLI arguments via commander.
//
// Options:
//   --all    check every quarantined spec, not just those affected by changes
//   --stdio  let Jest write to stdout/stderr instead of the tmp files
//
// Outside CI, the caller must supply either --all or an explicit list of
// spec files; supplying both (or neither) prints a usage error and exits
// via `program.help()`.
function parseArguments() {
  program
    .usage('[options] <SPEC ...>')
    .description(
      `
Checks whether Jest specs quarantined under Vue 3 should be unquarantined.
Usage examples
--------------
In CI:
# Check quarantined files which were affected by changes in the merge request.
$ scripts/frontend/check_jest_vue3_quarantine.js
# Check all quarantined files, still subject to sharding/fixture separation.
# Useful for tier 3 pipelines, or when dependencies change.
$ scripts/frontend/check_jest_vue3_quarantine.js --all
Locally:
# Run all quarantined files, including those which need fixtures.
# See ${FIXTURES_HELP_URL}
$ scripts/frontend/check_jest_vue3_quarantine.js --all
# Run a particular spec
$ scripts/frontend/check_jest_vue3_quarantine.js spec/frontend/foo_spec.js
# Run specs in this branch that were modified since master
$ scripts/frontend/check_jest_vue3_quarantine.js $(git diff master... --name-only)
# Write to stdio normally instead of to temporary files
$ scripts/frontend/check_jest_vue3_quarantine.js --stdio spec/frontend/foo_spec.js
`.trim(),
    )
    .option(
      '--all',
      'Run all quarantined specs. Good for local testing, or in CI when configuration files have changed.',
    )
    .option(
      '--stdio',
      `Let Jest write to stdout/stderr as normal. By default, it writes to ${JEST_STDOUT} and ${JEST_STDERR}. Should not be used in CI, as it can exceed maximum job log size.`,
    )
    .parse(process.argv);
  // Argument validation only applies locally; in CI the changed-files
  // artifacts determine what runs.
  let invalidArgumentsMessage;
  if (!IS_CI) {
    if (!program.all && program.args.length === 0) {
      invalidArgumentsMessage =
        'No spec files to check!\n\nWhen run locally, either add the --all option, or a list of spec files to check.';
    }
    if (program.all && program.args.length > 0) {
      invalidArgumentsMessage = `Do not pass arguments in addition to the --all option.`;
    }
  }
  if (invalidArgumentsMessage) {
    console.warn(`${chalk.red(invalidArgumentsMessage)}\n`);
    // program.help() prints usage and exits the process.
    program.help();
  }
}
async function parseResults() {
let results;
try {
const xml = await readFile('./junit_jest.xml', 'UTF-8');
junit = parser.parse(xml, {
arrayMode: true,
attributeNamePrefix: '',
parseNodeValue: false,
ignoreAttributes: false,
});
results = JSON.parse(await readFile(JEST_JSON_OUTPUT, 'UTF-8'));
} catch (e) {
console.warn(e);
// No JUnit report exists, or there was a parsing error. Either way, we
@ -27,29 +100,13 @@ async function parseJUnitReport() {
return [];
}
const failuresByFile = new Map();
for (const testsuites of junit.testsuites) {
for (const testsuite of testsuites.testsuite || []) {
for (const testcase of testsuite.testcase) {
const { file } = testcase;
if (!failuresByFile.has(file)) {
failuresByFile.set(file, 0);
}
const failuresSoFar = failuresByFile.get(file);
const testcaseFailed = testcase.failure ? 1 : 0;
failuresByFile.set(file, failuresSoFar + testcaseFailed);
}
return results.testResults.reduce((acc, { name, status }) => {
if (status === 'passed') {
acc.push(relative(ROOT, name));
}
}
const passed = [];
for (const [file, failures] of failuresByFile.entries()) {
if (failures === 0 && quarantinedFiles.has(file)) passed.push(file);
}
return passed;
return acc;
}, []);
}
function reportSpecsShouldBeUnquarantined(files) {
@ -72,6 +129,11 @@ function reportSpecsShouldBeUnquarantined(files) {
}
async function changedFiles() {
if (!IS_CI) {
// We're not in CI, so `detect-tests` artifacts won't be available.
return [];
}
const { RSPEC_CHANGED_FILES_PATH, RSPEC_MATCHING_JS_FILES_PATH } = process.env;
const files = await Promise.all(
@ -96,7 +158,13 @@ function intersection(a, b) {
async function getRemovedQuarantinedSpecs() {
const removedQuarantinedSpecs = [];
for (const file of intersection(filesThatChanged, quarantinedFiles)) {
const filesToCheckIfTheyExist = IS_CI
? // In CI, only check quarantined files the author has touched
intersection(filesThatChanged, quarantinedFiles)
: // Locally, check all quarantined files
quarantinedFiles;
for (const file of filesToCheckIfTheyExist) {
try {
// eslint-disable-next-line no-await-in-loop
await stat(file);
@ -108,13 +176,74 @@ async function getRemovedQuarantinedSpecs() {
return removedQuarantinedSpecs;
}
// Build the Jest CLI arguments that select which quarantined specs to run.
//
// In CI: uses --findRelatedTests with either all quarantined files (--all)
// or the files touched by the merge request, funneled through the custom
// sequencer. Locally: runs files directly via --runTestsByPath, either all
// quarantined specs (--all) or the quarantined subset of the given args.
//
// May call process.exit(1) when none of the locally supplied specs are
// quarantined.
function getTestArguments() {
  if (IS_CI) {
    const ciArguments = (touchedFiles) => [
      '--findRelatedTests',
      ...touchedFiles,
      '--passWithNoTests',
      // Explicitly have one shard, so that the `shard` method of the sequencer is called.
      '--shard=1/1',
      '--testSequencer',
      './scripts/frontend/check_jest_vue3_quarantine_sequencer.js',
    ];
    if (program.all) {
      console.warn(
        'Running in CI with --all. Checking all quarantined specs, subject to FixtureCISequencer sharding behavior.',
      );
      return ciArguments(quarantinedFiles);
    }
    console.warn(
      'Running in CI. Only specs affected by changes in the merge request will be checked.',
    );
    return ciArguments(filesThatChanged);
  }
  if (program.all) {
    console.warn('Running locally with --all. Checking all quarantined specs.');
    return ['--runTestsByPath', ...quarantinedFiles];
  }
  if (program.args.length > 0) {
    // Only run args that are actually in the quarantine list; warn about the rest.
    const specs = program.args.filter((spec) => {
      const isQuarantined = quarantinedFiles.has(relative(ROOT, spec));
      if (!isQuarantined) console.warn(`Omitting file as it is not in quarantine list: ${spec}`);
      return isQuarantined;
    });
    if (specs.length === 0) {
      console.warn(`No quarantined specs to run!`);
      process.exit(1);
    }
    console.warn('Running locally. Checking given specs.');
    return ['--runTestsByPath', ...specs];
  }
  // Unreachable: parseArguments() rejects the no-args/no---all case locally.
  // ESLint's consistent-return rule requires something like this.
  return ['--this-should-never-happen-and-jest-should-fail'];
}
// Decide where the spawned Jest process writes its output.
//
// With --stdio, Jest inherits the parent's streams. Otherwise the tmp
// directory is created and Jest's stdout/stderr are redirected into the
// JEST_STDOUT / JEST_STDERR files (keeps CI job logs within size limits).
async function getStdio() {
  if (program.stdio) return 'inherit';

  await mkdir(dirname(JEST_STDOUT), { recursive: true });
  const stdoutHandle = await open(JEST_STDOUT, 'w');
  const stderrHandle = await open(JEST_STDERR, 'w');
  return ['inherit', stdoutHandle.createWriteStream(), stderrHandle.createWriteStream()];
}
async function main() {
parseArguments();
filesThatChanged = await changedFiles();
quarantinedFiles = new Set(await getLocalQuarantinedFiles());
const jestStdout = (await open('jest_stdout', 'w')).createWriteStream();
const jestStderr = (await open('jest_stderr', 'w')).createWriteStream();
console.log('Running quarantined specs...');
// Note: we don't care what Jest's exit code is.
//
@ -133,17 +262,13 @@ async function main() {
'--config',
'jest.config.js',
'--ci',
'--findRelatedTests',
...filesThatChanged,
'--passWithNoTests',
// Explicitly have one shard, so that the `shard` method of the sequencer is called.
'--shard=1/1',
'--testSequencer',
'./scripts/frontend/check_jest_vue3_quarantine_sequencer.js',
'--logHeapUsage',
'--json',
`--outputFile=${JEST_JSON_OUTPUT}`,
...getTestArguments(),
],
{
stdio: ['inherit', jestStdout, jestStderr],
stdio: await getStdio(),
env: {
...process.env,
VUE_VERSION: '3',
@ -151,13 +276,14 @@ async function main() {
},
);
const passed = await parseJUnitReport();
const passed = await parseResults();
const removedQuarantinedSpecs = await getRemovedQuarantinedSpecs();
const filesToReport = [...passed, ...removedQuarantinedSpecs];
if (filesToReport.length === 0) {
// No tests ran, or there was some unexpected error. Either way, exit
// successfully.
console.warn('No spec files need to be removed from quarantine.');
return;
}

View File

@ -208,6 +208,12 @@ FactoryBot.define do
import_status { :canceled }
end
# Project mid-way through a Bitbucket Server import: started import state
# plus the import_url and import_type attributes.
trait :bitbucket_server_import do
  import_started
  import_url { 'https://bitbucket.example.com' }
  import_type { :bitbucket_server }
end
trait :jira_dvcs_server do
before(:create) do |project|
create(:project_feature_usage, :dvcs_server, project: project)

View File

@ -62,9 +62,9 @@ RSpec.describe 'Virtual Registries Packages Maven', :api, :js, feature_category:
it 'returns the file contents from the cache' do
expect(::Gitlab::HTTP).not_to receive(:head)
expect { request }.not_to change { upstream.cached_responses.count }
expect(request.headers[::API::VirtualRegistries::Packages::Maven::SHA1_CHECKSUM_HEADER])
expect(request.headers[::API::VirtualRegistries::Packages::Maven::Endpoints::SHA1_CHECKSUM_HEADER])
.to be_an_instance_of(String)
expect(request.headers[::API::VirtualRegistries::Packages::Maven::MD5_CHECKSUM_HEADER])
expect(request.headers[::API::VirtualRegistries::Packages::Maven::Endpoints::MD5_CHECKSUM_HEADER])
.to be_an_instance_of(String)
end

View File

@ -17,8 +17,6 @@ RSpec.describe 'Group merge requests page', feature_category: :code_review_workf
let(:issuable_archived) { create(:merge_request, source_project: project_archived, target_project: project_archived, title: 'issuable of an archived project') }
before do
stub_feature_flags(async_sidebar_counts: false)
issuable_archived
visit path
end

View File

@ -18,74 +18,15 @@ exports[`MemberActivity with a member that does not have all of the fields rende
href="file-mock#check"
/>
</svg>
<span>
<span
data-testid="access-granted-date"
>
Aug 06, 2020
</span>
</div>
</div>
`;
exports[`MemberActivity with a member that has all fields and "requestAcceptedAt" field in the member entity is not null should use this field to display an access granted date 1`] = `
<div
class="gl-flex gl-flex-col gl-gap-2"
>
<div
class="gl-flex gl-gap-3"
>
<svg
aria-hidden="true"
class="-gl-mr-2 gl-fill-current gl-icon gl-ml-2 gl-text-subtle s16"
data-testid="assignee-icon"
role="img"
title="User created"
>
<use
href="file-mock#assignee"
/>
</svg>
<span>
Mar 10, 2022
</span>
</div>
<div
class="gl-flex gl-gap-3"
>
<svg
aria-hidden="true"
class="gl-fill-current gl-icon gl-text-subtle s16"
data-testid="check-icon"
role="img"
title="Access granted"
>
<use
href="file-mock#check"
/>
</svg>
<span>
Jul 27, 2020
</span>
</div>
<div
class="gl-flex gl-gap-3"
>
<svg
aria-hidden="true"
class="gl-fill-current gl-icon gl-text-subtle s16"
data-testid="hourglass-icon"
role="img"
title="Last activity"
>
<use
href="file-mock#hourglass"
/>
</svg>
<span>
Mar 15, 2022
</span>
</div>
</div>
`;
exports[`MemberActivity with a member that has all fields renders \`User created\`, \`Access granted\`, and \`Last activity\` fields 1`] = `
<div
class="gl-flex gl-flex-col gl-gap-2"
@ -122,8 +63,10 @@ exports[`MemberActivity with a member that has all fields renders \`User created
href="file-mock#check"
/>
</svg>
<span>
Jul 17, 2020
<span
data-testid="access-granted-date"
>
Jul 27, 2020
</span>
</div>
<div

View File

@ -18,6 +18,8 @@ describe('MemberActivity', () => {
});
};
const findAccessGrantedDate = () => wrapper.findByTestId('access-granted-date');
describe('with a member that has all fields', () => {
beforeEach(() => {
createComponent();
@ -27,15 +29,33 @@ describe('MemberActivity', () => {
expect(wrapper.element).toMatchSnapshot();
});
describe('and "requestAcceptedAt" field in the member entity is not null', () => {
describe('when "inviteAcceptedAt" field is null and "requestAcceptedAt" field is not null', () => {
beforeEach(() => {
createComponent();
});
it('uses the "requestAcceptedAt" field to display an access granted date', () => {
const element = findAccessGrantedDate();
expect(element.exists()).toBe(true);
expect(element.text()).toBe('Jul 27, 2020');
});
});
describe('when "inviteAcceptedAt" field is not null', () => {
beforeEach(() => {
createComponent({
propsData: { member: { ...memberMock, requestAcceptedAt: '2020-07-27T16:22:46.923Z' } },
propsData: {
member: { ...memberMock, inviteAcceptedAt: '2021-08-01T16:22:46.923Z' },
},
});
});
it('should use this field to display an access granted date', () => {
expect(wrapper.element).toMatchSnapshot();
it('uses the "inviteAcceptedAt" field to display an access granted date', () => {
const element = findAccessGrantedDate();
expect(element.exists()).toBe(true);
expect(element.text()).toBe('Aug 01, 2021');
});
});
});

View File

@ -45,6 +45,8 @@ export const member = {
},
id: 238,
createdAt: '2020-07-17T16:22:46.923Z',
requestAcceptedAt: '2020-07-27T16:22:46.923Z',
inviteAcceptedAt: null,
expiresAt: null,
usingLicense: false,
groupSso: false,

View File

@ -25,12 +25,7 @@ describe('Sidebar Menu', () => {
let wrapper;
let handler;
const createWrapper = ({
queryHandler = handler,
asyncSidebarCountsFlagEnabled = false,
provide = {},
...extraProps
}) => {
const createWrapper = ({ queryHandler = handler, provide = {}, ...extraProps }) => {
wrapper = shallowMountExtended(SidebarMenu, {
apolloProvider: createMockApollo([[superSidebarDataQuery, queryHandler]]),
propsData: {
@ -42,9 +37,6 @@ describe('Sidebar Menu', () => {
...extraProps,
},
provide: {
glFeatures: {
asyncSidebarCounts: asyncSidebarCountsFlagEnabled,
},
currentPath: 'group',
...provide,
},
@ -223,173 +215,120 @@ describe('Sidebar Menu', () => {
});
describe('Fetching async nav item pill count', () => {
describe('when flag `asyncSidebarCounts` is disabled', () => {
handler = jest.fn().mockResolvedValue(
sidebarDataCountResponse({
openIssuesCount: 8,
openMergeRequestsCount: 2,
}),
);
handler = jest.fn().mockResolvedValue(
sidebarDataCountResponse({
openIssuesCount: 8,
openMergeRequestsCount: 2,
}),
);
it('async sidebar count query is not called, even with `currentPath` provided', async () => {
createWrapper({ asyncSidebarCountsFlagEnabled: false });
await waitForPromises();
expect(handler).not.toHaveBeenCalled();
it('when there is no `currentPath` prop, the query is not called', async () => {
createWrapper({
provide: { currentPath: null },
});
await waitForPromises();
expect(handler).not.toHaveBeenCalled();
});
describe('when flag `asyncSidebarCounts` is enabled', () => {
handler = jest.fn().mockResolvedValue(
sidebarDataCountResponse({
openIssuesCount: 8,
openMergeRequestsCount: 2,
}),
it('when there is a `currentPath` prop, the query is called', async () => {
createWrapper({
provide: {
currentPath: 'group',
},
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
});
});
describe('Child components receive correct asyncCount prop', () => {
const emptyData = {
data: null,
};
const emptyNamespace = {
data: {
namespace: null,
},
};
const emptySidebar = {
data: {
namespace: {
id: 'gid://gitlab/Project/11',
sidebar: null,
__typename: 'Namespace',
},
},
};
describe('When the query is successful', () => {
it.each`
component | panelType | property | response | componentAsyncProp
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${'data'} | ${emptyData} | ${findStaticItems}
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${'namespace'} | ${emptyNamespace} | ${findStaticItems}
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${'sidebar'} | ${emptySidebar} | ${findStaticItems}
${'non-static NavItem'} | ${'explore'} | ${'data'} | ${emptyData} | ${findNonStaticItems}
${'non-static NavItem'} | ${'explore'} | ${'namespace'} | ${emptyNamespace} | ${findNonStaticItems}
${'non-static NavItem'} | ${'explore'} | ${'sidebar'} | ${emptySidebar} | ${findNonStaticItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${'data'} | ${emptyData} | ${findNonStaticSectionItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${'namespace'} | ${emptyNamespace} | ${findNonStaticSectionItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${'sidebar'} | ${emptySidebar} | ${findNonStaticSectionItems}
`(
'asyncCount prop returns an empty object when `$property` is undefined for `$component`',
async ({ response, panelType, componentAsyncProp }) => {
handler = jest.fn().mockResolvedValue(response);
createWrapper({
items: menuItems,
panelType,
handler,
provide: {
currentPath: 'group',
},
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(componentAsyncProp().wrappers.map((w) => w.props('asyncCount'))[0]).toEqual({});
},
);
it('when there is no `currentPath` prop, the query is not called', async () => {
createWrapper({
asyncSidebarCountsFlagEnabled: true,
provide: { currentPath: null },
});
await waitForPromises();
it.each`
component | panelType | property | response
${'PinnedSection'} | ${'project'} | ${'data'} | ${emptyData}
${'PinnedSection'} | ${'project'} | ${'namespace'} | ${emptyNamespace}
${'PinnedSection'} | ${'project'} | ${'sidebar'} | ${emptySidebar}
`(
'asyncCount prop returns an empty object when `$property` is undefined for `$component`',
async ({ response, panelType }) => {
handler = jest.fn().mockResolvedValue(response);
expect(handler).not.toHaveBeenCalled();
});
createWrapper({
items: menuItems,
panelType,
handler,
provide: {
currentPath: 'group',
},
});
it('when there is a `currentPath` prop, the query is called', async () => {
createWrapper({
provide: {
currentPath: 'group',
},
asyncSidebarCountsFlagEnabled: true,
});
await waitForPromises();
await waitForPromises();
expect(handler).toHaveBeenCalled();
});
});
describe('Child components receive correct asyncCount prop', () => {
const emptyData = {
data: null,
};
const emptyNamespace = {
data: {
namespace: null,
expect(handler).toHaveBeenCalled();
expect(findPinnedSection().props('asyncCount')).toEqual({});
},
};
const emptySidebar = {
data: {
namespace: {
id: 'gid://gitlab/Project/11',
sidebar: null,
__typename: 'Namespace',
},
},
};
);
describe('When the query is successful', () => {
it.each`
component | panelType | property | response | componentAsyncProp
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${'data'} | ${emptyData} | ${findStaticItems}
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${'namespace'} | ${emptyNamespace} | ${findStaticItems}
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${'sidebar'} | ${emptySidebar} | ${findStaticItems}
${'non-static NavItem'} | ${'explore'} | ${'data'} | ${emptyData} | ${findNonStaticItems}
${'non-static NavItem'} | ${'explore'} | ${'namespace'} | ${emptyNamespace} | ${findNonStaticItems}
${'non-static NavItem'} | ${'explore'} | ${'sidebar'} | ${emptySidebar} | ${findNonStaticItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${'data'} | ${emptyData} | ${findNonStaticSectionItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${'namespace'} | ${emptyNamespace} | ${findNonStaticSectionItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${'sidebar'} | ${emptySidebar} | ${findNonStaticSectionItems}
`(
'asyncCount prop returns an empty object when `$property` is undefined for `$component`',
async ({ response, panelType, componentAsyncProp }) => {
handler = jest.fn().mockResolvedValue(response);
createWrapper({
items: menuItems,
panelType,
handler,
provide: {
currentPath: 'group',
},
asyncSidebarCountsFlagEnabled: true,
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(componentAsyncProp().wrappers.map((w) => w.props('asyncCount'))[0]).toEqual({});
},
);
it.each`
component | panelType | property | response
${'PinnedSection'} | ${'project'} | ${'data'} | ${emptyData}
${'PinnedSection'} | ${'project'} | ${'namespace'} | ${emptyNamespace}
${'PinnedSection'} | ${'project'} | ${'sidebar'} | ${emptySidebar}
`(
'asyncCount prop returns an empty object when `$property` is undefined for `$component`',
async ({ response, panelType }) => {
handler = jest.fn().mockResolvedValue(response);
createWrapper({
items: menuItems,
panelType,
handler,
provide: {
currentPath: 'group',
},
asyncSidebarCountsFlagEnabled: true,
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(findPinnedSection().props('asyncCount')).toEqual({});
},
);
it.each`
component | panelType | componentAsyncProp
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${findStaticItems}
${'non-static NavItem'} | ${'explore'} | ${findNonStaticItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${findNonStaticSectionItems}
`(
'asyncCount prop returns the sidebar object for `$component` when it exists',
async ({ panelType, componentAsyncProp }) => {
const asyncCountData = {
openIssuesCount: 8,
openMergeRequestsCount: 2,
__typename: 'NamespaceSidebar',
};
handler = jest.fn().mockResolvedValue(
sidebarDataCountResponse({
openIssuesCount: 8,
openMergeRequestsCount: 2,
}),
);
createWrapper({
items: menuItems,
panelType,
provide: {
currentPath: 'group',
},
asyncSidebarCountsFlagEnabled: true,
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(
componentAsyncProp().wrappers.map((w) => w.props('asyncCount'))[0],
).toMatchObject(asyncCountData);
},
);
it('asyncCount prop returns the sidebar object for PinnedSection when it exists', async () => {
it.each`
component | panelType | componentAsyncProp
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${findStaticItems}
${'non-static NavItem'} | ${'explore'} | ${findNonStaticItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${findNonStaticSectionItems}
`(
'asyncCount prop returns the sidebar object for `$component` when it exists',
async ({ panelType, componentAsyncProp }) => {
const asyncCountData = {
openIssuesCount: 8,
openMergeRequestsCount: 2,
@ -405,66 +344,93 @@ describe('Sidebar Menu', () => {
createWrapper({
items: menuItems,
panelType: 'project',
panelType,
provide: {
currentPath: 'group',
},
asyncSidebarCountsFlagEnabled: true,
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(findPinnedSection().props('asyncCount')).toMatchObject(asyncCountData);
});
});
expect(componentAsyncProp().wrappers.map((w) => w.props('asyncCount'))[0]).toMatchObject(
asyncCountData,
);
},
);
describe('When the query is unsuccessful', () => {
beforeEach(() => {
handler = jest.fn().mockRejectedValue();
});
it('asyncCount prop returns the sidebar object for PinnedSection when it exists', async () => {
const asyncCountData = {
openIssuesCount: 8,
openMergeRequestsCount: 2,
__typename: 'NamespaceSidebar',
};
it.each`
component | panelType | componentAsyncProp
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${findStaticItems}
${'non-static NavItem'} | ${'explore'} | ${findNonStaticItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${findNonStaticSectionItems}
`(
'asyncCount prop returns an empty object for `$component` when the query fails',
async ({ panelType, componentAsyncProp }) => {
createWrapper({
items: menuItems,
panelType,
handler,
provide: {
currentPath: 'group',
},
asyncSidebarCountsFlagEnabled: true,
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(componentAsyncProp().wrappers.map((w) => w.props('asyncCount'))[0]).toEqual({});
},
handler = jest.fn().mockResolvedValue(
sidebarDataCountResponse({
openIssuesCount: 8,
openMergeRequestsCount: 2,
}),
);
it('asyncCount prop returns an empty object for PinnedSection when the query fails', async () => {
createWrapper({
items: menuItems,
panelType: 'project',
provide: {
currentPath: 'group',
},
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(findPinnedSection().props('asyncCount')).toMatchObject(asyncCountData);
});
});
describe('When the query is unsuccessful', () => {
beforeEach(() => {
handler = jest.fn().mockRejectedValue();
});
it.each`
component | panelType | componentAsyncProp
${'static NavItem'} | ${PANELS_WITH_PINS[0]} | ${findStaticItems}
${'non-static NavItem'} | ${'explore'} | ${findNonStaticItems}
${'MenuSection'} | ${PANELS_WITH_PINS[0]} | ${findNonStaticSectionItems}
`(
'asyncCount prop returns an empty object for `$component` when the query fails',
async ({ panelType, componentAsyncProp }) => {
createWrapper({
items: menuItems,
panelType: 'project',
panelType,
handler,
provide: {
currentPath: 'group',
},
asyncSidebarCountsFlagEnabled: true,
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(findPinnedSection().props('asyncCount')).toEqual({});
expect(componentAsyncProp().wrappers.map((w) => w.props('asyncCount'))[0]).toEqual({});
},
);
it('asyncCount prop returns an empty object for PinnedSection when the query fails', async () => {
createWrapper({
items: menuItems,
panelType: 'project',
handler,
provide: {
currentPath: 'group',
},
});
await waitForPromises();
expect(handler).toHaveBeenCalled();
expect(findPinnedSection().props('asyncCount')).toEqual({});
});
});
});

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true

require 'spec_helper'

# Verifies the background migration that backfills
# issuable_metric_images.namespace_id from the parent issue's namespace_id
# (joined via issue_id), using the shared sharding-key backfill examples.
RSpec.describe Gitlab::BackgroundMigration::BackfillIssuableMetricImagesNamespaceId,
  feature_category: :observability,
  schema: 20241203081752 do
  include_examples 'desired sharding key backfill job' do
    let(:batch_table) { :issuable_metric_images }
    let(:backfill_column) { :namespace_id }
    let(:backfill_via_table) { :issues }
    let(:backfill_via_column) { :namespace_id }
    let(:backfill_via_foreign_key) { :issue_id }
  end
end

View File

@ -0,0 +1,15 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BackfillResourceLinkEventsNamespaceId,
feature_category: :team_planning,
schema: 20241202141407 do
include_examples 'desired sharding key backfill job' do
let(:batch_table) { :resource_link_events }
let(:backfill_column) { :namespace_id }
let(:backfill_via_table) { :issues }
let(:backfill_via_column) { :namespace_id }
let(:backfill_via_foreign_key) { :issue_id }
end
end

View File

@ -4,22 +4,26 @@ require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, feature_category: :importers do
include AfterNextHelpers
include Import::UserMappingHelper
let_it_be(:project) { create(:project, :repository) }
let_it_be(:reviewer_1) { create(:user, username: 'john_smith', email: 'john@smith.com') }
let_it_be(:reviewer_2) { create(:user, username: 'jane_doe', email: 'jane@doe.com') }
let_it_be_with_reload(:project) do
create(:project, :repository, :bitbucket_server_import, :import_user_mapping_enabled)
end
# Identifiers taken from importers/bitbucket_server/pull_request.json
let_it_be(:author_source_user) { generate_source_user(project, 'username') }
let_it_be(:reviewer_1_source_user) { generate_source_user(project, 'john_smith') }
let_it_be(:reviewer_2_source_user) { generate_source_user(project, 'jane_doe') }
let(:pull_request_data) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
let(:pull_request) { BitbucketServer::Representation::PullRequest.new(pull_request_data) }
subject(:importer) { described_class.new(project, pull_request.to_hash) }
describe '#execute' do
describe '#execute', :clean_gitlab_redis_shared_state do
it 'imports the merge request correctly' do
expect_next(Gitlab::Import::MergeRequestCreator, project).to receive(:execute).and_call_original
expect_next(Gitlab::BitbucketServerImport::UserFinder, project).to receive(:author_id).and_call_original
expect_next(Gitlab::Import::MentionsConverter, 'bitbucket_server',
project).to receive(:convert).and_call_original
expect { importer.execute }.to change { MergeRequest.count }.by(1)
@ -30,14 +34,25 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, fe
title: pull_request.title,
source_branch: 'root/CODE_OF_CONDUCTmd-1530600625006',
target_branch: 'master',
reviewer_ids: match_array([reviewer_1.id, reviewer_2.id]),
reviewer_ids: an_array_matching([reviewer_1_source_user.mapped_user_id, reviewer_2_source_user.mapped_user_id]),
state: pull_request.state,
author_id: project.creator_id,
description: "*Created by: #{pull_request.author}*\n\n#{pull_request.description}",
author_id: author_source_user.mapped_user_id,
description: pull_request.description,
imported_from: 'bitbucket_server'
)
end
it 'pushes placeholder references', :aggregate_failures do
importer.execute
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to contain_exactly(
['MergeRequestReviewer', instance_of(Integer), 'user_id', reviewer_1_source_user.id],
['MergeRequestReviewer', instance_of(Integer), 'user_id', reviewer_2_source_user.id],
['MergeRequest', instance_of(Integer), 'author_id', author_source_user.id]
)
end
describe 'refs/merge-requests/:iid/head creation' do
before do
project.repository.create_branch(pull_request.source_branch_name, 'master')
@ -56,32 +71,6 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, fe
end
end
context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
before do
stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
end
it 'does not convert mentions' do
expect_next(Gitlab::Import::MentionsConverter, 'bitbucket_server', project).not_to receive(:convert)
importer.execute
end
end
context 'when the `bitbucket_server_user_mapping_by_username` flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
end
it 'imports reviewers correctly' do
importer.execute
merge_request = project.merge_requests.find_by_iid(pull_request.iid)
expect(merge_request.reviewer_ids).to match_array([reviewer_1.id, reviewer_2.id])
end
end
describe 'merge request diff head_commit_sha' do
before do
allow(pull_request).to receive(:source_branch_sha).and_return(source_branch_sha)
@ -152,5 +141,73 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestImporter, fe
importer.execute
end
context 'when user contribution mapping is disabled' do
let_it_be(:reviewer_1) { create(:user, username: 'john_smith', email: 'john@smith.com') }
let_it_be(:reviewer_2) { create(:user, username: 'jane_doe', email: 'jane@doe.com') }
before do
project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: false }).save!
end
it 'annotates the description with the source username when no matching user is found' do
allow_next_instance_of(Gitlab::BitbucketServerImport::UserFinder) do |finder|
allow(finder).to receive(:uid).and_return(nil)
end
importer.execute
merge_request = project.merge_requests.find_by_iid(pull_request.iid)
expect(merge_request).to have_attributes(
description: "*Created by: #{pull_request.author}*\n\n#{pull_request.description}"
)
end
context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
before do
stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
end
it 'does not convert mentions' do
expect_next(Gitlab::Import::MentionsConverter, 'bitbucket_server', project).not_to receive(:convert)
importer.execute
end
end
context 'when alternate UCM flags are disabled' do
before do
stub_feature_flags(
bitbucket_server_user_mapping_by_username: false,
bitbucket_server_convert_mentions_to_users: false,
bitbucket_server_user_mapping: false
)
end
it 'assigns the MR author' do
importer.execute
merge_request = project.merge_requests.find_by_iid(pull_request.iid)
expect(merge_request.author_id).to eq(project.creator_id)
end
it 'imports reviewers correctly' do
importer.execute
merge_request = project.merge_requests.find_by_iid(pull_request.iid)
expect(merge_request.reviewer_ids).to match_array([reviewer_1.id, reviewer_2.id])
end
end
it 'does not push placeholder references' do
importer.execute
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to be_empty
end
end
end
end

View File

@ -3,31 +3,26 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::ApprovedEvent, feature_category: :importers do
let_it_be(:project) do
create(:project, :repository, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: { 'token' => 'token' }
}
)
include Import::UserMappingHelper
let_it_be_with_reload(:project) do
create(:project, :repository, :bitbucket_server_import, :import_user_mapping_enabled)
end
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let_it_be(:now) { Time.now.utc.change(usec: 0) }
let!(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
end
let(:approved_event) do
let_it_be(:approved_event) do
{
id: 4,
approver_username: pull_request_author.username,
approver_email: pull_request_author.email,
approver_name: 'John Approvals',
approver_username: 'pull_request_author',
approver_email: 'pull_request_author@example.org',
created_at: now
}
end
let_it_be(:source_user) { generate_source_user(project, approved_event[:approver_username]) }
def expect_log(stage:, message:, iid:, event_id:)
allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
expect(Gitlab::BitbucketServerImport::Logger)
@ -37,6 +32,17 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Appro
subject(:importer) { described_class.new(project, merge_request) }
describe '#execute', :clean_gitlab_redis_shared_state do
it 'pushes placeholder references' do
importer.execute(approved_event)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to contain_exactly(
['Approval', instance_of(Integer), 'user_id', source_user.id],
['MergeRequestReviewer', instance_of(Integer), 'user_id', source_user.id],
['Note', instance_of(Integer), 'author_id', source_user.id]
)
end
it 'creates the approval, reviewer and approval note' do
expect { importer.execute(approved_event) }
.to change { merge_request.approvals.count }.from(0).to(1)
@ -45,61 +51,55 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Appro
approval = merge_request.approvals.first
expect(approval.user).to eq(pull_request_author)
expect(approval.user_id).to eq(source_user.mapped_user_id)
expect(approval.created_at).to eq(now)
note = merge_request.notes.first
expect(note.note).to eq('approved this merge request')
expect(note.author).to eq(pull_request_author)
expect(note.author_id).to eq(source_user.mapped_user_id)
expect(note.system).to be_truthy
expect(note.created_at).to eq(now)
reviewer = merge_request.reviewers.first
expect(reviewer.id).to eq(pull_request_author.id)
expect(reviewer.id).to eq(source_user.mapped_user_id)
end
context 'when a user with a matching username does not exist' do
let(:approved_event) { super().merge(approver_username: 'another_username') }
it 'logs its progress' do
expect_log(stage: 'import_approved_event', message: 'starting', iid: merge_request.iid, event_id: 4)
expect_log(stage: 'import_approved_event', message: 'finished', iid: merge_request.iid, event_id: 4)
it 'does not set an approver' do
expect_log(
stage: 'import_approved_event',
message: 'skipped due to missing user',
iid: merge_request.iid,
event_id: 4
)
importer.execute(approved_event)
end
context 'when user contribution mapping is disabled' do
let_it_be(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
end
before do
project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: false }).save!
end
it 'creates the approval, reviewer and approval note' do
expect { importer.execute(approved_event) }
.to not_change { merge_request.approvals.count }
.and not_change { merge_request.notes.count }
.and not_change { merge_request.reviewers.count }
.to change { merge_request.approvals.count }.from(0).to(1)
.and change { merge_request.notes.count }.from(0).to(1)
.and change { merge_request.reviewers.count }.from(0).to(1)
expect(merge_request.approvals).to be_empty
approval = merge_request.approvals.first
expect(approval.user_id).to eq(pull_request_author.id)
note = merge_request.notes.first
expect(note.author_id).to eq(pull_request_author.id)
reviewer = merge_request.reviewers.first
expect(reviewer.id).to eq(pull_request_author.id)
end
context 'when bitbucket_server_user_mapping_by_username flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
end
it 'finds the user based on email' do
importer.execute(approved_event)
approval = merge_request.approvals.first
expect(approval.user).to eq(pull_request_author)
end
end
context 'when no users match email or username' do
let(:approved_event) do
super().merge(
approver_username: 'another_username',
approver_email: 'anotheremail@example.com'
)
end
context 'when a user with a matching username does not exist' do
let(:approved_event) { super().merge(approver_username: 'another_username') }
it 'does not set an approver' do
expect_log(
@ -116,25 +116,53 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Appro
expect(merge_request.approvals).to be_empty
end
end
end
context 'if the reviewer already existed' do
before do
merge_request.reviewers = [pull_request_author]
merge_request.save!
context 'when bitbucket_server_user_mapping_by_username flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
end
it 'finds the user based on email' do
importer.execute(approved_event)
approval = merge_request.approvals.first
expect(approval.user).to eq(pull_request_author)
end
end
context 'when no users match email or username' do
let(:approved_event) do
super().merge(
approver_username: 'another_username',
approver_email: 'anotheremail@example.com'
)
end
it 'does not set an approver' do
expect_log(
stage: 'import_approved_event',
message: 'skipped due to missing user',
iid: merge_request.iid,
event_id: 4
)
expect { importer.execute(approved_event) }
.to not_change { merge_request.approvals.count }
.and not_change { merge_request.notes.count }
.and not_change { merge_request.reviewers.count }
expect(merge_request.approvals).to be_empty
end
end
end
it 'does not create the reviewer record' do
expect { importer.execute(approved_event) }.not_to change { merge_request.reviewers.count }
it 'does not push placeholder references' do
importer.execute(approved_event)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to be_empty
end
end
it 'logs its progress' do
expect_log(stage: 'import_approved_event', message: 'starting', iid: merge_request.iid, event_id: 4)
expect_log(stage: 'import_approved_event', message: 'finished', iid: merge_request.iid, event_id: 4)
importer.execute(approved_event)
end
end
end

View File

@ -3,31 +3,25 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::DeclinedEvent, feature_category: :importers do
let_it_be(:project) do
create(:project, :repository, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: { 'token' => 'token' }
}
)
include Import::UserMappingHelper
let_it_be_with_reload(:project) do
create(:project, :repository, :bitbucket_server_import, :import_user_mapping_enabled)
end
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let_it_be(:now) { Time.now.utc.change(usec: 0) }
let!(:decliner_author) do
create(:user, username: 'decliner_author', email: 'decliner_author@example.org')
end
let(:declined_event) do
let_it_be(:declined_event) do
{
id: 7,
decliner_username: decliner_author.username,
decliner_email: decliner_author.email,
created_at: now
decliner_name: 'John Rejections',
decliner_username: 'decliner_author',
decliner_email: 'decliner_author@example.org',
created_at: Time.now.utc.change(usec: 0)
}
end
let_it_be(:source_user) { generate_source_user(project, declined_event[:decliner_username]) }
def expect_log(stage:, message:, iid:, event_id:)
allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
expect(Gitlab::BitbucketServerImport::Logger)
@ -37,66 +31,92 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Decli
subject(:importer) { described_class.new(project, merge_request) }
describe '#execute', :clean_gitlab_redis_shared_state do
it 'pushes placeholder references' do
importer.execute(declined_event)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to contain_exactly(
['Event', instance_of(Integer), 'author_id', source_user.id],
['MergeRequest::Metrics', instance_of(Integer), 'latest_closed_by_id', source_user.id]
)
end
it 'imports the declined event' do
expect { importer.execute(declined_event) }
.to change { merge_request.events.count }.from(0).to(1)
.and change { merge_request.resource_state_events.count }.from(0).to(1)
metrics = merge_request.metrics.reload
expect(metrics.latest_closed_by).to eq(decliner_author)
expect(metrics.latest_closed_by_id).to eq(source_user.mapped_user_id)
expect(metrics.latest_closed_at).to eq(declined_event[:created_at])
event = merge_request.events.first
expect(event.author_id).to eq(source_user.mapped_user_id)
expect(event.action).to eq('closed')
resource_state_event = merge_request.resource_state_events.first
expect(resource_state_event.state).to eq('closed')
end
context 'when bitbucket_server_user_mapping_by_username flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
end
context 'when a user with a matching username does not exist' do
let(:another_username_event) do
declined_event.merge(decliner_username: 'another_username')
end
it 'finds the user based on email' do
importer.execute(another_username_event)
expect(merge_request.metrics.reload.latest_closed_by).to eq(decliner_author)
end
end
end
context 'when no users match email or username' do
let(:another_user_event) do
declined_event.merge(decliner_username: 'another_username', decliner_email: 'another_email@example.org')
end
it 'does not set a decliner' do
expect_log(
stage: 'import_declined_event',
message: 'skipped due to missing user',
iid: merge_request.iid,
event_id: 7
)
expect { importer.execute(another_user_event) }
.to not_change { merge_request.events.count }
.and not_change { merge_request.resource_state_events.count }
expect(merge_request.metrics.reload.latest_closed_by).to be_nil
end
end
it 'logs its progress' do
expect_log(stage: 'import_declined_event', message: 'starting', iid: merge_request.iid, event_id: 7)
expect_log(stage: 'import_declined_event', message: 'finished', iid: merge_request.iid, event_id: 7)
importer.execute(declined_event)
end
context 'when user contribution mapping is disabled' do
let_it_be(:decliner_author) { create(:user, username: 'decliner_author', email: 'decliner_author@example.org') }
before do
project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: false }).save!
end
context 'when bitbucket_server_user_mapping_by_username flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
end
context 'when a user with a matching username does not exist' do
let(:another_username_event) do
declined_event.merge(decliner_username: 'another_username')
end
it 'finds the user based on email' do
importer.execute(another_username_event)
expect(merge_request.metrics.reload.latest_closed_by).to eq(decliner_author)
end
end
end
context 'when no users match email or username' do
let(:another_user_event) do
declined_event.merge(decliner_username: 'another_username', decliner_email: 'another_email@example.org')
end
it 'does not set a decliner' do
expect_log(
stage: 'import_declined_event',
message: 'skipped due to missing user',
iid: merge_request.iid,
event_id: 7
)
expect { importer.execute(another_user_event) }
.to not_change { merge_request.events.count }
.and not_change { merge_request.resource_state_events.count }
expect(merge_request.metrics.reload.latest_closed_by).to be_nil
end
end
it 'does not push placeholder references' do
importer.execute(declined_event)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to be_empty
end
end
end
end

View File

@ -3,27 +3,19 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inline, feature_category: :importers do
let_it_be(:project) do
create(:project, :repository, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: { 'token' => 'token' }
}
)
include Import::UserMappingHelper
let_it_be_with_reload(:project) do
create(:project, :repository, :bitbucket_server_import, :import_user_mapping_enabled)
end
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let_it_be(:now) { Time.now.utc.change(usec: 0) }
let_it_be(:mentions_converter) { Gitlab::Import::MentionsConverter.new('bitbucket_server', project) }
let_it_be(:reply_author) { create(:user, username: 'reply_author', email: 'reply_author@example.org') }
let_it_be(:inline_note_author) do
create(:user, username: 'inline_note_author', email: 'inline_note_author@example.org')
end
let(:reply) do
let_it_be(:reply) do
{
author_email: reply_author.email,
author_username: reply_author.username,
author_email: 'reply_author@example.org',
author_username: 'reply_author',
note: 'I agree',
created_at: now,
updated_at: now,
@ -31,7 +23,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inlin
}
end
let(:pr_inline_comment) do
let_it_be(:pr_inline_comment) do
{
id: 7,
file_type: 'ADDED',
@ -41,8 +33,8 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inlin
old_pos: nil,
new_pos: 4,
note: 'Hello world',
author_email: inline_note_author.email,
author_username: inline_note_author.username,
author_email: 'inline_note_author@example.org',
author_username: 'inline_note_author',
comments: [reply],
created_at: now,
updated_at: now,
@ -50,6 +42,9 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inlin
}
end
let_it_be(:reply_source_user) { generate_source_user(project, reply[:author_username]) }
let_it_be(:note_source_user) { generate_source_user(project, pr_inline_comment[:author_username]) }
before do
allow(Gitlab::Import::MentionsConverter).to receive(:new).and_return(mentions_converter)
end
@ -62,7 +57,17 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inlin
subject(:importer) { described_class.new(project, merge_request) }
describe '#execute' do
describe '#execute', :clean_gitlab_redis_shared_state do
it 'pushes placeholder references' do
importer.execute(pr_inline_comment)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to contain_exactly(
['DiffNote', instance_of(Integer), 'author_id', note_source_user.id],
['DiffNote', instance_of(Integer), 'author_id', reply_source_user.id]
)
end
it 'imports the threaded discussion' do
expect(mentions_converter).to receive(:convert).and_call_original.twice
@ -78,11 +83,11 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inlin
expect(start_note.updated_at).to eq(pr_inline_comment[:updated_at])
expect(start_note.position.old_line).to be_nil
expect(start_note.position.new_line).to eq(pr_inline_comment[:new_pos])
expect(start_note.author).to eq(inline_note_author)
expect(start_note.author_id).to eq(note_source_user.mapped_user_id)
reply_note = notes.last
expect(reply_note.note).to eq(reply[:note])
expect(reply_note.author).to eq(reply_author)
expect(reply_note.author_id).to eq(reply_source_user.mapped_user_id)
expect(reply_note.created_at).to eq(reply[:created_at])
expect(reply_note.updated_at).to eq(reply[:created_at])
expect(reply_note.position.old_line).to be_nil
@ -98,22 +103,11 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inlin
context 'when note is invalid' do
let(:invalid_comment) do
{
id: 7,
file_type: 'ADDED',
from_sha: 'c5f4288162e2e6218180779c7f6ac1735bb56eab',
to_sha: 'a4c2164330f2549f67c13f36a93884cf66e976be',
file_path: '.gitmodules',
pr_inline_comment.merge(
old_pos: 3,
new_pos: 4,
note: '',
author_email: inline_note_author.email,
author_username: inline_note_author.username,
comments: [],
created_at: now,
updated_at: now,
parent_comment_note: nil
}
comments: []
)
end
it 'fallback to basic note' do
@ -143,13 +137,46 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Inlin
end
end
context 'when converting mention is failed' do
it 'logs its exception' do
expect(mentions_converter).to receive(:convert).and_raise(StandardError)
expect(Gitlab::ErrorTracking).to receive(:log_exception)
.with(StandardError, include(import_stage: 'create_diff_note'))
context 'when user contribution mapping is disabled' do
let_it_be(:reply_author) { create(:user, username: 'reply_author', email: 'reply_author@example.org') }
let_it_be(:inline_note_author) do
create(:user, username: 'inline_note_author', email: 'inline_note_author@example.org')
end
before do
project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: false }).save!
end
it 'imports the threaded discussion' do
expect(mentions_converter).to receive(:convert).and_call_original.twice
expect { importer.execute(pr_inline_comment) }.to change { Note.count }.by(2)
expect(merge_request.discussions.count).to eq(1)
notes = merge_request.notes.order(:id).to_a
start_note = notes.first
expect(start_note.author_id).to eq(inline_note_author.id)
reply_note = notes.last
expect(reply_note.author_id).to eq(reply_author.id)
end
context 'when converting mention is failed' do
it 'logs its exception' do
expect(mentions_converter).to receive(:convert).and_raise(StandardError)
expect(Gitlab::ErrorTracking).to receive(:log_exception)
.with(StandardError, include(import_stage: 'create_diff_note'))
importer.execute(pr_inline_comment)
end
end
it 'does not push placeholder references' do
importer.execute(pr_inline_comment)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to be_empty
end
end
end

View File

@ -3,31 +3,24 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::MergeEvent, feature_category: :importers do
let_it_be(:project) do
create(:project, :repository, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: { 'token' => 'token' }
}
)
end
include Import::UserMappingHelper
let_it_be(:project) { create(:project, :repository, :bitbucket_server_import, :import_user_mapping_enabled) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let_it_be(:now) { Time.now.utc.change(usec: 0) }
let_it_be(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
end
let_it_be(:merge_event) do
{
id: 3,
committer_email: pull_request_author.email,
committer_user: 'John Merges',
committer_username: 'pull_request_author',
committer_email: 'pull_request_author@example.org',
merge_timestamp: now,
merge_commit: '12345678'
}
end
let_it_be(:source_user) { generate_source_user(project, merge_event[:committer_username]) }
def expect_log(stage:, message:, iid:, event_id:)
allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
expect(Gitlab::BitbucketServerImport::Logger)
@ -36,14 +29,23 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Merge
subject(:importer) { described_class.new(project, merge_request) }
describe '#execute' do
describe '#execute', :clean_gitlab_redis_shared_state do
it 'pushes placeholder references' do
importer.execute(merge_event)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to contain_exactly(
['MergeRequest::Metrics', instance_of(Integer), 'merged_by_id', source_user.id]
)
end
it 'imports the merge event' do
importer.execute(merge_event)
merge_request.reload
metrics = merge_request.metrics.reload
expect(merge_request.metrics.merged_by).to eq(pull_request_author)
expect(merge_request.metrics.merged_at).to eq(merge_event[:merge_timestamp])
expect(metrics.merged_by_id).to eq(source_user.mapped_user_id)
expect(metrics.merged_at).to eq(merge_event[:merge_timestamp])
expect(merge_request.merge_commit_sha).to eq(merge_event[:merge_commit])
end
@ -53,5 +55,29 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Merge
importer.execute(merge_event)
end
context 'when user contribution mapping is disabled' do
let_it_be(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
end
before do
project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: false }).save!
end
it 'imports the merge event' do
importer.execute(merge_event)
metrics = merge_request.metrics.reload
expect(metrics.merged_by_id).to eq(pull_request_author.id)
end
it 'does not push placeholder references' do
importer.execute(merge_event)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to be_empty
end
end
end
end

View File

@ -3,33 +3,33 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::StandaloneNotes, feature_category: :importers do
let_it_be(:project) do
create(:project, :repository, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: { 'token' => 'token' }
}
)
end
include Import::UserMappingHelper
let_it_be(:project) { create(:project, :repository, :bitbucket_server_import, :import_user_mapping_enabled) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let_it_be(:now) { Time.now.utc.change(usec: 0) }
let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
let_it_be(:mentions_converter) { Gitlab::Import::MentionsConverter.new('bitbucket_server', project) }
let_it_be(:author_details) do
{
author_name: 'John Notes',
author_username: 'note_author',
author_email: 'note_author@example.org'
}
end
let(:pr_comment) do
let_it_be(:pr_comment) do
{
id: 5,
note: 'Hello world',
author_email: note_author.email,
author_username: note_author.username,
comments: [],
created_at: now,
updated_at: now,
parent_comment_note: nil
}
}.merge(author_details)
end
let_it_be(:source_user) { generate_source_user(project, pr_comment[:author_username]) }
before do
allow(Gitlab::Import::MentionsConverter).to receive(:new).and_return(mentions_converter)
end
@ -42,7 +42,16 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Stand
subject(:importer) { described_class.new(project, merge_request) }
describe '#execute' do
describe '#execute', :clean_gitlab_redis_shared_state do
it 'pushes placeholder reference' do
importer.execute(pr_comment)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to contain_exactly(
['Note', instance_of(Integer), 'author_id', source_user.id]
)
end
it 'imports the stand alone comments' do
expect(mentions_converter).to receive(:convert).and_call_original
@ -51,7 +60,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Stand
expect(merge_request.notes.count).to eq(1)
expect(merge_request.notes.first).to have_attributes(
note: end_with(pr_comment[:note]),
author: note_author,
author_id: source_user.mapped_user_id,
created_at: pr_comment[:created_at],
updated_at: pr_comment[:created_at],
imported_from: 'bitbucket_server'
@ -63,28 +72,24 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Stand
{
id: 6,
note: 'Foo bar',
author_email: note_author.email,
author_username: note_author.username,
comments: [],
created_at: now,
updated_at: now,
parent_comment_note: nil,
imported_from: 'bitbucket_server'
}
}.merge(author_details)
end
let(:pr_comment) do
{
id: 5,
note: 'Hello world',
author_email: note_author.email,
author_username: note_author.username,
comments: [pr_comment_extra],
created_at: now,
updated_at: now,
parent_comment_note: nil,
imported_from: 'bitbucket_server'
}
}.merge(author_details)
end
it 'imports multiple comments' do
@ -95,14 +100,14 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Stand
expect(merge_request.notes.count).to eq(2)
expect(merge_request.notes.first).to have_attributes(
note: end_with(pr_comment[:note]),
author: note_author,
author_id: source_user.mapped_user_id,
created_at: pr_comment[:created_at],
updated_at: pr_comment[:created_at],
imported_from: 'bitbucket_server'
)
expect(merge_request.notes.last).to have_attributes(
note: end_with(pr_comment_extra[:note]),
author: note_author,
author_id: source_user.mapped_user_id,
created_at: pr_comment_extra[:created_at],
updated_at: pr_comment_extra[:created_at],
imported_from: 'bitbucket_server'
@ -110,33 +115,17 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Stand
end
end
context 'when the author is not found' do
before do
allow_next_instance_of(Gitlab::BitbucketServerImport::UserFinder) do |user_finder|
allow(user_finder).to receive(:uid).and_return(nil)
end
end
it 'adds a note with the author username and email' do
importer.execute(pr_comment)
expect(Note.first.note).to include("*By #{note_author.username} (#{note_author.email})")
end
end
context 'when the note has a parent note' do
let(:pr_comment) do
{
id: 5,
note: 'Note',
author_email: note_author.email,
author_username: note_author.username,
comments: [],
created_at: now,
updated_at: now,
parent_comment_note: 'Parent note',
imported_from: 'bitbucket_server'
}
}.merge(author_details)
end
it 'adds the parent note before the actual note' do
@ -176,5 +165,41 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotes::Stand
importer.execute(pr_comment)
end
end
context 'when user contribution mapping is disabled' do
let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
before do
project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: false }).save!
end
it 'imports the merge event' do
expect { importer.execute(pr_comment) }.to change { Note.count }.by(1)
expect(merge_request.notes.first).to have_attributes(
author_id: note_author.id
)
end
it 'does not push placeholder references' do
importer.execute(pr_comment)
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to be_empty
end
context 'when the author is not found' do
before do
allow_next_instance_of(Gitlab::BitbucketServerImport::UserFinder) do |user_finder|
allow(user_finder).to receive(:uid).and_return(nil)
end
end
it 'adds a note with the author username and email' do
importer.execute(pr_comment)
expect(Note.first.note).to include("*By #{note_author.username} (#{note_author.email})")
end
end
end
end
end

View File

@ -4,25 +4,17 @@ require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporter, feature_category: :importers do
include AfterNextHelpers
include Import::UserMappingHelper
let_it_be_with_reload(:project) do
create(:project, :repository, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: { 'token' => 'token' }
}
)
create(:project, :repository, :bitbucket_server_import, :import_user_mapping_enabled)
end
let_it_be(:pull_request_data) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
let_it_be(:pull_request) { BitbucketServer::Representation::PullRequest.new(pull_request_data) }
let_it_be(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
let_it_be_with_reload(:merge_request) { create(:merge_request, iid: pull_request.iid, source_project: project) }
let(:mentions_converter) { Gitlab::Import::MentionsConverter.new('bitbucket_server', project) }
let!(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
end
let(:merge_event) do
instance_double(
BitbucketServer::Representation::Activity,
@ -30,7 +22,9 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
comment?: false,
merge_event?: true,
approved_event?: false,
committer_email: pull_request_author.email,
committer_name: 'Pull Request Author',
committer_username: 'pull_request_author',
committer_email: 'pull_request_author@example.com',
merge_timestamp: now,
merge_commit: '12345678'
)
@ -43,8 +37,9 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
comment?: false,
merge_event?: false,
approved_event?: true,
approver_username: pull_request_author.username,
approver_email: pull_request_author.email,
approver_name: 'Pull Request Author',
approver_username: 'pull_request_author',
approver_email: 'pull_request_author@example.org',
created_at: now
)
end
@ -52,9 +47,24 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
let(:pr_note) do
instance_double(
BitbucketServer::Representation::Comment,
id: 456,
note: 'Hello world',
author_email: note_author.email,
author_username: note_author.username,
author_name: 'Note Author',
author_email: 'note_author@example.org',
author_username: 'note_author',
comments: [pr_note_reply],
created_at: now,
updated_at: now,
parent_comment: nil)
end
let(:pr_note_reply) do
instance_double(
BitbucketServer::Representation::Comment,
note: 'Yes, absolutely.',
author_name: 'Note Author',
author_email: 'note_author@example.org',
author_username: 'note_author',
comments: [],
created_at: now,
updated_at: now,
@ -71,9 +81,16 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
comment: pr_note)
end
let!(:author_source_user) { generate_source_user(project, merge_event.committer_username) }
let!(:note_source_user) { generate_source_user(project, pr_note.author_username) }
let_it_be(:sample) { RepoHelpers.sample_compare }
let_it_be(:now) { Time.now.utc.change(usec: 0) }
let(:cached_references) do
placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
end
def expect_log(stage:, message:)
allow(Gitlab::BitbucketServerImport::Logger).to receive(:info).and_call_original
expect(Gitlab::BitbucketServerImport::Logger)
@ -86,8 +103,12 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
subject(:importer) { described_class.new(project.reload, pull_request.to_hash) }
describe '#execute' do
describe '#execute', :clean_gitlab_redis_shared_state do
context 'when a matching merge request is not found' do
before do
merge_request.update!(iid: merge_request.iid + 1)
end
it 'does nothing' do
expect { importer.execute }.not_to change { Note.count }
end
@ -100,9 +121,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
end
context 'when a matching merge request is found', :clean_gitlab_redis_shared_state do
let_it_be(:merge_request) { create(:merge_request, iid: pull_request.iid, source_project: project) }
context 'when a matching merge request is found' do
it 'logs its progress' do
allow_next(BitbucketServer::Client).to receive(:activities).and_return([])
@ -117,33 +136,35 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
allow_next(BitbucketServer::Client).to receive(:activities).and_return([pr_comment])
end
it 'pushes placeholder references' do
importer.execute
expect(cached_references).to contain_exactly(
['Note', instance_of(Integer), 'author_id', note_source_user.id],
["Note", instance_of(Integer), "author_id", note_source_user.id]
)
end
it 'imports the stand alone comments' do
expect(mentions_converter).to receive(:convert).and_call_original
expect { importer.execute }.to change { Note.count }.by(2)
expect { subject.execute }.to change { Note.count }.by(1)
notes = merge_request.notes.order(:id)
expect(merge_request.notes.count).to eq(1)
expect(merge_request.notes.first).to have_attributes(
expect(notes.first).to have_attributes(
note: end_with(pr_note.note),
author: note_author,
author_id: note_source_user.mapped_user_id,
created_at: pr_note.created_at,
updated_at: pr_note.created_at,
imported_from: 'bitbucket_server'
)
end
context 'when the author is not found' do
before do
allow_next_instance_of(Gitlab::BitbucketServerImport::UserFinder) do |user_finder|
allow(user_finder).to receive(:uid).and_return(nil)
end
end
it 'adds a note with the author username and email' do
subject.execute
expect(Note.first.note).to include("*By #{note_author.username} (#{note_author.email})")
end
expect(notes.last).to have_attributes(
note: end_with(pr_note_reply.note),
author_id: note_source_user.mapped_user_id,
created_at: pr_note_reply.created_at,
updated_at: pr_note_reply.created_at,
imported_from: 'bitbucket_server'
)
end
context 'when the note has a parent note' do
@ -151,8 +172,9 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
instance_double(
BitbucketServer::Representation::Comment,
note: 'Note',
author_email: note_author.email,
author_username: note_author.username,
author_name: 'Note Author',
author_email: 'note_author@example.org',
author_username: 'note_author',
comments: [],
created_at: now,
updated_at: now,
@ -164,8 +186,9 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
instance_double(
BitbucketServer::Representation::Comment,
note: 'Parent note',
author_email: note_author.email,
author_username: note_author.username,
author_name: 'Note Author',
author_email: 'note_author@example.org',
author_username: 'note_author',
comments: [],
created_at: now,
updated_at: now,
@ -174,12 +197,32 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
it 'adds the parent note before the actual note' do
subject.execute
importer.execute
expect(Note.first.note).to include("> #{pr_parent_note.note}\n\n")
end
end
context 'when an exception is raised during comment creation' do
before do
allow(importer).to receive(:pull_request_comment_attributes).and_raise(exception)
end
let(:exception) { StandardError.new('something went wrong') }
it 'logs the error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
exception,
import_stage: 'import_standalone_pr_comments',
comment_id: pr_note.id,
error: exception.message,
merge_request_id: merge_request.id
)
importer.execute
end
end
context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
before do
stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
@ -188,7 +231,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
it 'does not convert mentions' do
expect(mentions_converter).not_to receive(:convert)
subject.execute
importer.execute
end
end
@ -200,17 +243,13 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
end
context 'when PR has threaded discussion' do
let_it_be(:reply_author) { create(:user, username: 'reply_author', email: 'reply_author@example.org') }
let_it_be(:inline_note_author) do
create(:user, username: 'inline_note_author', email: 'inline_note_author@example.org')
end
context 'when PR has threaded inline discussion' do
let(:reply) do
instance_double(
BitbucketServer::Representation::PullRequestComment,
author_email: reply_author.email,
author_username: reply_author.username,
author_name: 'Reply Author',
author_email: 'reply_author@example.org',
author_username: 'reply_author',
note: 'I agree',
created_at: now,
updated_at: now,
@ -220,6 +259,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
let(:pr_inline_note) do
instance_double(
BitbucketServer::Representation::PullRequestComment,
id: 123,
file_type: 'ADDED',
from_sha: pull_request.target_branch_sha,
to_sha: pull_request.source_branch_sha,
@ -227,8 +267,9 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
old_pos: nil,
new_pos: 4,
note: 'Hello world',
author_email: inline_note_author.email,
author_username: inline_note_author.username,
author_name: 'Inline Note Author',
author_email: 'inline_note_author@example.org',
author_username: 'inline_note_author',
comments: [reply],
created_at: now,
updated_at: now,
@ -244,14 +285,15 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
comment: pr_inline_note)
end
let_it_be(:reply_source_user) { generate_source_user(project, 'reply_author') }
let_it_be(:note_source_user) { generate_source_user(project, 'inline_note_author') }
before do
allow_next(BitbucketServer::Client).to receive(:activities).and_return([pr_inline_comment])
end
it 'imports the threaded discussion' do
expect(mentions_converter).to receive(:convert).and_call_original.twice
expect { subject.execute }.to change { Note.count }.by(2)
expect { importer.execute }.to change { Note.count }.by(2)
expect(merge_request.discussions.count).to eq(1)
@ -263,12 +305,12 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
expect(start_note.updated_at).to eq(pr_inline_note.updated_at)
expect(start_note.position.old_line).to be_nil
expect(start_note.position.new_line).to eq(pr_inline_note.new_pos)
expect(start_note.author).to eq(inline_note_author)
expect(start_note.author_id).to eq(note_source_user.mapped_user_id)
expect(start_note.imported_from).to eq('bitbucket_server')
reply_note = notes.last
expect(reply_note.note).to eq(reply.note)
expect(reply_note.author).to eq(reply_author)
expect(reply_note.author_id).to eq(reply_source_user.mapped_user_id)
expect(reply_note.created_at).to eq(reply.created_at)
expect(reply_note.updated_at).to eq(reply.created_at)
expect(reply_note.position.old_line).to be_nil
@ -276,6 +318,64 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
expect(reply_note.imported_from).to eq('bitbucket_server')
end
it 'pushes placeholder references' do
importer.execute
expect(cached_references).to contain_exactly(
['DiffNote', instance_of(Integer), 'author_id', reply_source_user.id],
['DiffNote', instance_of(Integer), 'author_id', note_source_user.id]
)
end
context 'when a diff note is invalid' do
let(:pr_inline_note) do
instance_double(
BitbucketServer::Representation::PullRequestComment,
file_type: 'ADDED',
from_sha: pull_request.target_branch_sha,
to_sha: pull_request.source_branch_sha,
file_path: '.gitmodules',
old_pos: 3,
new_pos: nil,
note: 'Hello world',
author_name: 'Inline Note Author',
author_email: 'inline_note_author@example.org',
author_username: 'inline_note_author',
comments: [],
created_at: now,
updated_at: now,
parent_comment: nil)
end
it 'creates a fallback diff note' do
importer.execute
notes = merge_request.notes.order(:id).to_a
note = notes.first
expect(note.note).to eq("*Comment on .gitmodules:3 -->*\n\nHello world")
end
end
context 'when an exception is raised during DiffNote creation' do
before do
allow(importer).to receive(:pull_request_comment_attributes).and_raise(exception)
end
let(:exception) { StandardError.new('something went wrong') }
it 'logs the error' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
exception,
import_stage: 'create_diff_note',
comment_id: 123,
error: exception.message
)
importer.execute
end
end
context 'when the `bitbucket_server_convert_mentions_to_users` flag is disabled' do
before do
stub_feature_flags(bitbucket_server_convert_mentions_to_users: false)
@ -284,7 +384,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
it 'does not convert mentions' do
expect(mentions_converter).not_to receive(:convert)
subject.execute
importer.execute
end
end
@ -306,10 +406,18 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
merge_request.reload
expect(merge_request.metrics.merged_by).to eq(pull_request_author)
expect(merge_request.metrics.merged_by_id).to eq(author_source_user.mapped_user_id)
expect(merge_request.metrics.merged_at).to eq(merge_event.merge_timestamp)
expect(merge_request.merge_commit_sha).to eq(merge_event.merge_commit)
end
it 'pushes placeholder references' do
importer.execute
expect(cached_references).to contain_exactly(
["MergeRequest::Metrics", instance_of(Integer), "merged_by_id", author_source_user.id]
)
end
end
context 'when PR has an approved event' do
@ -325,70 +433,34 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
approval = merge_request.approvals.first
expect(approval.user).to eq(pull_request_author)
expect(approval.user_id).to eq(author_source_user.mapped_user_id)
expect(approval.created_at).to eq(now)
note = merge_request.notes.first
expect(note.note).to eq('approved this merge request')
expect(note.author).to eq(pull_request_author)
expect(note.author_id).to eq(author_source_user.mapped_user_id)
expect(note.system).to be_truthy
expect(note.created_at).to eq(now)
reviewer = merge_request.reviewers.first
expect(reviewer.id).to eq(pull_request_author.id)
expect(reviewer.id).to eq(author_source_user.mapped_user_id)
end
context 'when a user with a matching username does not exist' do
before do
pull_request_author.update!(username: 'another_username')
end
it 'pushes placeholder references' do
importer.execute
it 'does not set an approver' do
expect { importer.execute }
.to not_change { merge_request.approvals.count }
.and not_change { merge_request.notes.count }
.and not_change { merge_request.reviewers.count }
expect(merge_request.approvals).to be_empty
end
context 'when bitbucket_server_user_mapping_by_username flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
end
it 'finds the user based on email' do
importer.execute
approval = merge_request.approvals.first
expect(approval.user).to eq(pull_request_author)
end
end
context 'when no users match email or username' do
let_it_be(:another_author) { create(:user) }
before do
pull_request_author.destroy!
end
it 'does not set an approver' do
expect { importer.execute }
.to not_change { merge_request.approvals.count }
.and not_change { merge_request.notes.count }
.and not_change { merge_request.reviewers.count }
expect(merge_request.approvals).to be_empty
end
end
expect(cached_references).to contain_exactly(
['Approval', instance_of(Integer), 'user_id', author_source_user.id],
['MergeRequestReviewer', instance_of(Integer), 'user_id', author_source_user.id],
['Note', instance_of(Integer), 'author_id', author_source_user.id]
)
end
context 'if the reviewer already existed' do
context 'if the reviewer is already assigned to the MR' do
before do
merge_request.reviewers = [pull_request_author]
merge_request.reviewers = [author_source_user.mapped_user]
merge_request.save!
end
@ -417,21 +489,165 @@ RSpec.describe Gitlab::BitbucketServerImport::Importers::PullRequestNotesImporte
end
context 'when the import data does not have credentials' do
before do
project.import_data.credentials = nil
project.import_data.save!
let_it_be(:project) do
create(:project, :repository, :bitbucket_server_import,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: nil
}
)
end
include_examples 'import is skipped'
end
context 'when the import data does not have data' do
before do
project.import_data.data = nil
project.import_data.save!
let_it_be(:project) do
create(:project, :repository, :bitbucket_server_import,
import_data_attributes: {
data: nil,
credentials: { 'token' => 'token' }
}
)
end
include_examples 'import is skipped'
end
context 'when user contribution mapping is disabled' do
let!(:note_author) { create(:user, username: 'note_author', email: 'note_author@example.org') }
let!(:pull_request_author) do
create(:user, username: 'pull_request_author', email: 'pull_request_author@example.org')
end
before do
project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: false }).save!
allow_next(BitbucketServer::Client).to receive(:activities).and_return([approved_event])
end
it 'does not push placeholder references' do
importer.execute
cached_references = placeholder_user_references(::Import::SOURCE_BITBUCKET_SERVER, project.import_state.id)
expect(cached_references).to be_empty
end
context 'when the author is not found' do
before do
allow_next(BitbucketServer::Client).to receive(:activities).and_return([pr_comment])
allow_next_instance_of(Gitlab::BitbucketServerImport::UserFinder) do |user_finder|
allow(user_finder).to receive(:uid).and_return(nil)
end
end
it 'adds a note with the author username and email' do
importer.execute
expect(Note.first.note).to include("*By #{note_author.username} (#{note_author.email})")
end
end
context 'when bitbucket_server_user_mapping_by_username flag is disabled' do
before do
stub_feature_flags(bitbucket_server_user_mapping_by_username: false)
end
context 'when a user with a matching username does not exist' do
before do
pull_request_author.update!(username: 'another_username')
end
it 'finds the user based on email' do
importer.execute
approval = merge_request.approvals.first
expect(approval.user).to eq(pull_request_author)
end
context 'when no users match email or username' do
let_it_be(:another_author) { create(:user) }
before do
pull_request_author.destroy!
end
it 'does not set an approver' do
expect { importer.execute }
.to not_change { merge_request.approvals.count }
.and not_change { merge_request.notes.count }
.and not_change { merge_request.reviewers.count }
expect(merge_request.approvals).to be_empty
end
end
end
context 'when importing merge events' do
before do
allow_next(BitbucketServer::Client).to receive(:activities).and_return([merge_event])
end
it 'attributes the merge event to the project creator' do
importer.execute
expect(merge_request.metrics.merged_by_id).to eq(project.creator_id)
end
end
context 'when PR has threaded discussion' do
let(:reply) do
instance_double(
BitbucketServer::Representation::PullRequestComment,
author_name: 'Reply Author',
author_email: 'reply_author@example.org',
author_username: 'reply_author',
note: 'I agree',
created_at: now,
updated_at: now,
parent_comment: nil)
end
let(:pr_inline_note) do
instance_double(
BitbucketServer::Representation::PullRequestComment,
file_type: 'ADDED',
from_sha: pull_request.target_branch_sha,
to_sha: pull_request.source_branch_sha,
file_path: '.gitmodules',
old_pos: nil,
new_pos: 4,
note: 'Hello world',
author_name: 'Inline Note Author',
author_email: 'inline_note_author@example.org',
author_username: 'inline_note_author',
comments: [reply],
created_at: now,
updated_at: now,
parent_comment: nil)
end
let(:pr_inline_comment) do
instance_double(
BitbucketServer::Representation::Activity,
comment?: true,
inline_comment?: true,
merge_event?: false,
comment: pr_inline_note)
end
before do
allow_next(BitbucketServer::Client).to receive(:activities).and_return([pr_inline_comment])
end
it 'attributes the comments to the project creator' do
importer.execute
expect(merge_request.notes.collect(&:author_id)).to match_array([project.creator_id, project.creator_id])
end
end
end
end
end
end

View File

@ -0,0 +1,121 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::ProjectCreator, feature_category: :importers do
let(:project_key) { 'TEST' }
let(:repo_slug) { 'my-repo' }
let(:name) { 'Test Project' }
let(:namespace) { create(:group) }
let(:current_user) { create(:user) }
let(:session_data) { { 'token' => 'abc123' } }
let(:timeout_strategy) { 'default' }
let(:repo_data) do
{
'description' => 'Test repo',
'project' => {
'public' => true
},
'links' => {
'self' => [
{
'href' => 'http://localhost/brows',
'name' => 'http'
}
],
'clone' => [
{
'href' => 'http://localhost/clone',
'name' => 'http'
}
]
}
}
end
let(:repo) do
BitbucketServer::Representation::Repo.new(repo_data)
end
subject(:creator) do
described_class.new(
project_key,
repo_slug,
repo,
name,
namespace,
current_user,
session_data,
timeout_strategy
)
end
describe '#execute' do
let_it_be(:project) { create(:project) }
let(:service) { instance_double(Projects::CreateService) }
before do
allow(Projects::CreateService).to receive(:new).and_return(service)
allow(service).to receive(:execute).and_return(project)
end
it 'passes the arguments to Project::CreateService' do
expected_params = {
name: name,
path: name,
description: repo.description,
namespace_id: namespace.id,
organization_id: namespace.organization_id,
visibility_level: repo.visibility_level,
import_type: 'bitbucket_server',
import_source: repo.browse_url,
import_url: repo.clone_url,
import_data: {
credentials: session_data,
data: {
project_key: project_key,
repo_slug: repo_slug,
timeout_strategy: timeout_strategy,
bitbucket_server_notes_separate_worker: true,
user_contribution_mapping_enabled: true
}
},
skip_wiki: true
}
expect(Projects::CreateService).to receive(:new)
.with(current_user, expected_params)
creator.execute
end
context 'when feature flags are disabled' do
before do
stub_feature_flags(bitbucket_server_notes_separate_worker: false)
stub_feature_flags(importer_user_mapping: false)
stub_feature_flags(bitbucket_server_user_mapping: false)
end
it 'disables these options in the import_data' do
expected_params = {
import_data: {
credentials: session_data,
data: {
project_key: project_key,
repo_slug: repo_slug,
timeout_strategy: timeout_strategy,
bitbucket_server_notes_separate_worker: false,
user_contribution_mapping_enabled: false
}
}
}
expect(Projects::CreateService).to receive(:new)
.with(current_user, a_hash_including(expected_params))
creator.execute
end
end
end
end

View File

@ -5,50 +5,133 @@ require 'spec_helper'
RSpec.describe Gitlab::BitbucketServerImport::UserFinder, :clean_gitlab_redis_shared_state, feature_category: :importers do
let_it_be(:user) { create(:user) }
let(:created_id) { 1 }
let(:project) { build_stubbed(:project, creator_id: created_id, id: 1) }
let_it_be_with_reload(:project) do
create(:project, :repository, :bitbucket_server_import, :import_user_mapping_enabled)
end
let(:source_user) { build_stubbed(:import_source_user, :completed) }
let(:user_representation) do
{
username: user.username,
display_name: user.name
}
end
subject(:user_finder) { described_class.new(project) }
describe '#author_id' do
it 'calls uid method' do
object = { author_username: user.username }
expect(user_finder).to receive(:uid).with(object).and_return(10)
expect(user_finder.author_id(object)).to eq(10)
before do
allow_next_instance_of(Gitlab::Import::SourceUserMapper) do |isum|
allow(isum).to receive(:find_or_create_source_user).and_return(source_user)
end
end
context 'when corresponding user does not exist' do
it 'fallsback to project creator_id' do
object = { author_email: 'unknown' }
expect(user_finder.author_id(object)).to eq(created_id)
end
it 'returns the mapped user' do
expect(
user_finder.author_id(user_representation)
).to eq(source_user.mapped_user.id)
end
end
describe '#uid' do
context 'when provided object is a Hash' do
it 'maps to an existing user with the same username' do
before do
allow_next_instance_of(Gitlab::Import::SourceUserMapper) do |isum|
allow(isum).to receive(:find_or_create_source_user).and_return(source_user)
end
end
it 'takes a user data hash and finds the mapped user ID' do
user_id = user_finder.uid(user_representation)
expect(user_id).to eq(source_user.mapped_user.id)
end
end
context 'when user contribution mapping is disabled' do
before do
project.build_or_assign_import_data(data: { user_contribution_mapping_enabled: false }).save!
end
describe '#find_user_id' do
context 'when user cannot be found' do
it 'caches and returns nil' do
expect(User).to receive(:find_by_any_email).once.and_call_original
2.times do
user_id = user_finder.find_user_id(by: :email, value: 'nobody@example.com')
expect(user_id).to be_nil
end
end
end
context 'when user can be found' do
it 'caches and returns the user ID by email' do
expect(User).to receive(:find_by_any_email).once.and_call_original
2.times do
user_id = user_finder.find_user_id(by: :email, value: user.email)
expect(user_id).to eq(user.id)
end
end
it 'caches and returns the user ID by username' do
expect(User).to receive(:find_by_username).once.and_call_original
2.times do
user_id = user_finder.find_user_id(by: :username, value: user.username)
expect(user_id).to eq(user.id)
end
end
end
end
describe '#author_id' do
it 'calls uid method' do
object = { author_username: user.username }
expect(user_finder.uid(object)).to eq(user.id)
expect(user_finder).to receive(:uid).with(object).and_return(10)
expect(user_finder.author_id(object)).to eq(10)
end
context 'when corresponding user does not exist' do
before do
project.update!(creator_id: 123)
end
it 'falls back to project creator_id' do
object = { author_email: 'unknown' }
expect(user_finder.author_id(object)).to eq(123)
end
end
end
context 'when provided object is a representation Object' do
it 'maps to a existing user with the same username' do
object = instance_double(BitbucketServer::Representation::Comment, author_username: user.username)
describe '#uid' do
context 'when provided object is a Hash' do
it 'maps to an existing user with the same username' do
object = { author_username: user.username }
expect(user_finder.uid(object)).to eq(user.id)
expect(user_finder.uid(object)).to eq(user.id)
end
end
end
context 'when corresponding user does not exist' do
it 'returns nil' do
object = { author_username: 'unknown' }
context 'when provided object is a representation object' do
it 'maps to a existing user with the same username' do
object = instance_double(BitbucketServer::Representation::Comment, author_username: user.username)
expect(user_finder.uid(object)).to eq(nil)
expect(user_finder.uid(object)).to eq(user.id)
end
end
context 'when corresponding user does not exist' do
it 'returns nil' do
object = { author_username: 'unknown' }
expect(user_finder.uid(object)).to be_nil
end
end
end
@ -77,43 +160,7 @@ RSpec.describe Gitlab::BitbucketServerImport::UserFinder, :clean_gitlab_redis_sh
it 'returns nil' do
object = { author_email: 'unknown' }
expect(user_finder.uid(object)).to eq(nil)
end
end
end
end
describe '#find_user_id' do
context 'when user cannot be found' do
it 'caches and returns nil' do
expect(User).to receive(:find_by_any_email).once.and_call_original
2.times do
user_id = user_finder.find_user_id(by: :email, value: 'nobody@example.com')
expect(user_id).to be_nil
end
end
end
context 'when user can be found' do
it 'caches and returns the user ID by email' do
expect(User).to receive(:find_by_any_email).once.and_call_original
2.times do
user_id = user_finder.find_user_id(by: :email, value: user.email)
expect(user_id).to eq(user.id)
end
end
it 'caches and returns the user ID by username' do
expect(User).to receive(:find_by_username).once.and_call_original
2.times do
user_id = user_finder.find_user_id(by: :username, value: user.username)
expect(user_id).to eq(user.id)
expect(user_finder.uid(object)).to be_nil
end
end
end

View File

@ -119,4 +119,35 @@ RSpec.describe Gitlab::Import::MergeRequestHelpers, type: :helper, feature_categ
expect(note.system_note_metadata).to have_attributes(action: 'approved')
end
end
describe '.create_merge_request_metrics' do
let(:attributes) do
{
merged_by_id: user.id,
merged_at: Time.current - 1.hour,
latest_closed_by_id: user.id,
latest_closed_at: Time.current + 1.hour
}
end
subject(:metric) { helper.create_merge_request_metrics(attributes) }
before do
allow(helper).to receive(:merge_request).and_return(merge_request)
end
it 'returns a metric with the provided attributes' do
expect(metric).to have_attributes(attributes)
end
it 'creates a metric if none currently exists' do
merge_request.metrics.destroy!
expect { metric }.to change { MergeRequest::Metrics.count }.from(0).to(1)
end
it 'updates the existing record if one already exists' do
expect { metric }.not_to change { MergeRequest::Metrics.count }
end
end
end

View File

@ -48,44 +48,10 @@ RSpec.describe Sidebars::Groups::Menus::IssuesMenu, feature_category: :navigatio
end
end
it_behaves_like 'pill_count formatted results' do
let(:count_service) { ::Groups::OpenIssuesCountService }
end
describe '#pill_count_field' do
it 'returns the correct GraphQL field name' do
expect(menu.pill_count_field).to eq('openIssuesCount')
end
context 'when async_sidebar_counts feature flag is disabled' do
before do
stub_feature_flags(async_sidebar_counts: false)
end
it 'returns nil' do
expect(menu.pill_count_field).to be_nil
end
end
end
context 'when count query times out' do
let(:count_service) { ::Groups::OpenIssuesCountService }
before do
stub_feature_flags(async_sidebar_counts: false)
allow_next_instance_of(count_service) do |service|
allow(service).to receive(:count).and_raise(ActiveRecord::QueryCanceled)
end
end
it 'logs the error and returns a null count' do
expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
ActiveRecord::QueryCanceled, group_id: group.id, query: 'group_sidebar_issues_count'
).and_call_original
expect(menu.pill_count).to be_nil
end
end
it_behaves_like 'serializable as super_sidebar_menu_args' do

View File

@ -30,10 +30,6 @@ RSpec.describe Sidebars::Groups::Menus::MergeRequestsMenu, feature_category: :na
end
end
it_behaves_like 'pill_count formatted results' do
let(:count_service) { ::Groups::MergeRequestsCountService }
end
it_behaves_like 'serializable as super_sidebar_menu_args' do
let(:extra_attrs) do
{
@ -50,15 +46,5 @@ RSpec.describe Sidebars::Groups::Menus::MergeRequestsMenu, feature_category: :na
it 'returns the correct GraphQL field name' do
expect(menu.pill_count_field).to eq('openMergeRequestsCount')
end
context 'when async_sidebar_counts feature flag is disabled' do
before do
stub_feature_flags(async_sidebar_counts: false)
end
it 'returns nil' do
expect(menu.pill_count_field).to be_nil
end
end
end
end

View File

@ -55,66 +55,10 @@ RSpec.describe Sidebars::Projects::Menus::IssuesMenu, feature_category: :navigat
end
end
describe '#pill_count' do
before do
stub_feature_flags(async_sidebar_counts: false)
end
it 'returns zero when there are no open issues' do
expect(subject.pill_count).to eq '0'
end
it 'memoizes the query' do
subject.pill_count
control = ActiveRecord::QueryRecorder.new do
subject.pill_count
end
expect(control.count).to eq 0
end
context 'when there are open issues' do
it 'returns the number of open issues' do
create_list(:issue, 2, :opened, project: project)
build_stubbed(:issue, :closed, project: project)
expect(subject.pill_count).to eq '2'
end
end
describe 'formatting' do
it 'returns truncated digits for count value over 1000' do
allow(project).to receive(:open_issues_count).and_return 1001
expect(subject.pill_count).to eq('1k')
end
end
context 'when async_sidebar_counts feature flag is enabled' do
before do
stub_feature_flags(async_sidebar_counts: true)
end
it 'returns nil' do
expect(subject.pill_count).to be_nil
end
end
end
describe '#pill_count_field' do
it 'returns the correct GraphQL field name' do
expect(subject.pill_count_field).to eq('openIssuesCount')
end
context 'when async_sidebar_counts feature flag is disabled' do
before do
stub_feature_flags(async_sidebar_counts: false)
end
it 'returns nil' do
expect(subject.pill_count_field).to be_nil
end
end
end
describe 'Menu Items' do

View File

@ -49,70 +49,9 @@ RSpec.describe Sidebars::Projects::Menus::MergeRequestsMenu, feature_category: :
end
end
describe '#pill_count' do
  # Synchronous counting path: only active when async counts are disabled.
  before { stub_feature_flags(async_sidebar_counts: false) }

  it 'returns zero when there are no open merge requests' do
    expect(subject.pill_count).to eq '0'
  end

  it 'memoizes the query' do
    subject.pill_count

    queries = ActiveRecord::QueryRecorder.new { subject.pill_count }

    # A second call must hit the memoized value, not the database.
    expect(queries.count).to eq 0
  end

  context 'when there are open merge requests' do
    it 'returns the number of open merge requests' do
      create_list(:merge_request, 2, :unique_branches, source_project: project, author: user, state: :opened)
      # Merged MRs are excluded from the pill count.
      create(:merge_request, source_project: project, state: :merged)

      expect(subject.pill_count).to eq '2'
    end
  end

  describe 'formatting' do
    context 'when the count value is over 1000' do
      before { allow(project).to receive(:open_merge_requests_count).and_return(1001) }

      it 'returns truncated digits' do
        expect(subject.pill_count).to eq('1k')
      end
    end
  end

  context 'when async_sidebar_counts feature flag is enabled' do
    before { stub_feature_flags(async_sidebar_counts: true) }

    # Counting is delegated to the async GraphQL path, so no inline count.
    it 'returns nil' do
      expect(subject.pill_count).to be_nil
    end
  end
end
describe '#pill_count_field' do
  it 'returns the correct GraphQL field name' do
    expect(subject.pill_count_field).to eq('openMergeRequestsCount')
  end

  context 'when async_sidebar_counts feature flag is disabled' do
    # Without async counts there is no GraphQL-backed pill, hence no field name.
    before { stub_feature_flags(async_sidebar_counts: false) }

    it 'returns nil' do
      expect(subject.pill_count_field).to be_nil
    end
  end
end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillResourceLinkEventsNamespaceId, feature_category: :team_planning do
  # The batched background migration class name this post-deploy migration enqueues.
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      # Before migrating up (and after rolling back) nothing is enqueued.
      migration.before -> { expect(batched_migration).not_to have_scheduled_batched_migration }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :resource_link_events,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_main_cell,
          # Backfill target column, then source table / source column / join key.
          job_arguments: %i[namespace_id issues namespace_id issue_id]
        )
      }
    end
  end
end

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe QueueBackfillIssuableMetricImagesNamespaceId, feature_category: :observability do
  # The batched background migration class name this post-deploy migration enqueues.
  let!(:batched_migration) { described_class::MIGRATION }

  it 'schedules a new batched migration' do
    reversible_migration do |migration|
      # Before migrating up (and after rolling back) nothing is enqueued.
      migration.before -> { expect(batched_migration).not_to have_scheduled_batched_migration }

      migration.after -> {
        expect(batched_migration).to have_scheduled_batched_migration(
          table_name: :issuable_metric_images,
          column_name: :id,
          interval: described_class::DELAY_INTERVAL,
          batch_size: described_class::BATCH_SIZE,
          sub_batch_size: described_class::SUB_BATCH_SIZE,
          gitlab_schema: :gitlab_main_cell,
          # Backfill target column, then source table / source column / join key.
          job_arguments: %i[namespace_id issues namespace_id issue_id]
        )
      }
    end
  end
end

View File

@ -19,6 +19,7 @@ RSpec.describe UserDetail, feature_category: :system_access do
let(:glm_source) { 'glm_source' }
let(:glm_content) { 'glm_content' }
let(:joining_project) { true }
let(:role) { 0 }
let(:onboarding_status) do
{
step_url: step_url,
@ -27,7 +28,8 @@ RSpec.describe UserDetail, feature_category: :system_access do
registration_type: registration_type,
glm_source: glm_source,
glm_content: glm_content,
joining_project: joining_project
joining_project: joining_project,
role: role
}
end
@ -145,6 +147,22 @@ RSpec.describe UserDetail, feature_category: :system_access do
end
end
context 'for role' do
  # Narrow the JSON payload down to just the role key for these examples;
  # `role` itself comes from the enclosing describe block.
  let(:onboarding_status) { { role: role } }

  it { is_expected.to allow_value(onboarding_status).for(:onboarding_status) }

  context "when 'role' is invalid" do
    # 10 is outside the accepted role enum range — TODO confirm against the schema validator.
    let(:role) { 10 }

    it { is_expected.not_to allow_value(onboarding_status).for(:onboarding_status) }
  end
end
context 'when there is no data' do
let(:onboarding_status) { {} }

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, feature_category: :virtual_registry do
RSpec.describe API::VirtualRegistries::Packages::Maven::Endpoints, :aggregate_failures, feature_category: :virtual_registry do
using RSpec::Parameterized::TableSyntax
include_context 'for maven virtual registry api setup'

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, feature_category: :virtual_registry do
RSpec.describe API::VirtualRegistries::Packages::Maven::Endpoints, :aggregate_failures, feature_category: :virtual_registry do
using RSpec::Parameterized::TableSyntax
include_context 'for maven virtual registry api setup'

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe API::VirtualRegistries::Packages::Maven, :aggregate_failures, feature_category: :virtual_registry do
RSpec.describe API::VirtualRegistries::Packages::Maven::Endpoints, :aggregate_failures, feature_category: :virtual_registry do
using RSpec::Parameterized::TableSyntax
include_context 'for maven virtual registry api setup'

Some files were not shown because too many files have changed in this diff Show More