Add latest changes from gitlab-org/gitlab@master

parent bf48291c0f
commit 330ecbc47d

@@ -19,4 +19,4 @@ variables:
# Retry failed specs in separate process
QA_RETRY_FAILED_SPECS: "true"
# helm chart ref used by test-on-cng pipeline
GITLAB_HELM_CHART_REF: "6ed126a4874a1a9155503b2191554a890df1129a"
GITLAB_HELM_CHART_REF: "3f89420cf319778195403711af12c57b4aab6511"

@@ -487,7 +487,6 @@ Layout/LineLength:
- 'ee/app/finders/projects/integrations/jira/by_ids_finder.rb'
- 'ee/app/finders/projects/integrations/jira/issues_finder.rb'
- 'ee/app/finders/security/pipeline_vulnerabilities_finder.rb'
- 'ee/app/finders/security/vulnerabilities_finder.rb'
- 'ee/app/graphql/ee/mutations/boards/lists/create.rb'
- 'ee/app/graphql/mutations/analytics/devops_adoption/enabled_namespaces/bulk_enable.rb'
- 'ee/app/graphql/mutations/audit_events/external_audit_event_destinations/create.rb'

@@ -1104,7 +1103,6 @@ Layout/LineLength:
- 'ee/spec/finders/projects/integrations/jira/by_ids_finder_spec.rb'
- 'ee/spec/finders/projects/integrations/jira/issues_finder_spec.rb'
- 'ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerability_reads_finder_spec.rb'
- 'ee/spec/finders/snippets_finder_spec.rb'
- 'ee/spec/finders/template_finder_spec.rb'

@@ -183,7 +183,6 @@ RSpec/ContextWording:
- 'ee/spec/finders/productivity_analytics_finder_spec.rb'
- 'ee/spec/finders/scim_finder_spec.rb'
- 'ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerability_reads_finder_spec.rb'
- 'ee/spec/finders/snippets_finder_spec.rb'
- 'ee/spec/finders/template_finder_spec.rb'

@@ -82,7 +82,6 @@ RSpec/NamedSubject:
- 'ee/spec/finders/projects/integrations/jira/issues_finder_spec.rb'
- 'ee/spec/finders/security/approval_groups_finder_spec.rb'
- 'ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerabilities_finder_spec.rb'
- 'ee/spec/finders/security/vulnerability_feedbacks_finder_spec.rb'
- 'ee/spec/finders/security/vulnerability_reads_finder_spec.rb'
- 'ee/spec/finders/snippets_finder_spec.rb'

@@ -215,7 +215,6 @@ Style/GuardClause:
- 'ee/app/controllers/smartcard_controller.rb'
- 'ee/app/finders/ee/template_finder.rb'
- 'ee/app/finders/iterations_finder.rb'
- 'ee/app/finders/security/vulnerabilities_finder.rb'
- 'ee/app/graphql/mutations/concerns/mutations/shared_epic_arguments.rb'
- 'ee/app/graphql/mutations/iterations/create.rb'
- 'ee/app/graphql/mutations/projects/set_locked.rb'

@@ -224,7 +224,6 @@ Style/IfUnlessModifier:
- 'ee/app/controllers/projects/path_locks_controller.rb'
- 'ee/app/controllers/projects/push_rules_controller.rb'
- 'ee/app/finders/security/pipeline_vulnerabilities_finder.rb'
- 'ee/app/finders/security/vulnerabilities_finder.rb'
- 'ee/app/graphql/mutations/audit_events/external_audit_event_destinations/create.rb'
- 'ee/app/graphql/mutations/audit_events/external_audit_event_destinations/destroy.rb'
- 'ee/app/graphql/mutations/boards/scoped_board_mutation.rb'

CHANGELOG.md
@@ -2,6 +2,16 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.

## 17.7.2 (2025-01-14)

### Fixed (1 change)

- [Fix handling of short gzip metadata files](https://gitlab.com/gitlab-org/gitlab/-/commit/aaa32a37cf34a2e171f5ae1b7ce1259796605f92) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/177633))

### Changed (1 change)

- [Remove `download_code` dependency from access to read merge requests](https://gitlab.com/gitlab-org/gitlab/-/commit/01a950b82192d6b93de58bf3678767f58c08c5f2) ([merge request](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176667))

## 17.7.1 (2025-01-08)

### Fixed (3 changes)

@@ -61,18 +61,17 @@ export default {
<div v-for="(lists, i) in tab.lists" :key="`lists_${i}`">
<div
v-if="i === 1 && newListsEnabled"
class="gl-mt-8 gl-rounded-base gl-bg-gray-50 gl-px-4 gl-py-2 gl-font-bold gl-text-subtle"
class="gl-mb-5 gl-mt-8 gl-rounded-base gl-bg-gray-50 gl-px-4 gl-py-2 gl-font-bold gl-text-subtle"
data-testid="merge-request-count-explanation"
>
{{ __('Items below are excluded from the active count') }}
</div>
<merge-requests-query
v-for="list in lists"
v-for="(list, listIndex) in lists"
:key="`list_${list.id}`"
:query="list.query"
:variables="list.variables"
:hide-count="list.hideCount"
:class="{ '!gl-mt-3': i === 0 }"
>
<template #default="{ mergeRequests, count, hasNextPage, loadMore, loading, error }">
<collapsible-section

@@ -81,6 +80,10 @@ export default {
:title="list.title"
:help-content="list.helpContent"
:loading="loading"
:class="{
'!gl-mt-0': newListsEnabled && listIndex === 0,
'!gl-mt-3': newListsEnabled && listIndex > 0,
}"
>
<div>
<div class="gl-overflow-x-auto">

@@ -102,7 +105,7 @@ export default {
<span class="gl-sr-only">{{ __('Pipeline status') }}</span>
</th>
<th
class="gl-px-3 gl-pb-3"
class="gl-pb-3 gl-pl-5 gl-pr-3"
:class="{ 'gl-text-sm gl-text-subtle': newListsEnabled }"
>
<template v-if="newListsEnabled">

@@ -94,7 +94,10 @@ export default {
/>
<gl-icon v-else name="dash" />
</td>
<td class="gl-px-3 gl-py-4 gl-align-top">
<td
class="gl-py-4 gl-align-top"
:class="{ 'gl-pl-5 gl-pr-3': newListsEnabled, 'gl-px-3': !newListsEnabled }"
>
<status-badge v-if="newListsEnabled" :merge-request="mergeRequest" :list-id="listId" />
<approval-count v-else :merge-request="mergeRequest" />
</td>

@@ -56,7 +56,7 @@ export default {
anchor: 'container-registry-garbage-collection',
}),
runCleanupPoliciesHelpUrl: helpPagePath('administration/packages/container_registry', {
anchor: 'run-the-cleanup-policy-now',
anchor: 'run-the-cleanup-policy',
}),
apollo: {
containerRepository: {

@@ -80,6 +80,9 @@ class NotesFinder
{ iid: iid }
end

# the reads finder needs to query by vulnerability_id
return noteables_for_type(type).find_by!(vulnerability_id: query[:id]) if type == 'vulnerability' # rubocop: disable CodeReuse/ActiveRecord

noteables_for_type(type).find_by!(query) # rubocop: disable CodeReuse/ActiveRecord
end

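The hunk above special-cases `vulnerability` noteables so the lookup goes through `vulnerability_id` instead of the primary key. A minimal sketch of a caller exercising that path; the `project`/`target_type`/`target_id` parameter names follow the finder's existing convention and are an assumption, not part of this commit:

```ruby
# Hypothetical caller. For type == 'vulnerability' the finder now runs
# noteables_for_type('vulnerability').find_by!(vulnerability_id: target_id).
finder = NotesFinder.new(
  current_user,
  project: project,
  target_type: 'vulnerability',
  target_id: vulnerability.id
)

finder.target # => the vulnerability-backed noteable whose notes are being listed
```
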
@@ -5,6 +5,8 @@ module Resolvers
class ProjectPipelineResolver < BaseResolver
include LooksAhead

calls_gitaly!

type ::Types::Ci::PipelineType, null: true

alias_method :project, :object

@@ -22,11 +24,11 @@ module Resolvers
required: false,
description: 'SHA of the Pipeline. For example, "dyd0f15ay83993f5ab66k927w28673882x99100b".'

validates required: { one_of: [:id, :iid, :sha], message: 'Provide one of ID, IID or SHA' }
validates mutually_exclusive: [:id, :iid, :sha]

def self.resolver_complexity(args, child_complexity:)
complexity = super
complexity - 10
complexity - 10 if args.present?
end

def resolve(id: nil, iid: nil, sha: nil, **args)

@@ -44,12 +46,14 @@ module Resolvers
apply_lookahead(finder.execute).each { |pipeline| loader.call(pipeline.iid.to_s, pipeline) }
end
else
elsif sha
BatchLoader::GraphQL.for(sha).batch(key: project) do |shas, loader|
finder = ::Ci::PipelinesFinder.new(project, current_user, sha: shas)

apply_lookahead(finder.execute).each { |pipeline| loader.call(pipeline.sha.to_s, pipeline) }
end
else
project.last_pipeline
end
end

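With `validates mutually_exclusive: [:id, :iid, :sha]`, the resolver now accepts at most one selector and falls back to `project.last_pipeline` when none is given. A hedged sketch of exercising both paths through the schema; the project path and variable values are placeholders, not taken from this commit:

```ruby
# Ruby sketch using GitlabSchema directly.
query = <<~GRAPHQL
  query($fullPath: ID!, $iid: ID) {
    project(fullPath: $fullPath) {
      pipeline(iid: $iid) {   # omit iid (and id/sha) entirely to get the latest pipeline
        id
        status
      }
    }
  }
GRAPHQL

GitlabSchema.execute(
  query,
  variables: { fullPath: 'group/project', iid: '42' },
  context: { current_user: current_user }
)
```

Passing more than one of `id`, `iid`, and `sha` now fails schema validation instead of being silently accepted.
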
@@ -370,7 +370,7 @@ module Types
field :pipelines,
null: true,
description: 'Build pipelines of the project.',
description: 'Pipelines of the project.',
extras: [:lookahead],
resolver: Resolvers::Ci::ProjectPipelinesResolver

@@ -389,13 +389,14 @@ module Types
field :pipeline, Types::Ci::PipelineType,
null: true,
description: 'Build pipeline of the project.',
description: 'Pipeline of the project. If no arguments are provided, returns the latest pipeline for the ' \
'head commit on the default branch',
extras: [:lookahead],
resolver: Resolvers::Ci::ProjectPipelineResolver

field :pipeline_counts, Types::Ci::PipelineCountsType,
null: true,
description: 'Build pipeline counts of the project.',
description: 'Pipeline counts of the project.',
resolver: Resolvers::Ci::ProjectPipelineCountsResolver

field :ci_variables, Types::Ci::ProjectVariableType.connection_type,

@@ -647,7 +647,7 @@ class Member < ApplicationRecord
end

def user_is_not_placeholder
if Gitlab::Import::PlaceholderUserCreator.placeholder_email_pattern.match?(invite_email)
if Gitlab::Import::PlaceholderUserCreator.placeholder_email?(invite_email)
errors.add(:invite_email, _('must not be a placeholder email'))
elsif user&.placeholder?
errors.add(:user_id, _("must not be a placeholder user"))

@@ -117,6 +117,16 @@ class WorkItem < Issue
def related_link_class
WorkItems::RelatedWorkItemLink
end

def sync_callback_class(association_name)
::WorkItems::DataSync::NonWidgets.const_get(association_name.to_s.camelcase, false)
rescue NameError
nil
end

def non_widgets
[:related_vulnerabilities]
end
end

def create_dates_source_from_current_dates

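`sync_callback_class` turns an association symbol into a data-sync handler class by constant lookup and returns `nil` when no handler is defined. A short sketch of the lookup, using the one association this commit registers and assuming the corresponding handler class exists under `WorkItems::DataSync::NonWidgets`:

```ruby
# :related_vulnerabilities.to_s.camelcase => "RelatedVulnerabilities", so the lookup is
# WorkItems::DataSync::NonWidgets::RelatedVulnerabilities (NameError is rescued to nil).
WorkItem.sync_callback_class(:related_vulnerabilities)
# => WorkItems::DataSync::NonWidgets::RelatedVulnerabilities

WorkItem.sync_callback_class(:some_unknown_association)
# => nil
```
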
@@ -57,7 +57,11 @@ module Import
update_params = {}
update_params[:name] = placeholder_creator.placeholder_name if params[:source_name]
update_params[:username] = placeholder_creator.placeholder_username if params[:source_username]

if params[:source_username]
update_params[:username] = placeholder_creator.send(:username_and_email_generator).username # rubocop:disable GitlabSecurity/PublicSend -- Safe to call, we don't want to publically expose this method.
end

update_params
end

@@ -34,6 +34,20 @@ module WorkItems
params: callback_params.merge({ operation: operation })
)
end

@callbacks += WorkItem.non_widgets.filter_map do |association_name|
sync_callback_class = WorkItem.sync_callback_class(association_name)
next if sync_callback_class.nil?

sync_callback_class.new(
work_item: original_work_item,
target_work_item: work_item,
current_user: current_user,
params: { operation: operation }
)
end

@callbacks
end
end
end

@@ -13,7 +13,11 @@ module WorkItems
end

def execute
cleanup_work_item_widgets_data if cleanup_data_source_work_item_data?
if cleanup_data_source_work_item_data?
cleanup_work_item_widgets_data
cleanup_work_item_non_widgets_data
end

cleanup_work_item
end

@@ -38,6 +42,21 @@ module WorkItems
end
end

def cleanup_work_item_non_widgets_data
WorkItem.non_widgets.filter_map do |association_name|
sync_callback_class = WorkItem.sync_callback_class(association_name)
next if sync_callback_class.nil?

data_handler = sync_callback_class.new(
work_item: work_item,
target_work_item: nil,
current_user: current_user,
params: params
)
data_handler.post_move_cleanup
end
end

def cleanup_work_item
close_service = Issues::CloseService.new(container: work_item.namespace, current_user: current_user)
close_service.execute(work_item, notifications: false, system_note: true)

@@ -0,0 +1,21 @@
- title: "`git_data_dirs` for configuring Gitaly storages"
# The milestones for the deprecation announcement, and the removal.
removal_milestone: "18.0"
announcement_milestone: "16.0"
# Change breaking_change to false if needed.
breaking_change: true
# The stage and GitLab username of the person reporting the change,
# and a link to the deprecation issue
reporter: jamesliu-gitlab
stage: data-access
issue_url: https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/8786
impact: medium # Can be one of: [critical, high, medium, low]
scope: instance # Can be one or a combination of: [instance, group, project]
resolution_role: Admin # Can be one of: [Admin, Owner, Maintainer, Developer]
manual_task: true # Can be true or false. Use this to denote whether a resolution action must be performed manually (true), or if it can be automated by using the API or other automation (false).
body: |
Support for using `git_data_dirs` to configure Gitaly storages for Linux package instances has been deprecated
[since 16.0](https://docs.gitlab.com/ee/update/versions/gitlab_16_changes.html#gitaly-configuration-structure-change) and will be removed in 18.0.

For migration instructions, see
[Migrating from `git_data_dirs`](https://docs.gitlab.com/omnibus/settings/configuration.html#migrating-from-git_data_dirs).

@@ -9,14 +9,6 @@ description: Relates epics to epic boards by position, unique to each combinatio
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/48120
milestone: '13.7'
gitlab_schema: gitlab_main_cell
desired_sharding_key:
group_id:
references: namespaces
backfill_via:
parent:
foreign_key: epic_board_id
table: boards_epic_boards
sharding_key: group_id
belongs_to: epic_board
desired_sharding_key_migration_job_name: BackfillBoardsEpicBoardPositionsGroupId
table_size: small
sharding_key:
group_id: namespaces

@@ -0,0 +1,14 @@
# frozen_string_literal: true

class AddBoardsEpicBoardPositionsGroupIdNotNullConstraint < Gitlab::Database::Migration[2.2]
disable_ddl_transaction!
milestone '17.9'

def up
add_not_null_constraint :boards_epic_board_positions, :group_id
end

def down
remove_not_null_constraint :boards_epic_board_positions, :group_id
end
end

@@ -0,0 +1 @@
238788e34814fd270b40a8bd1e1d9fa22a620ec987196c8a80c4f38b06a57edf

@@ -9108,7 +9108,8 @@ CREATE TABLE boards_epic_board_positions (
relative_position integer,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
group_id bigint
group_id bigint,
CONSTRAINT check_9d94ce874e CHECK ((group_id IS NOT NULL))
);

CREATE SEQUENCE boards_epic_board_positions_id_seq

@@ -1001,22 +1001,19 @@ notifications:
::EndTabs

## Run the Cleanup policy now
## Run the cleanup policy

WARNING:
If you're using a distributed architecture and Sidekiq is running on a different node, the cleanup
policies don't work. To fix this:
Prerequisites:

1. Configure the `gitlab.rb` file on the Sidekiq nodes to
point to the correct registry URL.
1. Copy the `registry.key` file to each Sidekiq node.
- If you use a distributed architecture where the container registry runs on a different node than Sidekiq, follow the steps in [Configure the container registry when using an external Sidekiq](../sidekiq/index.md#configure-the-container-registry-when-using-an-external-sidekiq).

For more information, see the [Sidekiq configuration](../sidekiq/index.md)
page.
After you [create a cleanup policy](../../user/packages/container_registry/reduce_container_registry_storage.md#create-a-cleanup-policy), you can run it immediately to reduce the container registry storage space. You don't have to wait for the scheduled cleanup.

To reduce the amount of [Container Registry disk space used by a given project](#registry-disk-space-usage-by-project),
administrators can set up cleanup policies
and [run garbage collection](#container-registry-garbage-collection).
To reduce the amount of container registry disk space used by a given project, administrators can:

1. [Check disk space usage by project](#registry-disk-space-usage-by-project) to identify projects that need cleanup.
1. Run the cleanup policy using the GitLab Rails console to remove image tags.
1. [Run garbage collection](#container-registry-garbage-collection) to remove unreferenced layers and untagged manifests.

### Registry Disk Space Usage by Project

@@ -60,11 +60,11 @@ This content has been moved to [Activate GitLab EE with a license file or key](.
### Registry Disk Space Usage by Project

Find this content in the [container registry troubleshooting documentation](../packages/container_registry.md#registry-disk-space-usage-by-project).
To view storage space by project in the container registry, see [Registry Disk Space Usage by Project](../packages/container_registry.md#registry-disk-space-usage-by-project).

### Run the Cleanup policy now
### Run the cleanup policy

Find this content in the [container registry troubleshooting documentation](../packages/container_registry.md#run-the-cleanup-policy-now).
To reduce storage space in the container registry, see [Run the cleanup policy](../packages/container_registry.md#run-the-cleanup-policy).

## Sidekiq

@@ -33350,7 +33350,7 @@ four standard [pagination arguments](#pagination-arguments):
##### `Project.pipeline`

Build pipeline of the project.
Pipeline of the project. If no arguments are provided, returns the latest pipeline for the head commit on the default branch.

Returns [`Pipeline`](#pipeline).

@@ -33379,7 +33379,7 @@ Returns [`PipelineAnalytics`](#pipelineanalytics).
##### `Project.pipelineCounts`

Build pipeline counts of the project.
Pipeline counts of the project.

Returns [`PipelineCounts`](#pipelinecounts).

@@ -33428,7 +33428,7 @@ four standard [pagination arguments](#pagination-arguments):
##### `Project.pipelines`

Build pipelines of the project.
Pipelines of the project.

Returns [`PipelineConnection`](#pipelineconnection).

@@ -110,3 +110,85 @@ Example response:
...
]
```

## Get details on an enterprise user

> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/176328) in GitLab 17.9.

Gets details on a specified enterprise user.

```plaintext
GET /groups/:id/enterprise_users/:user_id
```

Supported attributes:

| Attribute | Type | Required | Description |
|:-----------------|:---------------|:---------|:------------|
| `id` | integer/string | yes | ID or [URL-encoded path](rest/index.md#namespaced-paths) of a top-level group. |
| `user_id` | integer | yes | ID of user account. |

Example request:

```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/groups/:id/enterprise_users/:user_id"
```

Example response:

```json
{
"id": 66,
"username": "user22",
"name": "Sidney Jones22",
"state": "active",
"avatar_url": "https://www.gravatar.com/avatar/xxx?s=80&d=identicon",
"web_url": "http://my.gitlab.com/user22",
"created_at": "2021-09-10T12:48:22.381Z",
"bio": "",
"location": null,
"public_email": "",
"skype": "",
"linkedin": "",
"twitter": "",
"website_url": "",
"organization": null,
"job_title": "",
"pronouns": null,
"bot": false,
"work_information": null,
"followers": 0,
"following": 0,
"local_time": null,
"last_sign_in_at": null,
"confirmed_at": "2021-09-10T12:48:22.330Z",
"last_activity_on": null,
"email": "user22@example.org",
"theme_id": 1,
"color_scheme_id": 1,
"projects_limit": 100000,
"current_sign_in_at": null,
"identities": [
{
"provider": "group_saml",
"extern_uid": "2435223452345",
"saml_provider_id": 1
}
],
"can_create_group": true,
"can_create_project": true,
"two_factor_enabled": false,
"external": false,
"private_profile": false,
"commit_email": "user22@example.org",
"shared_runners_minutes_limit": null,
"extra_shared_runners_minutes_limit": null,
"scim_identities": [
{
"extern_uid": "2435223452345",
"group_id": 1,
"active": true
}
]
}
```

@@ -1460,6 +1460,26 @@ Update all references to `ciUsedMinutes` from these types to `ciDuration`.
<div class="deprecation breaking-change" data-milestone="18.0">

### `git_data_dirs` for configuring Gitaly storages

<div class="deprecation-notes">

- Announced in GitLab <span class="milestone">16.0</span>
- Removal in GitLab <span class="milestone">18.0</span> ([breaking change](https://docs.gitlab.com/ee/update/terminology.html#breaking-change))
- To discuss this change or learn more, see the [deprecation issue](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/8786).

</div>

Support for using `git_data_dirs` to configure Gitaly storages for Linux package instances has been deprecated
[since 16.0](https://docs.gitlab.com/ee/update/versions/gitlab_16_changes.html#gitaly-configuration-structure-change) and will be removed in 18.0.

For migration instructions, see
[Migrating from `git_data_dirs`](https://docs.gitlab.com/omnibus/settings/configuration.html#migrating-from-git_data_dirs).

</div>

<div class="deprecation breaking-change" data-milestone="18.0">

### `mergeTrainIndex` and `mergeTrainsCount` GraphQL fields deprecated

<div class="deprecation-notes">

@@ -1297,7 +1297,7 @@ configuration. Some `gitaly['..']` configuration options continue to be used by
- `consul_service_name`
- `consul_service_meta`

Migrate by moving your existing configuration under the new structure. `git_data_dirs` is supported [until GitLab 17.0](https://gitlab.com/gitlab-org/gitaly/-/issues/5133). The new structure is supported from GitLab 15.10.
Migrate by moving your existing configuration under the new structure. `git_data_dirs` is supported [until GitLab 18.0](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/8786). The new structure is supported from GitLab 15.10.

**Migrate to the new structure**

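For context on the deprecation extended above, a hedged sketch of what the `gitlab.rb` migration described in the linked guide looks like. The storage name and paths are illustrative; take the exact keys and paths from the linked "Migrating from `git_data_dirs`" documentation.

```ruby
# Deprecated structure (removal planned for GitLab 18.0):
git_data_dirs({
  "default" => { "path" => "/var/opt/gitlab/git-data" }
})

# New structure (supported from GitLab 15.10); note that the storage path
# now points at the repositories directory itself:
gitaly['configuration'] = {
  storage: [
    { name: 'default', path: '/var/opt/gitlab/git-data/repositories' }
  ]
}
```
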
@@ -129,3 +129,8 @@ To create test coverage:
- If the merge request includes a test file, it is updated with the suggested tests.
- If the merge request does not include a test file, Amazon Q populates a comment with the suggested tests.

## Related topics

- [Set up GitLab Duo with Amazon Q](setup.md)
- [GitLab Duo authentication and authorization](../gitlab_duo/security.md)

@@ -0,0 +1,46 @@
---
stage: AI-powered
group: AI Framework
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments
---

# GitLab Duo authentication and authorization

> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/506641) in GitLab 17.9.

GitLab Duo with Amazon Q uses a composite identity to authenticate requests.

NOTE:
Support for a composite identity in other areas of the product
is proposed in [issue 511373](https://gitlab.com/gitlab-org/gitlab/-/issues/511373).

The token that authenticates requests is a composite of two identities:

- The primary author, which is the Amazon Q [service account](../profile/service_accounts.md).
This service account is instance-wide and has the Developer role
on the project where the Amazon Q quick action was used. The service account is the owner of the token.
- The secondary author, which is the human user who submitted the quick action.
This user's `id` is included in the scopes of the token.

This composite identity ensures that any activities authored by Amazon Q are
correctly attributed to the Amazon Q service account.
At the same time, the composite identity ensures that there is no
[privilege escalation](https://en.wikipedia.org/wiki/Privilege_escalation) for the human user.

This [dynamic scope](https://github.com/doorkeeper-gem/doorkeeper/pull/1739)
is checked during the authorization of the API request.
When authorization is requested, GitLab validates that both the service account
and the user who originated the quick action have sufficient permissions.

```mermaid
%%{init: { "fontFamily": "GitLab Sans" }}%%
flowchart TD
A[API Request] --> B{Human user has access?}
B -->|No| D[Access denied]
B -->|Yes| C{Service account has access?}
C -->|No| D
C -->|Yes| E[API request succeeds]

style D fill:#ffcccc
style E fill:#ccffcc
```

@@ -476,7 +476,7 @@ To copy the task's email address:
Prerequisites:

- You must have at least the Reporter role for the project.
- You must have at least the Guest role for the project.
- The issue and task must belong to the same project.

To set an issue as a parent of a task:

@@ -29,16 +29,20 @@ module CsvBuilder
# * +header_to_value_hash+ - A hash of 'Column Heading' => 'value_method'.
# * +associations_to_preload+ - An array of records to preload with a batch of records.
# * +replace_newlines+ - default: false - If true, replaces newline characters with a literal "\n"
# * +order_hint+ - default: :created_at - The column used to order the rows
#
# The value method will be called once for each object in the collection, to
# determine the value for that row. It can either be the name of a method on
# the object, or a lamda to call passing in the object.
def self.new(collection, header_to_value_hash, associations_to_preload = [], replace_newlines: false)
def self.new(
collection, header_to_value_hash, associations_to_preload = [], replace_newlines: false,
order_hint: :created_at)
CsvBuilder::Builder.new(
collection,
header_to_value_hash,
associations_to_preload,
replace_newlines: replace_newlines
replace_newlines: replace_newlines,
order_hint: order_hint
)
end
end

@@ -6,13 +6,16 @@ module CsvBuilder
attr_reader :rows_written

def initialize(collection, header_to_value_hash, associations_to_preload = [], replace_newlines: false)
def initialize(
collection, header_to_value_hash, associations_to_preload = [], replace_newlines: false,
order_hint: :created_at)
@header_to_value_hash = header_to_value_hash
@collection = collection
@truncated = false
@rows_written = 0
@associations_to_preload = associations_to_preload
@replace_newlines = replace_newlines
@order_hint = order_hint
end

# Renders the csv to a string

@@ -57,7 +60,7 @@ module CsvBuilder
def each(&block)
if @associations_to_preload&.any? && @collection.respond_to?(:each_batch)
@collection.each_batch(order_hint: :created_at) do |relation|
@collection.each_batch(order_hint: @order_hint) do |relation|
relation.preload(@associations_to_preload).order(:id).each(&block)
end
elsif @collection.respond_to?(:find_each)

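A hedged usage sketch of the new `order_hint:` keyword added above. The collection, column mapping, and preloaded association are illustrative, and `render` is assumed to behave as in the existing builder:

```ruby
# Batches are now iterated with the supplied order column instead of the
# previously hard-coded :created_at.
builder = CsvBuilder.new(
  Issue.where(project_id: project.id),   # any relation that responds to each_batch
  { 'ID' => :id, 'Title' => :title },    # 'Column Heading' => value_method
  [:author],                             # associations preloaded per batch
  order_hint: :updated_at
)

csv = builder.render
```
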
@@ -215,6 +215,7 @@ module Gitlab
def self.clear_stubs!
MUTEX.synchronize do
@channels&.each_value(&:close)
@stubs = nil
@channels = nil
end

@@ -56,7 +56,8 @@ module Gitlab
def username_and_email_generator
Gitlab::Utils::UsernameAndEmailGenerator.new(
username_prefix: username_prefix,
email_domain: "noreply.#{Gitlab.config.gitlab.host}"
email_domain: "noreply.#{Gitlab.config.gitlab.host}",
random_segment: random_segment
)
end
strong_memoize_attr :username_and_email_generator

@@ -74,7 +75,11 @@ module Gitlab
end

def username_prefix
"import_user_namespace_#{root_ancestor.id}"
"import_user_#{root_ancestor.path}"
end

def random_segment
SecureRandom.alphanumeric(4)
end

def import_user_in_cache

@@ -3,19 +3,22 @@
module Gitlab
module Import
class PlaceholderUserCreator
LAMBDA_FOR_UNIQUE_USERNAME = ->(username) do
::Namespace.by_path(username) || User.username_exists?(username)
end.freeze
LAMBDA_FOR_UNIQUE_EMAIL = ->(email) do
User.find_by_email(email) || ::Email.find_by_email(email)
end.freeze

delegate :import_type, :namespace, :source_user_identifier, :source_name, :source_username, to: :source_user,
private: true

def self.placeholder_email_pattern
import_type_matcher = ::Import::HasImportSource::IMPORT_SOURCES.except(:none).keys.join('|')
::Gitlab::UntrustedRegexp.new("(#{import_type_matcher})(_[0-9A-Fa-f]+_[0-9]+@#{Settings.gitlab.host})")
PLACEHOLDER_EMAIL_REGEX = ::Gitlab::UntrustedRegexp.new(
"_placeholder_[[:alnum:]]+@noreply.#{Settings.gitlab.host}"
)
LEGACY_PLACEHOLDER_EMAIL_REGEX = ::Gitlab::UntrustedRegexp.new(
"(#{::Import::HasImportSource::IMPORT_SOURCES.except(:none).keys.join('|')})" \
'(_[0-9A-Fa-f]+_[0-9]+' \
"@#{Settings.gitlab.host})"
)

class << self
def placeholder_email?(email)
PLACEHOLDER_EMAIL_REGEX.match?(email) || LEGACY_PLACEHOLDER_EMAIL_REGEX.match?(email)
end
end

def initialize(source_user)

@@ -26,8 +29,8 @@ module Gitlab
user = User.new(
user_type: :placeholder,
name: placeholder_name,
username: placeholder_username,
email: placeholder_email
username: username_and_email_generator.username,
email: username_and_email_generator.email
)

user.skip_confirmation_notification!

@@ -46,47 +49,34 @@ module Gitlab
"Placeholder #{source_name.slice(0, 127)}"
end

def placeholder_username
# Some APIs don't expose users' usernames, so set a default if it's nil
username_pattern = "#{valid_username_segment}_placeholder_user_%s"

uniquify_string(username_pattern, LAMBDA_FOR_UNIQUE_USERNAME)
end

private

attr_reader :source_user

def placeholder_email
email_pattern = "#{fallback_username_segment}_%s@#{Settings.gitlab.host}"

uniquify_string(email_pattern, LAMBDA_FOR_UNIQUE_EMAIL)
def username_and_email_generator
@generator ||= Gitlab::Utils::UsernameAndEmailGenerator.new(
username_prefix: username_prefix,
email_domain: "noreply.#{Gitlab.config.gitlab.host}",
random_segment: random_segment
)
end

def username_prefix
"#{valid_username_segment}_placeholder"
end

# Some APIs don't expose users' usernames, so set a fallback if it's nil
def valid_username_segment
return fallback_username_segment unless source_username
return import_type unless source_username

sanitized_source_username = source_username.gsub(/[^A-Za-z0-9]/, '')
return fallback_username_segment if sanitized_source_username.empty?
return import_type if sanitized_source_username.empty?

sanitized_source_username.slice(0, User::MAX_USERNAME_LENGTH - 55)
end

# Returns a string based on the import type, and digest of namespace path and source user identifier.
# Example: "gitlab_migration_64c4f07e"
def fallback_username_segment
@fallback_username_segment ||= [
import_type,
Zlib.crc32([namespace.path, source_user_identifier].join).to_s(16)
].join('_')
end

def uniquify_string(base_pattern, lambda_for_uniqueness)
uniquify = Gitlab::Utils::Uniquify.new(1)

uniquify.string(->(unique_number) { format(base_pattern, unique_number) }) do |str|
lambda_for_uniqueness.call(str)
end
def random_segment
Zlib.crc32([namespace.path, source_user_identifier].join).to_s(36)
end

def log_placeholder_user_creation(user)

@@ -33,12 +33,21 @@ module Gitlab
# @return [Import::SourceUser, nil] The found source user object, or `nil` if no match is found.
def find_source_user(source_user_identifier)
cache_from_request_store[source_user_identifier] ||= ::Import::SourceUser.uncached do
::Import::SourceUser.find_source_user(
source_user = ::Import::SourceUser.find_source_user(
source_user_identifier: source_user_identifier,
namespace: namespace,
source_hostname: source_hostname,
import_type: import_type
)

# If the record has no `#mapped_user_id`, the record would be unusuable for import.
# It can be in this state if the reassigned_to_user, or placeholder_user were deleted
# unexpectedly. We intentionally do not have a cascade delete association with
# users on this record as we do not want to have unmapped contributions be lost.
# In this situation we reset the record.
source_user = reset_source_user!(source_user) if reset_source_user?(source_user)

source_user
end
end

@@ -120,6 +129,45 @@ module Gitlab
::Import::PlaceholderUserLimit.new(namespace: namespace).exceeded?
end

def reset_source_user?(source_user)
source_user && source_user.mapped_user_id.nil?
end

def reset_source_user!(source_user)
in_lock(
lock_key(source_user.source_user_identifier), ttl: LOCK_TTL, sleep_sec: LOCK_SLEEP, retries: LOCK_RETRIES
) do |retried|
if retried
source_user.reset
next source_user unless reset_source_user?(source_user)
end

::Import::Framework::Logger.info(
message: 'Resetting source user state',
source_user_id: source_user.id,
source_user_status: source_user.status,
source_user_reassign_to_user_id: source_user.reassign_to_user_id,
source_user_placeholder_user_id: source_user.placeholder_user_id
)

source_user.status = 0
source_user.reassignment_token = nil
source_user.reassign_to_user = nil
source_user.placeholder_user ||= create_placeholder_user(source_user)

next source_user if source_user.save

::Import::Framework::Logger.error(
message: 'Failed to save source user after resetting',
source_user_id: source_user.id,
source_user_validation_errors: source_user.errors.full_messages
)

source_user.destroy
nil
end
end

def lock_key(source_user_identifier)
"import:source_user_mapper:#{namespace.id}:#{import_type}:#{source_hostname}:#{source_user_identifier}"
end

@@ -7,14 +7,20 @@ module Gitlab
class UsernameAndEmailGenerator
include Gitlab::Utils::StrongMemoize

def initialize(username_prefix:, email_domain: Gitlab.config.gitlab.host)
def initialize(username_prefix:, email_domain: Gitlab.config.gitlab.host, random_segment: SecureRandom.hex(16))
@username_prefix = username_prefix
@email_domain = email_domain
@random_segment = random_segment
end

def username
uniquify.string(->(counter) { Kernel.sprintf(username_pattern, counter) }) do |suggested_username|
::Namespace.by_path(suggested_username) || ::User.find_by_any_email(email_for(suggested_username))
suggested_email = email_for(suggested_username)

::Namespace.by_path(suggested_username) ||
::User.username_exists?(suggested_username) ||
::User.find_by_any_email(suggested_email) ||
::Email.find_by_email(suggested_email)
end
end
strong_memoize_attr :username

@@ -26,8 +32,10 @@ module Gitlab
private

attr_reader :random_segment, :username_prefix

def username_pattern
"#{@username_prefix}_#{SecureRandom.hex(16)}%s"
"#{username_prefix}_#{random_segment}%s"
end

def email_for(name)

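A hedged sketch of constructing the generator with the new `random_segment:` keyword. The prefix value is illustrative (the callers in this commit pass values like `"import_user_#{root_ancestor.path}"`), and `username`/`email` are assumed to be the generator's existing public readers:

```ruby
generator = Gitlab::Utils::UsernameAndEmailGenerator.new(
  username_prefix: 'import_user_my-group',
  email_domain: "noreply.#{Gitlab.config.gitlab.host}",
  random_segment: SecureRandom.alphanumeric(4)   # defaults to SecureRandom.hex(16)
)

generator.username # => e.g. "import_user_my-group_<random_segment>", suffixed if already taken
generator.email    # => the matching address at "noreply.<host>"
```
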
@@ -23,7 +23,6 @@ namespace :tw do
# CodeOwnerRule.new('Activation', ''),
# CodeOwnerRule.new('Acquisition', ''),
CodeOwnerRule.new('AI Framework', '@sselhorn'),
# CodeOwnerRule.new('AI Model Validation', ''),
# CodeOwnerRule.new('Analytics Instrumentation', ''),
CodeOwnerRule.new('Authentication', '@idurham'),
CodeOwnerRule.new('Authorization', '@idurham'),

@@ -268,7 +268,7 @@
"commander": "^2.20.3",
"crypto": "^1.0.1",
"custom-jquery-matchers": "^2.1.0",
"eslint": "9.17.0",
"eslint": "9.18.0",
"eslint-formatter-gitlab": "^5.1.0",
"eslint-import-resolver-jest": "3.0.2",
"eslint-import-resolver-webpack": "0.13.10",

@@ -120,7 +120,7 @@ exports[`Merge request dashboard merge request component when newListsEnabled is
class="gl-border-b"
>
<td
class="gl-align-top gl-px-3 gl-py-4"
class="gl-align-top gl-pl-5 gl-pr-3 gl-py-4"
>
<status-badge-stub
listid="returned_to_you"

@@ -309,7 +309,7 @@ describe('Details Page', () => {
runCleanupPoliciesHelpPagePath: helpPagePath(
'administration/packages/container_registry',
{
anchor: 'run-the-cleanup-policy-now',
anchor: 'run-the-cleanup-policy',
},
),
});

@@ -5,9 +5,10 @@ require 'spec_helper'
RSpec.describe Resolvers::Ci::ProjectPipelineResolver, feature_category: :continuous_integration do
include GraphqlHelpers

let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: 'sha') }
let_it_be(:other_project_pipeline) { create(:ci_pipeline, project: project, sha: 'sha2') }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:project_pipeline_1) { create(:ci_pipeline, project: project, sha: project.commit.sha) }
let_it_be(:project_pipeline_2) { create(:ci_pipeline, project: project, sha: 'sha') }
let_it_be(:project_pipeline_3) { create(:ci_pipeline, project: project, sha: 'sha2') }
let_it_be(:other_pipeline) { create(:ci_pipeline) }

let(:current_user) { create(:user, developer_of: project) }

@@ -23,27 +24,27 @@ RSpec.describe Resolvers::Ci::ProjectPipelineResolver, feature_category: :contin
it 'resolves pipeline for the passed id' do
expect(Ci::PipelinesFinder)
.to receive(:new)
.with(project, current_user, ids: [pipeline.id.to_s])
.with(project, current_user, ids: [project_pipeline_1.id.to_s])
.and_call_original

result = batch_sync do
resolve_pipeline(project, { id: "gid://gitlab/Ci::Pipeline/#{pipeline.id}" })
resolve_pipeline(project, { id: project_pipeline_1.to_global_id })
end

expect(result).to eq(pipeline)
expect(result).to eq(project_pipeline_1)
end

it 'resolves pipeline for the passed iid' do
expect(Ci::PipelinesFinder)
.to receive(:new)
.with(project, current_user, iids: [pipeline.iid.to_s])
.with(project, current_user, iids: [project_pipeline_1.iid.to_s])
.and_call_original

result = batch_sync do
resolve_pipeline(project, { iid: pipeline.iid.to_s })
resolve_pipeline(project, { iid: project_pipeline_1.iid.to_s })
end

expect(result).to eq(pipeline)
expect(result).to eq(project_pipeline_1)
end

it 'resolves pipeline for the passed sha' do

@@ -56,30 +57,30 @@ RSpec.describe Resolvers::Ci::ProjectPipelineResolver, feature_category: :contin
resolve_pipeline(project, { sha: 'sha' })
end

expect(result).to eq(pipeline)
expect(result).to eq(project_pipeline_2)
end

it 'keeps the queries under the threshold for id' do
control = ActiveRecord::QueryRecorder.new do
batch_sync { resolve_pipeline(project, { id: "gid://gitlab/Ci::Pipeline/#{pipeline.id}" }) }
batch_sync { resolve_pipeline(project, { id: project_pipeline_1.to_global_id }) }
end

expect do
batch_sync do
resolve_pipeline(project, { id: "gid://gitlab/Ci::Pipeline/#{pipeline.id}" })
resolve_pipeline(project, { id: "gid://gitlab/Ci::Pipeline/#{other_project_pipeline.id}" })
resolve_pipeline(project, { id: project_pipeline_1.to_global_id })
resolve_pipeline(project, { id: project_pipeline_2.to_global_id })
end
end.not_to exceed_query_limit(control)
end

it 'keeps the queries under the threshold for iid' do
control = ActiveRecord::QueryRecorder.new do
batch_sync { resolve_pipeline(project, { iid: pipeline.iid.to_s }) }
batch_sync { resolve_pipeline(project, { iid: project_pipeline_1.iid.to_s }) }
end

expect do
batch_sync do
resolve_pipeline(project, { iid: pipeline.iid.to_s })
resolve_pipeline(project, { iid: project_pipeline_1.iid.to_s })
resolve_pipeline(project, { iid: other_pipeline.iid.to_s })
end
end.not_to exceed_query_limit(control)

@@ -100,7 +101,7 @@ RSpec.describe Resolvers::Ci::ProjectPipelineResolver, feature_category: :contin
it 'does not resolve a pipeline outside the project' do
result = batch_sync do
resolve_pipeline(other_pipeline.project, { iid: pipeline.iid.to_s })
resolve_pipeline(other_pipeline.project, { iid: project_pipeline_1.iid.to_s })
end

expect(result).to be_nil

@@ -108,34 +109,72 @@ RSpec.describe Resolvers::Ci::ProjectPipelineResolver, feature_category: :contin
it 'does not resolve a pipeline outside the project' do
result = batch_sync do
resolve_pipeline(other_pipeline.project, { id: "gid://gitlab/Ci::Pipeline/#{pipeline.id}9" })
resolve_pipeline(other_pipeline.project, { id: project_pipeline_1.to_global_id })
end

expect(result).to be_nil
end

it 'errors when no id, iid or sha is passed' do
expect_graphql_error_to_be_created(GraphQL::Schema::Validator::ValidationFailedError) do
resolve_pipeline(project, {})
context 'when no id, iid or sha is passed' do
it 'returns latest pipeline' do
result = batch_sync do
resolve_pipeline(project, {})
end

expect(result).to eq(project_pipeline_1)
end

it 'does not reduce complexity score' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: described_class,
null: false, max_page_size: 1)

expect(field.complexity.call({}, {}, 1)).to eq 2
end
end

context 'when id is passed' do
it 'reduces complexity score' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: described_class,
null: false, max_page_size: 1)

expect(field.complexity.call({}, { id: project_pipeline_1.to_global_id }, 1)).to eq(-7)
end
end

context 'when iid is passed' do
it 'reduces complexity score' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: described_class,
null: false, max_page_size: 1)

expect(field.complexity.call({}, { iid: project_pipeline_1.iid.to_s }, 1)).to eq(-7)
end
end

context 'when sha is passed' do
it 'reduces complexity score' do
field = Types::BaseField.new(name: 'test', type: GraphQL::Types::String, resolver_class: described_class,
null: false, max_page_size: 1)

expect(field.complexity.call({}, { sha: 'sha' }, 1)).to eq(-7)
end
end

it 'errors when both iid and sha are passed' do
expect_graphql_error_to_be_created(GraphQL::Schema::Validator::ValidationFailedError) do
resolve_pipeline(project, { iid: pipeline.iid.to_s, sha: 'sha' })
resolve_pipeline(project, { iid: project_pipeline_1.iid.to_s, sha: 'sha' })
end
end

it 'errors when both id and iid are passed' do
expect_graphql_error_to_be_created(GraphQL::Schema::Validator::ValidationFailedError) do
resolve_pipeline(project, { id: "gid://gitlab/Ci::Pipeline/#{pipeline.id}", iid: pipeline.iid.to_s })
resolve_pipeline(project, { id: project_pipeline_1.to_global_id, iid: project_pipeline_1.iid.to_s })
end
end

it 'errors when id, iid and sha are passed' do
expect_graphql_error_to_be_created(GraphQL::Schema::Validator::ValidationFailedError) do
resolve_pipeline(project,
{ id: "gid://gitlab/Ci::Pipeline/#{pipeline.id}", iid: pipeline.iid.to_s, sha: '12345234' })
{ id: project_pipeline_1.to_global_id, iid: project_pipeline_1.iid.to_s, sha: '12345234' })
end
end

@@ -147,10 +186,10 @@ RSpec.describe Resolvers::Ci::ProjectPipelineResolver, feature_category: :contin
it 'resolves pipeline for the passed iid' do
result = batch_sync do
resolve_pipeline(project, { iid: pipeline.iid.to_s })
resolve_pipeline(project, { iid: project_pipeline_1.iid.to_s })
end

expect(result).to eq(pipeline)
expect(result).to eq(project_pipeline_1)
end
end
end

@@ -15,6 +15,13 @@ RSpec.describe Gitlab::Import::ImportUserCreator, :request_store, feature_catego
expect(user.namespace.organization).to eq(group.organization)
end

it_behaves_like 'username and email pair is generated by Gitlab::Utils::UsernameAndEmailGenerator' do
subject(:result) { service.execute }

let(:username_prefix) { "import_user_#{group.path}" }
let(:email_domain) { 'noreply.localhost' }
end

context 'when import user already exists' do
it 'returns existing import user' do
user = create(:user)

@ -3,6 +3,8 @@
|
|||
require 'spec_helper'
|
||||
|
||||
RSpec.describe Gitlab::Import::PlaceholderUserCreator, feature_category: :importers do
|
||||
using RSpec::Parameterized::TableSyntax
|
||||
|
||||
let_it_be(:namespace) { create(:namespace) }
|
||||
|
||||
let(:import_type) { 'github' }
|
||||
|
|
@ -31,11 +33,18 @@ RSpec.describe Gitlab::Import::PlaceholderUserCreator, feature_category: :import
|
|||
new_placeholder_user = User.where(user_type: :placeholder).last
|
||||
|
||||
expect(new_placeholder_user.name).to eq("Placeholder #{source_name}")
|
||||
expect(new_placeholder_user.username).to match(/^aprycontributor_placeholder_user_\d+$/)
|
||||
expect(new_placeholder_user.email).to match(/^#{import_type}_\h+_\d+@#{Settings.gitlab.host}$/)
|
||||
expect(new_placeholder_user.username).to match(/^aprycontributor_placeholder_[[:alnum:]]+$/)
|
||||
expect(new_placeholder_user.email).to match(/^aprycontributor_placeholder_[[:alnum:]]+@noreply.localhost$/)
|
||||
expect(new_placeholder_user.namespace.organization).to eq(namespace.organization)
|
||||
end
|
||||
|
||||
it_behaves_like 'username and email pair is generated by Gitlab::Utils::UsernameAndEmailGenerator' do
|
||||
subject(:result) { service.execute }
|
||||
|
||||
let(:username_prefix) { 'aprycontributor_placeholder' }
|
||||
let(:email_domain) { 'noreply.localhost' }
|
||||
end
|
||||
|
||||
it 'does not cache user policies', :request_store do
|
||||
expect { service.execute }.not_to change {
|
||||
Gitlab::SafeRequestStore.storage.keys.select do |key|
|
||||
|
|
@ -62,50 +71,36 @@ RSpec.describe Gitlab::Import::PlaceholderUserCreator, feature_category: :import
|
|||
|
||||
context 'when there are non-unique usernames on the same import source' do
|
||||
it 'creates two unique users with different usernames and emails' do
|
||||
placeholder_user1 = service.execute
|
||||
placeholder_user2 = service.execute
|
||||
placeholder_user1 = described_class.new(source_user).execute
|
||||
placeholder_user2 = described_class.new(source_user).execute
|
||||
|
||||
expect(placeholder_user1.username).not_to eq(placeholder_user2.username)
|
||||
expect(placeholder_user1.email).not_to eq(placeholder_user2.email)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when an existing namespace conflicts with the placeholder user namespace' do
|
||||
before do
|
||||
create(:group, path: 'aprycontributor_placeholder_user_1')
|
||||
end
|
||||
context 'when source_name is nil' do
|
||||
let(:source_name) { nil }
|
||||
|
||||
it 'creates a placeholder with a username that avoids the conflict' do
|
||||
placeholder_user1 = service.execute
|
||||
it 'assigns a default name' do
|
||||
placeholder_user = service.execute
|
||||
|
||||
expect(placeholder_user1.username).to eq('aprycontributor_placeholder_user_2')
|
||||
expect(placeholder_user.name).to eq("Placeholder #{import_type} Source User")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when generating a unique email address' do
|
||||
it 'validates against all stored email addresses' do
|
||||
allow(Zlib).to receive(:crc32).and_return(123)
|
||||
|
||||
existing_user = create(:user, email: 'github_7b_1@localhost')
|
||||
existing_user.emails.create!(email: 'github_7b_2@localhost')
|
||||
context 'when source_username is nil' do
|
||||
let(:source_username) { nil }
|
||||
|
||||
it 'generates a fallback username and email, and default name' do
|
||||
placeholder_user = service.execute
|
||||
|
||||
expect(placeholder_user.email).to eq('github_7b_3@localhost')
|
||||
expect(placeholder_user.username).to match(/^#{import_type}_placeholder_[[:alnum:]]+$/)
|
||||
expect(placeholder_user.email).to match(/^#{import_type}_placeholder_[[:alnum:]]+@noreply.localhost$/)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the incoming source_user attributes are invalid' do
|
||||
context 'when source_name is nil' do
|
||||
let(:source_name) { nil }
|
||||
|
||||
it 'assigns a default name' do
|
||||
placeholder_user = service.execute
|
||||
|
||||
expect(placeholder_user.name).to eq("Placeholder #{import_type} Source User")
|
||||
end
|
||||
end
|
||||
|
||||
context 'when source_name is too long' do
|
||||
let(:source_name) { 'a' * 500 }

@@ -117,25 +112,11 @@ RSpec.describe Gitlab::Import::PlaceholderUserCreator, feature_category: :import
end
end

context 'when source_username is nil' do
let(:source_username) { nil }

it 'assigns a default username' do
expected_match = /^#{import_type}_\h+_placeholder_user_\d+$/

placeholder_user = service.execute

expect(placeholder_user.username).to match(expected_match)
end
end

context 'when the source_username contains invalid characters' do
using RSpec::Parameterized::TableSyntax

where(:input_username, :expected_output) do
'.asdf' | /^asdf_placeholder_user_1$/
'asdf^ghjk' | /^asdfghjk_placeholder_user_1$/
'.' | /^#{import_type}_\h+_placeholder_user_1$/
'.asdf' | /^asdf_placeholder_[[:alnum:]]+$/
'asdf^ghjk' | /^asdfghjk_placeholder_[[:alnum:]]+$/
'.' | /^#{import_type}_placeholder_[[:alnum:]]+$/
end

with_them do

@@ -155,64 +136,49 @@ RSpec.describe Gitlab::Import::PlaceholderUserCreator, feature_category: :import
it 'truncates the original username to 200 characters' do
placeholder_user = service.execute

expect(placeholder_user.username).to match(/^#{'a' * 200}_placeholder_user_\d+$/)
expect(placeholder_user.username).to match(/^#{'a' * 200}_placeholder_[[:alnum:]]+$/)
end
end
end
end

describe '#placeholder_name' do
it 'prepends Placeholder to source_name' do
expect(service.placeholder_name).to eq("Placeholder #{source_name}")
describe '.placeholder_email?' do
it "matches the emails created for placeholder users" do
import_source_user = create(:import_source_user)
placeholder_user = described_class.new(import_source_user).execute

expect(described_class.placeholder_email?(placeholder_user.email)).to eq(true)
end

context 'when source_name is nil' do
let(:source_name) { nil }
it "matches the emails created for placeholders users when source username and name are missing" do
import_source_user = create(:import_source_user, source_username: nil, source_name: nil)
placeholder_user = described_class.new(import_source_user).execute

it 'assigns a default name' do
expect(service.placeholder_name).to eq("Placeholder #{import_type} Source User")
end
end
end

describe '#placeholder_username' do
it 'returns an unique placeholder username' do
expect(service.placeholder_username).to match(/^aprycontributor_placeholder_user_\d+$/)
expect(described_class.placeholder_email?(placeholder_user.email)).to eq(true)
end

context 'when source_username is nil' do
let(:source_username) { nil }

it 'assigns a default username' do
expected_match = /^#{import_type}_\h+_placeholder_user_\d+$/

expect(service.placeholder_username).to match(expected_match)
end
where(:email, :expected_match) do
'foo_placeholder_Az1@noreply.localhost' | true
'foo_placeholder_Az$1@noreply.localhost' | false
'placeholder_Az1@noreply.localhost' | false
'foo_placeholder@noreply.localhost' | false
end
end

describe '.placeholder_email_pattern' do
subject(:placeholder_email_pattern) { described_class.placeholder_email_pattern }

::Import::HasImportSource::IMPORT_SOURCES.except(:none).each_key do |import_type|
it "matches the emails created for placeholder users imported from #{import_type}" do
import_source_user = create(:import_source_user, import_type: import_type)
placeholder_user = described_class.new(import_source_user).execute

expect(placeholder_email_pattern === placeholder_user.email).to eq(true)
with_them do
specify do
expect(described_class.placeholder_email?(email)).to eq(expected_match)
end
end

it 'does not match emails without an import source' do
email = 'email_12e4ab78_1@gitlab.com'
context 'with legacy placeholder user email formats' do
where(:import_type) { Import::HasImportSource::IMPORT_SOURCES.except(:none).keys }

expect(placeholder_email_pattern === email).to eq(false)
end

it 'does not match emails with domains other than the host' do
email = "github_12e4ab78_2@not#{Settings.gitlab.host}"

expect(placeholder_email_pattern === email).to eq(false)
with_them do
it "matches the legacy emails format for placeholder users" do
email = "#{import_type}_5c34ae6b9_1@#{Settings.gitlab.host}"
expect(described_class.placeholder_email?(email)).to eq(true)
end
end
end
end
end
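
A minimal sketch of a pattern that is consistent with the `.placeholder_email?` table examples above (new format only; the legacy `<import_type>_<hex>_<n>@<host>` branch exercised later in the spec is not covered). The constant and method names are illustrative assumptions, not GitLab's implementation.

# Illustrative only: a regex consistent with the spec's expected matches above.
PLACEHOLDER_EMAIL_SKETCH = /\A[^@\s]+_placeholder_[[:alnum:]]+@noreply\.[^@\s]+\z/

def placeholder_email_sketch?(email)
  PLACEHOLDER_EMAIL_SKETCH.match?(email)
end

placeholder_email_sketch?('foo_placeholder_Az1@noreply.localhost')  # => true
placeholder_email_sketch?('foo_placeholder_Az$1@noreply.localhost') # => false
placeholder_email_sketch?('placeholder_Az1@noreply.localhost')      # => false
placeholder_email_sketch?('foo_placeholder@noreply.localhost')      # => false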

@@ -62,8 +62,9 @@ RSpec.describe Gitlab::Import::SourceUserMapper, :request_store, feature_categor
new_placeholder_user = User.where(user_type: :placeholder).last

expect(new_placeholder_user.name).to eq("Placeholder #{source_name}")
expect(new_placeholder_user.username).to match(/^aprycontributor_placeholder_user_\d+$/)
expect(new_placeholder_user.email).to match(/^#{import_type}_\h+_\d+@#{Settings.gitlab.host}$/)
expect(new_placeholder_user.username).to match(/^aprycontributor_placeholder_[[:alnum:]]+$/)
expect(new_placeholder_user.email)
.to match(/^aprycontributor_placeholder_[[:alnum:]]+@noreply.#{Settings.gitlab.host}$/)
end
end

@@ -371,5 +372,145 @@ RSpec.describe Gitlab::Import::SourceUserMapper, :request_store, feature_categor
end
end
end

context "when the source user is in a state that returns nil for `#mapped_user_id`" do
include ExclusiveLeaseHelpers

shared_examples 'returns the existing source user, in a reset state' do
specify do
allow(::Import::Framework::Logger).to receive(:info).and_call_original

expect(::Import::Framework::Logger).to receive(:info).with(
message: 'Resetting source user state',
source_user_status: existing_import_source_user.status,
source_user_id: existing_import_source_user.id,
source_user_reassign_to_user_id: existing_import_source_user.reassign_to_user_id,
source_user_placeholder_user_id: existing_import_source_user.placeholder_user_id
)

expect { find_source_user }.to change { existing_import_source_user.reload.mapped_user_id }.from(nil)
expect(find_source_user).to eq(existing_import_source_user)
expect(find_source_user).to have_attributes(
status: 0,
reassign_to_user_id: nil,
placeholder_user_id: be_present,
reassignment_token: nil
)
end

it 'takes an exclusive lease' do
key = end_with(":#{existing_import_source_user.source_user_identifier}")
lease = stub_exclusive_lease(key, timeout: described_class::LOCK_TTL)

expect(lease).to receive(:try_obtain)
expect(lease).to receive(:cancel)

find_source_user
end

context 'when exclusive lease was retried' do
let(:update_sql) { start_with('UPDATE "import_source_users"') }

context 'and source user was not reset while waiting' do
before do
allow_next_instance_of(described_class) do |source_user_mapper|
allow(source_user_mapper).to receive(:in_lock).and_yield(true)
end
end

it 'continues to reset the source user' do
recorder = ActiveRecord::QueryRecorder.new { find_source_user }

expect(recorder.log).to include(update_sql)
expect(find_source_user).to eq(existing_import_source_user)
end
end

context 'and source user was reset while waiting' do
before do
allow_next_instance_of(described_class) do |source_user_mapper|
allow(source_user_mapper).to receive(:in_lock).and_yield(true)
allow(source_user_mapper).to receive(:reset_source_user?).and_return(true, false)
end
end

it 'does not continue to reset the source user' do
recorder = ActiveRecord::QueryRecorder.new { find_source_user }

expect(recorder.log).not_to include(update_sql)
expect(find_source_user).to eq(existing_import_source_user)
end
end
end

context 'when there are ActiveRecord validation errors' do
before do
allow_next_found_instance_of(Import::SourceUser) do |source_user|
allow(source_user).to receive(:save).and_return(false)
allow(source_user).to receive_message_chain(:errors, :full_messages).and_return(['mocked_error'])
end
end

it 'logs the errors and destroys the source user record' do
expect(::Import::Framework::Logger).to receive(:error).with(
message: 'Failed to save source user after resetting',
source_user_id: kind_of(Integer),
source_user_validation_errors: ['mocked_error']
)

expect(existing_import_source_user).to be_invalid
expect { find_source_user }.to change { Import::SourceUser.count }.by(-1)
expect(find_source_user).to be_nil
expect(Import::SourceUser.find_by_id(existing_import_source_user.id)).to be_nil
end
end
end

context 'as the reassigned to user was deleted' do
let_it_be_with_reload(:existing_import_source_user) do
create(
:import_source_user,
:completed,
namespace: namespace,
import_type: import_type,
source_hostname: source_hostname
)
end

before do
existing_import_source_user.reassign_to_user.destroy!
existing_import_source_user.reload
end

it_behaves_like 'returns the existing source user, in a reset state'

it 'retains its placeholder user' do
expect { find_source_user }.not_to change { existing_import_source_user.reload.placeholder_user_id }
end
end

context 'as placeholder user was deleted' do
let_it_be_with_reload(:existing_import_source_user) do
create(
:import_source_user,
:awaiting_approval,
namespace: namespace,
import_type: import_type,
source_hostname: source_hostname
)
end

before do
existing_import_source_user.placeholder_user.destroy!
existing_import_source_user.reload
end

it_behaves_like 'returns the existing source user, in a reset state'

it 'creates a new placeholder user' do
expect { find_source_user }.to change { existing_import_source_user.reload.placeholder_user_id }.from(nil)
end
end
end
end
end

@@ -20,5 +20,12 @@ RSpec.describe Gitlab::Utils::UsernameAndEmailGenerator, feature_category: :syst
end
end

include_examples 'username and email pair is generated by Gitlab::Utils::UsernameAndEmailGenerator'
it 'can be passed a random_segment' do
user = described_class.new(username_prefix: username_prefix, email_domain: email_domain, random_segment: 'random')

expect(user.username).to eq('username_prefix_random')
expect(user.email).to eq('username_prefix_random@example.com')
end

it_behaves_like 'username and email pair is generated by Gitlab::Utils::UsernameAndEmailGenerator'
end
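
A simplified sketch of a generator that satisfies the `random_segment` expectations in the spec above. This is an assumption for illustration, not the GitLab class: the real generator also uniquifies the pair on conflicts, as the shared examples later in this diff show.

# Illustrative only: builds a username/email pair from a prefix, a domain, and an
# optional random segment; conflict handling is deliberately omitted.
require 'securerandom'

class UsernameAndEmailGeneratorSketch
  def initialize(username_prefix:, email_domain:, random_segment: SecureRandom.hex(16))
    @username_prefix = username_prefix
    @email_domain = email_domain
    @random_segment = random_segment
  end

  # "<prefix>_<random_segment>"
  def username
    "#{@username_prefix}_#{@random_segment}"
  end

  # Local part equals the username, at the given domain.
  def email
    "#{username}@#{@email_domain}"
  end
end

g = UsernameAndEmailGeneratorSketch.new(
  username_prefix: 'username_prefix', email_domain: 'example.com', random_segment: 'random'
)
g.username # => "username_prefix_random"
g.email    # => "username_prefix_random@example.com"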

@@ -57,7 +57,7 @@ RSpec.describe Member, feature_category: :groups_and_projects do
end

it 'must not be a placeholder email' do
member.invite_email = "gitlab_migration_5c34ae6b9_1@#{Settings.gitlab.host}"
member.invite_email = 'gitlab_migration_placeholder_user@noreply.localhost'

expect(member).not_to be_valid
end

@@ -7,6 +7,7 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ

let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:pipeline_2) { create(:ci_pipeline, project: project, sha: 'sha') }
let_it_be(:current_user) { create(:user) }
let_it_be(:build_job) { create(:ci_build, :trace_with_sections, name: 'build-a', pipeline: pipeline, stage_idx: 0, stage: 'build') }
let_it_be(:failed_build) { create(:ci_build, :failed, name: 'failed-build', pipeline: pipeline, stage_idx: 0, stage: 'build') }

@@ -459,6 +460,59 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
end
end

context 'when no arguments are passed' do
let(:variables) do
{
path: project.full_path
}
end

let(:query) do
<<~GQL
query($path: ID!) {
project(fullPath: $path) {
pipeline {
id
}
}
}
GQL
end

it 'returns latest pipeline' do
post_graphql(query, current_user: current_user, variables: variables)

expect(graphql_data_at(:project, :pipeline, :id)).to eq(pipeline.to_global_id.to_s)
end
end

context 'when sha argument is passed' do
let(:variables) do
{
path: project.full_path,
sha: 'sha'
}
end

let(:query) do
<<~GQL
query($path: ID!, $sha: String!) {
project(fullPath: $path) {
pipeline(sha: $sha) {
id
}
}
}
GQL
end

it 'returns pipeline by sha' do
post_graphql(query, current_user: current_user, variables: variables)

expect(graphql_data_at(:project, :pipeline, :id)).to eq(pipeline_2.to_global_id.to_s)
end
end

private

def build_query_to_find_pipeline_shas(*pipelines)

@@ -237,7 +237,7 @@ RSpec.describe Import::BulkImports::UpdateSourceUsersService, :clean_gitlab_redi

expect(import_source_user_1.placeholder_user.reload).to have_attributes(
name: 'Placeholder John Doe',
username: 'johndoe_placeholder_user_1'
username: match(/\Ajohndoe_placeholder_[[:alnum:]]+\z/)
)
end

@@ -15,6 +15,12 @@ RSpec.describe Import::SourceUsers::UpdateService, feature_category: :importers
subject(:service) { described_class.new(import_source_user, params) }

describe '#execute' do
before do
allow_next_instance_of(Gitlab::Import::PlaceholderUserCreator) do |service|
allow(service).to receive(:random_segment).and_return('random')
end
end

it 'updates both placeholder user and source user' do
result = service.execute

@@ -22,17 +28,17 @@ RSpec.describe Import::SourceUsers::UpdateService, feature_category: :importers
expect(import_source_user.reload.source_username).to eq(new_source_username)

expect(placeholder_user.reload.name).to eq('Placeholder John Doe')
expect(placeholder_user.reload.username).to eq('johndoe_placeholder_user_1')
expect(placeholder_user.reload.username).to eq('johndoe_placeholder_random')

expect(result).to be_success
end

it 'generates unique usernames' do
create(:user, username: 'johndoe_placeholder_user_1')
create(:user, username: 'johndoe_placeholder_random')

result = service.execute

expect(placeholder_user.reload.username).to eq('johndoe_placeholder_user_2')
expect(placeholder_user.reload.username).to eq('johndoe_placeholder_random1')
expect(import_source_user.reload.source_username).to eq(new_source_username)
expect(result).to be_success
end
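
One way to read the "generates unique usernames" expectation above ('johndoe_placeholder_random' taken, result 'johndoe_placeholder_random1'): append an incrementing numeric suffix until the candidate is free. A minimal sketch against an in-memory taken set, assumed for illustration rather than GitLab's actual lookup:

# Illustrative only: appends 1, 2, 3... until the candidate username is free.
def uniquify_username(candidate, taken)
  return candidate unless taken.include?(candidate)

  suffix = 1
  suffix += 1 while taken.include?("#{candidate}#{suffix}")
  "#{candidate}#{suffix}"
end

uniquify_username('johndoe_placeholder_random', [])                             # => "johndoe_placeholder_random"
uniquify_username('johndoe_placeholder_random', ['johndoe_placeholder_random']) # => "johndoe_placeholder_random1"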

@@ -23,4 +23,21 @@ RSpec.describe WorkItems::DataSync::Handlers::CleanupDataHandler, feature_catego

cleanup_data_handler.execute
end

it 'runs all non-widget callbacks' do
create_service_params = {
work_item: anything, target_work_item: anything, current_user: current_user, params: {}
}

WorkItem.non_widgets.filter_map do |association_name|
sync_callback_class = WorkItem.sync_callback_class(association_name)
next if sync_callback_class.nil?

allow_next_instance_of(sync_callback_class, **create_service_params) do |callback_instance|
expect(callback_instance).to receive(:post_move_cleanup)
end
end

cleanup_data_handler.execute
end
end

@@ -0,0 +1,22 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'validate transferable associations', feature_category: :team_planning do
it 'handles transfer for all work item associations', :aggregate_failures do
expect(known_transferable_associations.size).to eq(known_transferable_associations.uniq.size), -> do
duplicate_associations(known_transferable_associations)
end

work_item_associations = ::WorkItem.reflect_on_all_associations.map(&:name)
missing_callbacks = work_item_associations - known_transferable_associations
missing_work_item_association = known_transferable_associations - work_item_associations

expect(missing_callbacks).to be_blank, -> do
missing_transfer_callbacks(missing_callbacks)
end
expect(missing_work_item_association).to be_blank, -> do
missing_work_item_association(missing_work_item_association)
end
end
end

@@ -216,6 +216,7 @@ RSpec.configure do |config|
config.include UserWithNamespaceShim
config.include OrphanFinalArtifactsCleanupHelpers, :orphan_final_artifacts_cleanup
config.include ClickHouseHelpers, :click_house
config.include WorkItems::DataSync::AssociationsHelpers

config.include_context 'when rendered has no HTML escapes', type: :view

@@ -73,3 +73,4 @@
- UserGroupNotificationSettingsFinder
- UserGroupsCounter
- Ai::FeatureSettings::FeatureSettingFinder
- Autocomplete::VulnerabilitiesAutocompleteFinder

@@ -364,6 +364,8 @@ module GitalySetup
# running until `make test` cleans it up.
next if ENV['GITALY_PID_FILE']

::Gitlab::GitalyClient.clear_stubs!

pids.each { |pid| stop(pid) }

[storage_path, second_storage_path].each { |storage_dir| FileUtils.rm_rf(storage_dir) }

@@ -692,7 +692,6 @@
- spec/lib/gitlab/relative_positioning/mover_spec.rb
- spec/lib/gitlab/sample_data_template_spec.rb
- spec/lib/gitlab/themes_spec.rb
- spec/lib/gitlab/utils/username_and_email_generator_spec.rb
- spec/mailers/emails/service_desk_spec.rb
- spec/mailers/notify_spec.rb
- spec/migrations/20221002234454_finalize_group_member_namespace_id_migration_spec.rb

@@ -0,0 +1,133 @@
# frozen_string_literal: true

require 'spec_helper'

module WorkItems
module DataSync
module AssociationsHelpers
include Gitlab::Utils::StrongMemoize

BASE_ASSOCIATIONS = {
base_associations: [
:author, :updated_by, :project, :duplicated_to, :last_edited_by, :closed_by, :work_item_type,
:correct_work_item_type, :moved_to, :moved_from, :namespace
]
}.freeze

WIDGETS_ASSOCIATIONS = {
assignees: [:assignees, :issue_assignees, :assignees_by_name_and_id],
award_emoji: [:award_emoji],
crm_contacts: [:customer_relations_contacts, :issue_customer_relations_contacts],
current_user_todos: [:todos],
description: [:description_versions],
designs: [:designs, :design_versions], # DesignManagement::Action ???
development: [:merge_requests_closing_issues],
email_participants: [:issue_email_participants, :email],
hierarchy: [
:work_item_parent, :work_item_children, :work_item_children_by_relative_position, :parent_link, :child_links
],
labels: [:label_links, :labels, :resource_label_events],
linked_items: [], # linked_work_items
milestone: [:milestone, :resource_milestone_events],
notes: [:notes, :note_authors, :user_note_authors],
notifications: [:sent_notifications, :subscriptions],
participants: [:user_mentions],
start_and_due_date: [:dates_source],
time_tracking: [:timelogs],
rolledup_dates: [],
status: [],
weight: [:weights_source]
}.freeze

NON_WIDGETS_ASSOCIATIONS = {
tbd: [
:events, :assignment_events, :resource_state_events, :metrics,
:incident_management_issuable_escalation_status, :incident_management_timeline_events, :issuable_severity,
:sentry_issue, :alert_management_alert, :alert_management_alerts, :user_agent_detail, :zoom_meetings,
:search_data
]
}.freeze

def base_associations
BASE_ASSOCIATIONS
end

def widgets_associations
WIDGETS_ASSOCIATIONS
end

def non_widgets_associations
NON_WIDGETS_ASSOCIATIONS
end

def known_transferable_associations
[
base_associations.values,
widgets_associations.values,
non_widgets_associations.values
].flatten
end
strong_memoize_attr :known_transferable_associations

def missing_transfer_callbacks(missing_callbacks)
<<~MSG
Following association(s) are not being handled by move and clone services:
- #{missing_callbacks.join("\n - ")}

Please make sure that these associations have a transfer callback defined in one of the following locations:
- app/services/work_items/data_sync/widgets
- app/services/work_items/data_sync/non_widgets
- ee/app/services/work_items/data_sync/widgets
- ee/app/services/work_items/data_sync/non_widgets
MSG
end

def missing_work_item_association(missing_work_item_association)
<<~MSG
Following association(s) are declared as being handled by move and clone services callbacks but are no longer
present in WorkItem model:
- #{missing_work_item_association.join("\n - ")}

Please check if these associations were removed from WorkItem model and remove the corresponding callbacks
and update corresponding collection of associations in WorkItems::DataSync::AssociationsHelpers or
EE::WorkItems::DataSync::AssociationsHelpers:
- BASE_ASSOCIATIONS
- WIDGETS_ASSOCIATIONS
- NON_WIDGETS_ASSOCIATIONS
MSG
end

def duplicate_associations(associations)
duplicate_locations = {}
duplicates = associations.group_by { |assoc| assoc }.select { |_name, value| value.size > 1 }.map(&:first)

[
base_associations,
widgets_associations,
non_widgets_associations
].each do |constant|
constant.each do |callback, associations|
matches = associations & duplicates
matches.each do |match|
duplicate_locations[match] ||= []
duplicate_locations[match] << callback
end
end
end

duplicate_info = duplicate_locations.map do |association_name, callback_name|
" - #{association_name} (found in: #{callback_name})"
end.join("\n")

<<~MSG
Following associations are being handled by more than one callback:
#{duplicate_info}

Please make sure that these associations are handled by only one callback.
MSG
end
end
end
end

WorkItems::DataSync::AssociationsHelpers.prepend_mod

@@ -331,7 +331,6 @@
- './ee/spec/finders/security/findings_finder_spec.rb'
- './ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
- './ee/spec/finders/security/scan_execution_policies_finder_spec.rb'
- './ee/spec/finders/security/vulnerabilities_finder_spec.rb'
- './ee/spec/finders/security/vulnerability_feedbacks_finder_spec.rb'
- './ee/spec/finders/snippets_finder_spec.rb'
- './ee/spec/finders/template_finder_spec.rb'

@@ -1,17 +1,21 @@
# frozen_string_literal: true

RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::UsernameAndEmailGenerator' do
let(:randomhex) { 'randomhex' }
before do
allow_next_instance_of(Gitlab::Utils::UsernameAndEmailGenerator) do |generator|
allow(generator).to receive(:random_segment).and_return(random_segment)
end
end

let(:random_segment) { 'randomhex' }

it 'check email domain' do
expect(subject.email).to end_with("@#{email_domain}")
end

it 'contains SecureRandom part' do
allow(SecureRandom).to receive(:hex).at_least(:once).and_return(randomhex)

expect(subject.username).to include("_#{randomhex}")
expect(subject.email).to include("_#{randomhex}@")
it 'contains random segment part' do
expect(subject.username).to include("_#{random_segment}")
expect(subject.email).to include("_#{random_segment}@")
end

it 'email name is the same as username' do

@@ -19,7 +23,7 @@ RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::Us
end

context 'when conflicts' do
let(:reserved_username) { "#{username_prefix}_#{randomhex}" }
let(:reserved_username) { "#{username_prefix}_#{random_segment}" }
let(:reserved_email) { "#{reserved_username}@#{email_domain}" }

shared_examples 'uniquifies username and email' do

@@ -33,7 +37,6 @@ RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::Us
context 'when username is reserved by user' do
before do
create(:user, username: reserved_username)
allow(SecureRandom).to receive(:hex).at_least(:once).and_return(randomhex)
end

include_examples 'uniquifies username and email'

@@ -42,7 +45,6 @@ RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::Us
context 'when it conflicts with top-level group namespace' do
before do
create(:group, path: reserved_username)
allow(SecureRandom).to receive(:hex).at_least(:once).and_return(randomhex)
end

include_examples 'uniquifies username and email'

@@ -51,7 +53,6 @@ RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::Us
context 'when it conflicts with top-level group namespace that includes upcased characters' do
before do
create(:group, path: reserved_username.upcase)
allow(SecureRandom).to receive(:hex).at_least(:once).and_return(randomhex)
end

include_examples 'uniquifies username and email'

@@ -62,7 +63,6 @@ RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::Us
context 'when it conflicts with confirmed primary email' do
before do
create(:user, email: reserved_email)
allow(SecureRandom).to receive(:hex).at_least(:once).and_return(randomhex)
end

include_examples 'uniquifies username and email'

@@ -71,7 +71,6 @@ RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::Us
context 'when it conflicts with unconfirmed primary email' do
before do
create(:user, :unconfirmed, email: reserved_email)
allow(SecureRandom).to receive(:hex).at_least(:once).and_return(randomhex)
end

include_examples 'uniquifies username and email'

@@ -80,7 +79,14 @@ RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::Us
context 'when it conflicts with confirmed secondary email' do
before do
create(:email, :confirmed, email: reserved_email)
allow(SecureRandom).to receive(:hex).at_least(:once).and_return(randomhex)
end

include_examples 'uniquifies username and email'
end

context 'when it conflicts with unconfirmed secondary email' do
before do
create(:email, email: reserved_email)
end

include_examples 'uniquifies username and email'

@@ -91,12 +97,10 @@ RSpec.shared_examples 'username and email pair is generated by Gitlab::Utils::Us
before do
create(:user, email: reserved_email)
create(:user, username: "#{reserved_username}1")
allow(SecureRandom).to receive(:hex).at_least(:once).and_return(randomhex)
end

it 'uniquifies username and email' do
expect(subject.username).to eq("#{reserved_username}2")

expect(subject.email).to include("#{subject.username}@")
end
end

41 yarn.lock
@@ -1273,10 +1273,12 @@
debug "^4.3.1"
minimatch "^3.1.2"

"@eslint/core@^0.9.0":
version "0.9.0"
resolved "https://registry.yarnpkg.com/@eslint/core/-/core-0.9.0.tgz#168ee076f94b152c01ca416c3e5cf82290ab4fcd"
integrity sha512-7ATR9F0e4W85D/0w7cU0SNj7qkAexMG+bAHEZOjo9akvGuhHE2m7umzWzfnpa0XAg5Kxc1BWmtPMV67jJ+9VUg==
"@eslint/core@^0.10.0":
version "0.10.0"
resolved "https://registry.yarnpkg.com/@eslint/core/-/core-0.10.0.tgz#23727063c21b335f752dbb3a16450f6f9cbc9091"
integrity sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw==
dependencies:
"@types/json-schema" "^7.0.15"

"@eslint/eslintrc@^3.2.0":
version "3.2.0"

@@ -1293,21 +1295,22 @@
minimatch "^3.1.2"
strip-json-comments "^3.1.1"

"@eslint/js@9.17.0", "@eslint/js@^9.15.0":
version "9.17.0"
resolved "https://registry.yarnpkg.com/@eslint/js/-/js-9.17.0.tgz#1523e586791f80376a6f8398a3964455ecc651ec"
integrity sha512-Sxc4hqcs1kTu0iID3kcZDW3JHq2a77HO9P8CP6YEA/FpH3Ll8UXE2r/86Rz9YJLKme39S9vU5OWNjC6Xl0Cr3w==
"@eslint/js@9.18.0", "@eslint/js@^9.15.0":
version "9.18.0"
resolved "https://registry.yarnpkg.com/@eslint/js/-/js-9.18.0.tgz#3356f85d18ed3627ab107790b53caf7e1e3d1e84"
integrity sha512-fK6L7rxcq6/z+AaQMtiFTkvbHkBLNlwyRxHpKawP0x3u9+NC6MQTnFW+AdpwC6gfHTW0051cokQgtTN2FqlxQA==

"@eslint/object-schema@^2.1.4":
version "2.1.4"
resolved "https://registry.yarnpkg.com/@eslint/object-schema/-/object-schema-2.1.4.tgz#9e69f8bb4031e11df79e03db09f9dbbae1740843"
integrity sha512-BsWiH1yFGjXXS2yvrf5LyuoSIIbPrGUWob917o+BTKuZ7qJdxX8aJLRxs1fS9n6r7vESrq1OUqb68dANcFXuQQ==

"@eslint/plugin-kit@^0.2.3":
version "0.2.3"
resolved "https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.2.3.tgz#812980a6a41ecf3a8341719f92a6d1e784a2e0e8"
integrity sha512-2b/g5hRmpbb1o4GnTZax9N9m0FXzz9OV42ZzI4rDDMDuHUqigAiQCEWChBWCY4ztAGVRjoWT19v0yMmc5/L5kA==
"@eslint/plugin-kit@^0.2.5":
version "0.2.5"
resolved "https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.2.5.tgz#ee07372035539e7847ef834e3f5e7b79f09e3a81"
integrity sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A==
dependencies:
"@eslint/core" "^0.10.0"
levn "^0.4.1"

"@fastify/busboy@^2.0.0":

@@ -7392,18 +7395,18 @@ eslint-visitor-keys@^4.2.0:
resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz#687bacb2af884fcdda8a6e7d65c606f46a14cd45"
integrity sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==

eslint@9.17.0:
version "9.17.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-9.17.0.tgz#faa1facb5dd042172fdc520106984b5c2421bb0c"
integrity sha512-evtlNcpJg+cZLcnVKwsai8fExnqjGPicK7gnUtlNuzu+Fv9bI0aLpND5T44VLQtoMEnI57LoXO9XAkIXwohKrA==
eslint@9.18.0:
version "9.18.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-9.18.0.tgz#c95b24de1183e865de19f607fda6518b54827850"
integrity sha512-+waTfRWQlSbpt3KWE+CjrPPYnbq9kfZIYUqapc0uBXyjTp8aYXZDsUH16m39Ryq3NjAVP4tjuF7KaukeqoCoaA==
dependencies:
"@eslint-community/eslint-utils" "^4.2.0"
"@eslint-community/regexpp" "^4.12.1"
"@eslint/config-array" "^0.19.0"
"@eslint/core" "^0.9.0"
"@eslint/core" "^0.10.0"
"@eslint/eslintrc" "^3.2.0"
"@eslint/js" "9.17.0"
"@eslint/plugin-kit" "^0.2.3"
"@eslint/js" "9.18.0"
"@eslint/plugin-kit" "^0.2.5"
"@humanfs/node" "^0.16.6"
"@humanwhocodes/module-importer" "^1.0.1"
"@humanwhocodes/retry" "^0.4.1"