Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-12-21 03:15:00 +00:00
parent 4aa6fba6d8
commit e3ecb7dc09
73 changed files with 1552 additions and 127 deletions

View File

@ -2837,7 +2837,6 @@ RSpec/FeatureCategory:
- 'spec/lib/gitlab/build_access_spec.rb'
- 'spec/lib/gitlab/bullet_spec.rb'
- 'spec/lib/gitlab/cache/helpers_spec.rb'
- 'spec/lib/gitlab/cache/import/caching_spec.rb'
- 'spec/lib/gitlab/cache/metrics_spec.rb'
- 'spec/lib/gitlab/cache/request_cache_spec.rb'
- 'spec/lib/gitlab/cache_spec.rb'

View File

@ -1,5 +1,6 @@
<script>
import { GlBanner, GlLink } from '@gitlab/ui';
import ChatBubbleSvg from '@gitlab/svgs/dist/illustrations/chat-sm.svg?url';
import { __, s__ } from '~/locale';
import { helpPagePath } from '~/helpers/help_page_helper';
import BetaBadge from '~/vue_shared/components/badges/beta_badge.vue';
@ -44,6 +45,7 @@ export default {
learnMore: __('Learn more'),
},
learnMorePath: helpPagePath('ci/components/index'),
ChatBubbleSvg,
};
</script>
<template>
@ -54,6 +56,7 @@ export default {
:title="$options.i18n.banner.title"
:button-text="$options.i18n.banner.btnText"
button-link="https://gitlab.com/gitlab-org/gitlab/-/issues/407556"
:svg-path="$options.ChatBubbleSvg"
@close="handleDismissBanner"
>
<p>

View File

@ -172,6 +172,8 @@ export default Extension.create({
return true;
}
if (!textContent) return false;
const hasHTML = clipboardData.types.some((type) => type === HTML_FORMAT);
const hasVsCode = clipboardData.types.some((type) => type === VS_CODE_FORMAT);
const vsCodeMeta = hasVsCode ? JSON.parse(clipboardData.getData(VS_CODE_FORMAT)) : {};

View File

@ -3,6 +3,7 @@ import '~/lib/utils/jquery_at_who';
import { escape as lodashEscape, sortBy, template, escapeRegExp } from 'lodash';
import * as Emoji from '~/emoji';
import axios from '~/lib/utils/axios_utils';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import { loadingIconForLegacyJS } from '~/loading_icon_for_legacy_js';
import { s__, __, sprintf } from '~/locale';
import { isUserBusy } from '~/set_status_modal/utils';
@ -26,6 +27,8 @@ export const CONTACT_STATE_ACTIVE = 'active';
export const CONTACTS_ADD_COMMAND = '/add_contacts';
export const CONTACTS_REMOVE_COMMAND = '/remove_contacts';
const useMentionsBackendFiltering = window.gon.features?.mentionAutocompleteBackendFiltering;
/**
* Escapes user input before we pass it to at.js, which
* renders it as HTML in the autocomplete dropdown.
@ -62,6 +65,8 @@ export function showAndHideHelper($input, alias = '') {
});
}
// This should be kept in sync with the backend filtering in
// `User#gfm_autocomplete_search` and `Namespace#gfm_autocomplete_search`
function createMemberSearchString(member) {
return `${member.name.replace(/ /g, '')} ${member.username}`;
}
@ -344,6 +349,7 @@ class GfmAutoComplete {
}
setupMembers($input) {
const instance = this;
const fetchData = this.fetchData.bind(this);
const MEMBER_COMMAND = {
ASSIGN: '/assign',
@ -383,6 +389,7 @@ class GfmAutoComplete {
// eslint-disable-next-line no-template-curly-in-string
insertTpl: '${atwho-at}${username}',
limit: 10,
delay: useMentionsBackendFiltering ? DEFAULT_DEBOUNCE_AND_THROTTLE_MS : null,
searchKey: 'search',
alwaysHighlightFirst: true,
skipSpecialCharacterTest: true,
@ -409,16 +416,19 @@ class GfmAutoComplete {
const match = GfmAutoComplete.defaultMatcher(flag, subtext, this.app.controllers);
return match && match.length ? match[1] : null;
},
filter(query, data, searchKey) {
if (GfmAutoComplete.isLoading(data)) {
filter(query, data) {
if (useMentionsBackendFiltering) {
if (GfmAutoComplete.isLoading(data) || instance.previousQuery !== query) {
instance.previousQuery = query;
fetchData(this.$inputor, this.at, query);
return data;
}
} else if (GfmAutoComplete.isLoading(data)) {
fetchData(this.$inputor, this.at);
return data;
}
if (data === GfmAutoComplete.defaultLoadingData) {
return $.fn.atwho.default.callbacks.filter(query, data, searchKey);
}
if (command === MEMBER_COMMAND.ASSIGN) {
// Only include members which are not assigned to Issuable currently
return data.filter((member) => !assignees.includes(member.search));
@ -988,6 +998,11 @@ GfmAutoComplete.atTypeMap = {
};
GfmAutoComplete.typesWithBackendFiltering = ['vulnerabilities'];
if (useMentionsBackendFiltering) {
GfmAutoComplete.typesWithBackendFiltering.push('members');
}
GfmAutoComplete.isTypeWithBackendFiltering = (type) =>
GfmAutoComplete.typesWithBackendFiltering.includes(GfmAutoComplete.atTypeMap[type]);
@ -1040,6 +1055,8 @@ GfmAutoComplete.Members = {
// `member.search` is a name:username string like `MargeSimpson msimpson`
return member.search.toLowerCase().includes(query);
},
// This should be kept in sync with the backend sorting in
// `User#gfm_autocomplete_search` and `Namespace#gfm_autocomplete_search`
sort(query, members) {
const lowercaseQuery = query.toLowerCase();
const { nameOrUsernameStartsWith, nameOrUsernameIncludes } = GfmAutoComplete.Members;

View File

@ -10,7 +10,7 @@ class Groups::AutocompleteSourcesController < Groups::ApplicationController
urgency :low, [:issues, :labels, :milestones, :commands, :merge_requests, :members]
def members
render json: ::Groups::ParticipantsService.new(@group, current_user).execute(target)
render json: ::Groups::ParticipantsService.new(@group, current_user, params).execute(target)
end
def issues

View File

@ -15,7 +15,7 @@ class Projects::AutocompleteSourcesController < Projects::ApplicationController
urgency :low, [:issues, :labels, :milestones, :commands, :contacts]
def members
render json: ::Projects::ParticipantsService.new(@project, current_user).execute(target)
render json: ::Projects::ParticipantsService.new(@project, current_user, params).execute(target)
end
def issues

View File

@ -71,6 +71,7 @@ class Projects::IssuesController < Projects::ApplicationController
push_frontend_feature_flag(:display_work_item_epic_issue_sidebar, project)
push_force_frontend_feature_flag(:linked_work_items, project.linked_work_items_feature_flag_enabled?)
push_frontend_feature_flag(:notifications_todos_buttons, current_user)
push_frontend_feature_flag(:mention_autocomplete_backend_filtering, project)
end
around_action :allow_gitaly_ref_name_caching, only: [:discussions]

View File

@ -46,6 +46,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:notifications_todos_buttons, current_user)
push_frontend_feature_flag(:mr_request_changes, current_user)
push_frontend_feature_flag(:merge_blocked_component, current_user)
push_frontend_feature_flag(:mention_autocomplete_backend_filtering, project)
end
around_action :allow_gitaly_ref_name_caching, only: [:index, :show, :diffs, :discussions]

View File

@ -18,7 +18,7 @@ class GroupMember < Member
default_scope { where(source_type: SOURCE_TYPE) } # rubocop:disable Cop/DefaultScope
scope :of_groups, ->(groups) { where(source_id: groups&.select(:id)) }
scope :of_groups, ->(groups) { where(source_id: groups) }
scope :of_ldap_type, -> { where(ldap: true) }
scope :count_users_by_group_id, -> { group(:source_id).count }

View File

@ -263,6 +263,28 @@ class Namespace < ApplicationRecord
end
end
# This should be kept in sync with the frontend filtering in
# https://gitlab.com/gitlab-org/gitlab/-/blob/5d34e3488faa3982d30d7207773991c1e0b6368a/app/assets/javascripts/gfm_auto_complete.js#L68 and
# https://gitlab.com/gitlab-org/gitlab/-/blob/5d34e3488faa3982d30d7207773991c1e0b6368a/app/assets/javascripts/gfm_auto_complete.js#L1053
def gfm_autocomplete_search(query)
without_project_namespaces
.allow_cross_joins_across_databases(url: "https://gitlab.com/gitlab-org/gitlab/-/issues/420046")
.joins(:route)
.where(
"REPLACE(routes.name, ' ', '') ILIKE :pattern OR routes.path ILIKE :pattern",
pattern: "%#{sanitize_sql_like(query)}%"
)
.order(
Arel.sql(sanitize_sql(
[
"CASE WHEN starts_with(REPLACE(routes.name, ' ', ''), :pattern) OR starts_with(routes.path, :pattern) THEN 1 ELSE 2 END",
{ pattern: query }
]
)),
'routes.path'
)
end
def clean_path(path, limited_to: Namespace.all)
slug = Gitlab::Slug::Path.new(path).generate
path = Namespaces::RandomizedSuffixPath.new(slug)

View File

@ -1,6 +1,8 @@
# frozen_string_literal: true
class ResourceMilestoneEvent < ResourceTimeboxEvent
include EachBatch
belongs_to :milestone
scope :include_relations, -> { includes(:user, milestone: [:project, :group]) }

View File

@ -847,6 +847,25 @@ class User < MainClusterwide::ApplicationRecord
scope.reorder(order)
end
# This should be kept in sync with the frontend filtering in
# https://gitlab.com/gitlab-org/gitlab/-/blob/5d34e3488faa3982d30d7207773991c1e0b6368a/app/assets/javascripts/gfm_auto_complete.js#L68 and
# https://gitlab.com/gitlab-org/gitlab/-/blob/5d34e3488faa3982d30d7207773991c1e0b6368a/app/assets/javascripts/gfm_auto_complete.js#L1053
def gfm_autocomplete_search(query)
where(
"REPLACE(users.name, ' ', '') ILIKE :pattern OR users.username ILIKE :pattern",
pattern: "%#{sanitize_sql_like(query)}%"
).order(
Arel.sql(sanitize_sql(
[
"CASE WHEN starts_with(REPLACE(users.name, ' ', ''), :pattern) OR starts_with(users.username, :pattern) THEN 1 ELSE 2 END",
{ pattern: query }
]
)),
:username,
:id
)
end
# Limits the result set to users _not_ in the given query/list of IDs.
#
# users - The list of users to ignore. This can be an

View File

@ -3,6 +3,9 @@
module Users
module ParticipableService
extend ActiveSupport::Concern
include Gitlab::Utils::StrongMemoize
SEARCH_LIMIT = 10
included do
attr_reader :noteable
@ -25,6 +28,16 @@ module Users
sorted(users)
end
def filter_and_sort_users(users_relation)
if params[:search]
users_relation.gfm_autocomplete_search(params[:search]).limit(SEARCH_LIMIT).tap do |users|
preload_status(users)
end
else
sorted(users_relation)
end
end
def sorted(users)
users.uniq.to_a.compact.sort_by(&:username).tap do |users|
preload_status(users)
@ -34,8 +47,15 @@ module Users
def groups
return [] unless current_user
current_user.authorized_groups.with_route.sort_by(&:full_path)
relation = current_user.authorized_groups
if params[:search]
relation.gfm_autocomplete_search(params[:search]).limit(SEARCH_LIMIT).to_a
else
relation.with_route.sort_by(&:full_path)
end
end
strong_memoize_attr :groups
def render_participants_as_hash(participants)
participants.map { |participant| participant_as_hash(participant) }
@ -74,11 +94,14 @@ module Users
end
def group_counts
@group_counts ||= GroupMember
.of_groups(current_user.authorized_groups)
groups_for_count = params[:search] ? groups : current_user.authorized_groups
GroupMember
.of_groups(groups_for_count)
.non_request
.count_users_by_group_id
end
strong_memoize_attr :group_counts
def preload_status(users)
users.each { |u| lazy_user_availability(u) }

View File

@ -29,7 +29,9 @@ module Groups
def group_hierarchy_users
return [] unless group
sorted(Autocomplete::GroupUsersFinder.new(group: group).execute)
relation = Autocomplete::GroupUsersFinder.new(group: group).execute
filter_and_sort_users(relation)
end
end
end

View File

@ -139,7 +139,8 @@ module Import
.new(project)
.write(
timeout_strategy: params[:timeout_strategy] || ProjectImportData::PESSIMISTIC_TIMEOUT,
optional_stages: params[:optional_stages]
optional_stages: params[:optional_stages],
extended_events: Feature.enabled?(:github_import_extended_events, current_user)
)
end
end

View File

@ -63,9 +63,12 @@ module Milestones
def update_children(group_milestone, milestone_ids)
issues = Issue.where(project_id: group_project_ids, milestone_id: milestone_ids)
merge_requests = MergeRequest.where(source_project_id: group_project_ids, milestone_id: milestone_ids)
milestone_events = ResourceMilestoneEvent.where(milestone_id: milestone_ids)
[issues, merge_requests].each do |issuable_collection|
issuable_collection.update_all(milestone_id: group_milestone.id)
[issues, merge_requests, milestone_events].each do |collection|
collection.each_batch do |batch|
batch.update_all(milestone_id: group_milestone.id)
end
end
end
# rubocop: enable CodeReuse/ActiveRecord

View File

@ -18,13 +18,17 @@ module Projects
end
def project_members
@project_members ||= sorted(project.authorized_users)
filter_and_sort_users(project_members_relation)
end
def all_members
return [] if Feature.enabled?(:disable_all_mention)
[{ username: "all", name: "All Project and Group Members", count: project_members.count }]
[{ username: "all", name: "All Project and Group Members", count: project_members_relation.count }]
end
def project_members_relation
project.authorized_users
end
end
end

View File

@ -12,4 +12,4 @@
cancel_path: cancel_import_github_path,
details_path: details_import_github_path,
status_import_github_group_path: status_import_github_group_path(format: :json),
optional_stages: Gitlab::GithubImport::Settings.stages_array
optional_stages: Gitlab::GithubImport::Settings.stages_array(current_user)

View File

@ -1353,6 +1353,15 @@
:weight: 1
:idempotent: false
:tags: []
- :name: github_importer:github_import_replay_events
:worker_name: Gitlab::GithubImport::ReplayEventsWorker
:feature_category: :importers
:has_external_dependencies: true
:urgency: :low
:resource_boundary: :unknown
:weight: 1
:idempotent: true
:tags: []
- :name: github_importer:github_import_stage_finish_import
:worker_name: Gitlab::GithubImport::Stage::FinishImportWorker
:feature_category: :importers

View File

@ -22,14 +22,20 @@ module Gitlab
sidekiq_options dead: false
# The known importer stages and their corresponding Sidekiq workers.
#
# Note: AdvanceStageWorker is not used for the repository, base_data, and pull_requests stages.
# They are included in the list for us to easily see all stage workers and the order in which they are executed.
STAGES = {
repository: Stage::ImportRepositoryWorker,
base_data: Stage::ImportBaseDataWorker,
pull_requests: Stage::ImportPullRequestsWorker,
collaborators: Stage::ImportCollaboratorsWorker,
pull_requests_merged_by: Stage::ImportPullRequestsMergedByWorker,
pull_request_review_requests: Stage::ImportPullRequestsReviewRequestsWorker,
pull_request_reviews: Stage::ImportPullRequestsReviewsWorker,
pull_requests_merged_by: Stage::ImportPullRequestsMergedByWorker, # Skipped on extended_events
pull_request_review_requests: Stage::ImportPullRequestsReviewRequestsWorker, # Skipped on extended_events
pull_request_reviews: Stage::ImportPullRequestsReviewsWorker, # Skipped on extended_events
issues_and_diff_notes: Stage::ImportIssuesAndDiffNotesWorker,
issue_events: Stage::ImportIssueEventsWorker,
notes: Stage::ImportNotesWorker,
notes: Stage::ImportNotesWorker, # Skipped on extended_events
attachments: Stage::ImportAttachmentsWorker,
protected_branches: Stage::ImportProtectedBranchesWorker,
lfs_objects: Stage::ImportLfsObjectsWorker,

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    # Sidekiq worker that processes a queued GitHub "replay event" by handing
    # it to Importer::ReplayEventsImporter. The shared perform/import plumbing
    # comes from ObjectImporter (defined elsewhere in the project).
    class ReplayEventsWorker
      include ObjectImporter
      idempotent!
      # Representation used to rebuild the object from the Sidekiq job payload.
      def representation_class
        Representation::ReplayEvent
      end
      # Importer invoked for each replay event.
      def importer_class
        Importer::ReplayEventsImporter
      end
      # Symbol identifying this object type — presumably used by ObjectImporter
      # for metrics/labels; confirm in ObjectImporter.
      def object_type
        :replay_event
      end
      # Replay events are internal bookkeeping, so they are excluded from the
      # imported-object counters.
      def increment_object_counter?(_object)
        false
      end
    end
  end
end

View File

@ -42,9 +42,15 @@ module Gitlab
def move_to_next_stage(project, waiters = {})
AdvanceStageWorker.perform_async(
project.id, waiters.deep_stringify_keys, 'pull_requests_merged_by'
project.id, waiters.deep_stringify_keys, next_stage(project)
)
end
def next_stage(project)
return 'issues_and_diff_notes' if import_settings(project).extended_events?
'pull_requests_merged_by'
end
end
end
end

View File

@ -15,7 +15,7 @@ module Gitlab
# client - An instance of Gitlab::GithubImport::Client.
# project - An instance of Project.
def import(client, project)
return skip_to_next_stage(project) if import_settings(project).disabled?(:single_endpoint_issue_events_import)
return skip_to_next_stage(project) if skip_to_next_stage?(project)
importer = ::Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
info(project.id, message: "starting importer", importer: importer.name)
@ -25,13 +25,26 @@ module Gitlab
private
def skip_to_next_stage?(project)
# This stage is mandatory when using extended_events
return false if import_settings(project).extended_events?
import_settings(project).disabled?(:single_endpoint_issue_events_import)
end
def skip_to_next_stage(project)
info(project.id, message: "skipping importer", importer: "IssueEventsImporter")
move_to_next_stage(project)
end
def move_to_next_stage(project, waiters = {})
AdvanceStageWorker.perform_async(project.id, waiters.deep_stringify_keys, 'notes')
AdvanceStageWorker.perform_async(project.id, waiters.deep_stringify_keys, next_stage(project))
end
def next_stage(project)
return "attachments" if import_settings(project).extended_events?
"notes"
end
end
end

View File

@ -0,0 +1,8 @@
---
name: github_import_extended_events
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/139410
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/435089
milestone: '16.8'
type: development
group: group::import and integrate
default_enabled: false

View File

@ -0,0 +1,8 @@
---
name: mention_autocomplete_backend_filtering
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131250
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/435106
milestone: '16.7'
type: development
group: group::project management
default_enabled: false

View File

@ -6,7 +6,7 @@ class FinalizeSystemNoteMetadataBigintConversion < Gitlab::Database::Migration[2
disable_ddl_transaction!
restrict_gitlab_migration gitlab_schema: :gitlab_main
milestone '16.7'
milestone '16.8'
TABLE_NAME = :system_note_metadata

View File

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Migration that drops the temporary index `note_mentions_temp_index`
# from the notes table.
class DropNoteMentionsTempIndex < Gitlab::Database::Migration[2.2]
  # Concurrent index operations cannot run inside a DDL transaction.
  disable_ddl_transaction!
  milestone '16.8'
  INDEX_NAME = 'note_mentions_temp_index'
  # Removes the temporary index by name.
  def up
    remove_concurrent_index_by_name :notes, INDEX_NAME
  end
  # Recreates the partial index on (id, noteable_type) restricted to notes
  # containing an "@" character, matching its original definition.
  def down
    add_concurrent_index :notes, [:id, :noteable_type], where: "note ~~ '%@%'::text", name: INDEX_NAME
  end
end

View File

@ -0,0 +1 @@
d48e67240f21ae70e7bae4d1c28fd2070b82e6415539da302248897847678915

View File

@ -35341,8 +35341,6 @@ CREATE UNIQUE INDEX merge_request_user_mentions_on_mr_id_and_note_id_index ON me
CREATE UNIQUE INDEX merge_request_user_mentions_on_mr_id_index ON merge_request_user_mentions USING btree (merge_request_id) WHERE (note_id IS NULL);
CREATE INDEX note_mentions_temp_index ON notes USING btree (id, noteable_type) WHERE (note ~~ '%@%'::text);
CREATE UNIQUE INDEX one_canonical_wiki_page_slug_per_metadata ON wiki_page_slugs USING btree (wiki_page_meta_id) WHERE (canonical = true);
CREATE INDEX p_ci_builds_scheduled_at_idx ON ONLY p_ci_builds USING btree (scheduled_at) WHERE ((scheduled_at IS NOT NULL) AND ((type)::text = 'Ci::Build'::text) AND ((status)::text = 'scheduled'::text));

View File

@ -593,6 +593,7 @@ Some analyzers can be customized with CI/CD variables.
| `PHPCS_SECURITY_AUDIT_PHP_EXTENSIONS` | phpcs-security-audit | Comma separated list of additional PHP Extensions. |
| `SAST_SEMGREP_METRICS` | Semgrep | Set to `"false"` to disable sending anonymized scan metrics to [r2c](https://semgrep.dev). Default: `true`. Introduced in GitLab 14.0. GitLab team members can view more information in this confidential issue: `https://gitlab.com/gitlab-org/gitlab/-/issues/330565`. |
| `SAST_SCANNER_ALLOWED_CLI_OPTS` | Semgrep | CLI options (arguments with value, or flags) that are passed to the underlying security scanner when running scan operation. Only a limited set of [options](#security-scanner-configuration) are accepted. Separate a CLI option and its value using either a blank space or equals (`=`) character. For example: `name1 value1` or `name1=value1`. Multiple options must be separated by blank spaces. For example: `name1 value1 name2 value2`. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/368565) in GitLab 15.3. |
| `SAST_RULESET_GIT_REFERENCE` | Semgrep and nodejs-scan | Defines a path to a custom ruleset configuration. If a project has a `.gitlab/sast-ruleset.toml` file committed, that local configuration takes precedence and the file from `SAST_RULESET_GIT_REFERENCE` isn't used. This variable is available for the Ultimate tier only. |
#### Security scanner configuration

View File

@ -17,7 +17,7 @@ The Web IDE is an advanced editor with commit staging.
You can use the Web IDE to make changes to multiple files directly from the GitLab UI.
For a more basic implementation, see [Web Editor](../repository/web_editor.md).
To pair the Web IDE with a remote development environment, see [remote development](../remote_development/index.md).
To pair the Web IDE with a remote development environment, see [Remote development](../remote_development/index.md).
## Open the Web IDE
@ -45,7 +45,7 @@ To open the Web IDE from a merge request:
The Web IDE opens new and modified files in separate tabs and displays changes side by side.
To reduce load time, only 10 files with the most lines changed are opened automatically.
In the **Explorer** panel, any new or modified file is indicated
On the left **Explorer** sidebar, any new or modified file is indicated
by the merge request icon (**{merge-request}**) next to the file name.
To view changes to a file, right-click the file and select **Compare with merge request base**.
@ -63,12 +63,12 @@ To search across open files in the Web IDE:
1. Press <kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>F</kbd>.
1. In the search box, enter your search term.
## View a list of changed files
## View a list of modified files
To view a list of files you changed in the Web IDE:
To view a list of files you modified in the Web IDE:
- On the activity bar on the left, select **Source Control**,
or press <kbd>Control</kbd>+<kbd>Shift</kbd>+<kbd>G</kbd>.
- On the left activity bar, select **Source Control**, or
press <kbd>Control</kbd>+<kbd>Shift</kbd>+<kbd>G</kbd>.
Your `CHANGES`, `STAGED CHANGES`, and `MERGE CHANGES` are displayed.
For more information, see the [VS Code documentation](https://code.visualstudio.com/docs/sourcecontrol/overview#_commit).
@ -91,10 +91,11 @@ To restore uncommitted changes in the Web IDE:
To upload a file in the Web IDE:
1. On the activity bar on the left, select **Explorer** and
go to the directory where you want to upload the file.
1. Optional. To create a new directory, in the upper right of the
**Explorer** panel, select **New Folder** (**{folder-new}**).
1. On the left activity bar, select **Explorer**, or
press <kbd>Shift</kbd>+<kbd>Command</kbd>+<kbd>E</kbd>.
1. Go to the directory where you want to upload the file.
- To create a new directory, on the left **Explorer** sidebar,
in the upper right, select **New Folder** (**{folder-new}**).
1. Right-click the directory and select **Upload**.
1. Select the file you want to upload.
@ -106,14 +107,14 @@ The new files are uploaded and automatically added to the repository.
The Web IDE uses the current branch by default.
To switch branches in the Web IDE:
1. On the status bar, in the lower-left corner, select the current branch name.
1. On the bottom status bar, on the left, select the current branch name.
1. Enter or select an existing branch.
## Create a branch
To create a branch from the current branch in the Web IDE:
1. On the status bar, in the lower-left corner, select the current branch name.
1. On the bottom status bar, on the left, select the current branch name.
1. From the dropdown list, select **Create new branch**.
1. Enter the new branch name.
@ -123,8 +124,8 @@ If you do not have write access to the repository, **Create new branch** is not
To commit changes in the Web IDE:
1. On the activity bar on the left, select **Source Control**,
or press <kbd>Control</kbd>+<kbd>Shift</kbd>+<kbd>G</kbd>.
1. On the left activity bar, select **Source Control**, or
press <kbd>Control</kbd>+<kbd>Shift</kbd>+<kbd>G</kbd>.
1. Enter your commit message.
1. Commit to the current branch or [create a new branch](#create-a-branch).
@ -133,7 +134,7 @@ To commit changes in the Web IDE:
To create a [merge request](../merge_requests/index.md) in the Web IDE:
1. [Commit the changes](#commit-changes).
1. In the notification in the lower-right corner, select **Create MR**.
1. In the notification that appears in the lower right, select **Create MR**.
For more information, see [View missed notifications](#view-missed-notifications).
@ -172,12 +173,12 @@ In the keyboard shortcuts editor, you can search for:
Keybindings are based on your keyboard layout.
If you change your keyboard layout, existing keybindings are updated automatically.
## Change themes
## Change the color theme
You can choose between different themes for the Web IDE.
The default theme for the Web IDE is **GitLab Dark**.
You can choose between different color themes for the Web IDE.
The default theme is **GitLab Dark**.
To change the Web IDE theme:
To change the color theme in the Web IDE:
1. On the top menu bar, select **File > Preferences > Theme > Color Theme**,
or press <kbd>Command</kbd>+<kbd>K</kbd> then <kbd>Command</kbd>+<kbd>T</kbd>.
@ -188,10 +189,10 @@ The active color theme is stored in the [user settings](#edit-settings).
## View missed notifications
When you perform actions in the Web IDE, notifications appear in the lower-right corner.
When you perform actions in the Web IDE, notifications appear in the lower right.
To view any notification you might have missed:
1. On the status bar, in the lower-right corner, select the bell icon (**{notifications}**) for a list of notifications.
1. On the bottom status bar, on the right, select the bell icon (**{notifications}**) for a list of notifications.
1. Select the notification you want to view.
<!-- ## Privacy and data collection for extensions
@ -221,7 +222,7 @@ When you set up a remote development server in the Web IDE, you can use interact
You cannot use interactive web terminals to interact with a runner.
However, you can use a terminal to install dependencies and compile and debug code.
For more information, see [Workspaces](../../workspace/index.md).
For more information, see [Remote development](../remote_development/index.md).
## Related topics

View File

@ -239,6 +239,48 @@ module Gitlab
end
end
# Adds a value to a list.
#
# raw_key - The key of the list to add to.
# value - The field value to add to the list.
# timeout - The new timeout of the key.
# limit - The maximum number of members in the set. Older members will be trimmed to this limit.
def self.list_add(raw_key, value, timeout: TIMEOUT, limit: nil)
validate_redis_value!(value)
key = cache_key_for(raw_key)
with_redis do |redis|
redis.multi do |m|
m.rpush(key, value)
m.ltrim(key, -limit, -1) if limit
m.expire(key, timeout)
end
end
end
# Returns the values of the given list.
#
# raw_key - The key of the list.
def self.values_from_list(raw_key)
key = cache_key_for(raw_key)
with_redis do |redis|
redis.lrange(key, 0, -1)
end
end
# Deletes a key
#
# raw_key - Key name
def self.del(raw_key)
key = cache_key_for(raw_key)
with_redis do |redis|
redis.del(key)
end
end
def self.cache_key_for(raw_key)
"#{Redis::Cache::CACHE_NAMESPACE}:#{raw_key}"
end

View File

@ -0,0 +1,61 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    # Redis-backed cache of GitHub timeline events, keyed per record (issue or
    # merge request). Events are stored as JSON strings in a Redis list via
    # Gitlab::Cache::Import::Caching.
    class EventsCache
      # Maximum number of events kept per record; older entries are trimmed
      # by the underlying list_add limit.
      MAX_NUMBER_OF_EVENTS = 100
      # Events whose JSON payload exceeds this size are dropped (and logged).
      MAX_EVENT_SIZE = 100.kilobytes

      def initialize(project)
        @project = project
      end

      # Add issue event as JSON to the cache
      #
      # @param record [ActiveRecord::Model] Model that responds to :iid
      # @param issue_event [GitLab::GitHubImport::Representation::IssueEvent]
      def add(record, issue_event)
        json = issue_event.to_hash.to_json
        # Guard against unbounded Redis memory use: skip oversized events.
        if json.bytesize > MAX_EVENT_SIZE
          Logger.warn(
            message: 'Event too large to cache',
            project_id: project.id,
            github_identifiers: issue_event.github_identifiers
          )
          return
        end
        Gitlab::Cache::Import::Caching.list_add(events_cache_key(record), json, limit: MAX_NUMBER_OF_EVENTS)
      end

      # Reads issue events from cache
      #
      # @param record [ActiveRecord::Model] Model that responds to :iid
      # @return [Array<GitLab::GitHubImport::Representation::IssueEvent>] List of issue events
      def events(record)
        events = Gitlab::Cache::Import::Caching.values_from_list(events_cache_key(record)).map do |event|
          Representation::IssueEvent.from_json_hash(Gitlab::Json.parse(event))
        end
        # List order is insertion order; sort chronologically for replay.
        events.sort_by(&:created_at)
      end

      # Deletes the cache
      #
      # @param record [ActiveRecord::Model] Model that responds to :iid
      def delete(record)
        Gitlab::Cache::Import::Caching.del(events_cache_key(record))
      end

      private

      attr_reader :project

      # Cache key is scoped by project id, record class, and record iid.
      def events_cache_key(record)
        "github-importer/events/#{project.id}/#{record.class.name}/#{record.iid}"
      end
    end
  end
end

View File

@ -10,6 +10,7 @@ module Gitlab
# client - An instance of `Gitlab::GithubImport::Client`.
def initialize(project, client)
@project = project
@client = client
@user_finder = UserFinder.new(project, client)
end
@ -20,7 +21,7 @@ module Gitlab
private
attr_reader :project, :user_finder
attr_reader :project, :user_finder, :client
def author_id(issue_event, author_key: :actor)
user_finder.author_id_for(issue_event, author_key: author_key).first
@ -42,6 +43,10 @@ module Gitlab
belongs_to_key = merge_request_event?(issue_event) ? :merge_request_id : :issue_id
{ belongs_to_key => issuable_db_id(issue_event) }
end
def import_settings
@import_settings ||= Gitlab::GithubImport::Settings.new(project)
end
end
end
end

View File

@ -0,0 +1,27 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    module Importer
      module Events
        # Imports a GitHub timeline "commented" event as a GitLab note.
        # A no-op (returning true) unless the extended_events import setting
        # is enabled for the project.
        class Commented < BaseImporter
          def execute(issue_event)
            return true unless import_settings.extended_events?
            # Map the timeline event onto the Note representation so the
            # existing NoteImporter can persist it.
            note = Representation::Note.from_json_hash(
              noteable_id: issue_event.issuable_id,
              noteable_type: issue_event.issuable_type,
              author: issue_event.actor&.to_hash,
              note: issue_event.body,
              created_at: issue_event.created_at,
              updated_at: issue_event.updated_at,
              note_id: issue_event.id
            )
            NoteImporter.new(note, project, client).execute
          end
        end
      end
    end
  end
end

View File

@ -6,6 +6,8 @@ module Gitlab
module Events
class Merged < BaseImporter
def execute(issue_event)
create_note(issue_event) if import_settings.extended_events?
create_event(issue_event)
create_state_event(issue_event)
end
@ -37,6 +39,17 @@ module Gitlab
ResourceStateEvent.create!(attrs)
end
def create_note(issue_event)
pull_request = Representation::PullRequest.from_json_hash({
merged_by: issue_event.actor&.to_hash,
merged_at: issue_event.created_at,
iid: issue_event.issuable_id,
state: :closed
})
PullRequests::MergedByImporter.new(pull_request, project, client).execute
end
end
end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
module Gitlab
  module GithubImport
    module Importer
      module Events
        # Imports a GitHub timeline "reviewed" event as a merge request review.
        # A no-op (returning true) unless the extended_events import setting
        # is enabled for the project.
        class Reviewed < BaseImporter
          def execute(issue_event)
            return true unless import_settings.extended_events?
            review = Representation::PullRequestReview.new(
              merge_request_iid: issue_event.issuable_id,
              author: issue_event.actor&.to_hash,
              note: issue_event.body.to_s,
              review_type: issue_event.state.upcase, # On timeline API, the state is in lower case
              submitted_at: issue_event.submitted_at,
              review_id: issue_event.id
            )
            # add_reviewer: false — reviewer assignment is handled separately
            # (presumably via the replayed review_requested events; confirm in
            # ReplayEventsImporter).
            PullRequests::ReviewImporter.new(review, project, client).execute({ add_reviewer: false })
          end
        end
      end
    end
  end
end

View File

@ -22,6 +22,11 @@ module Gitlab
unlabeled
].freeze
EXTENDED_SUPPORTED_EVENTS = SUPPORTED_EVENTS + %w[
commented
reviewed
].freeze
# issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`.
# project - An instance of `Project`.
# client - An instance of `Gitlab::GithubImport::Client`.
@ -65,6 +70,10 @@ module Gitlab
Gitlab::GithubImport::Importer::Events::ChangedReviewer
when 'merged'
Gitlab::GithubImport::Importer::Events::Merged
when 'commented'
Gitlab::GithubImport::Importer::Events::Commented
when 'reviewed'
Gitlab::GithubImport::Importer::Events::Reviewed
end
end
end

View File

@ -14,10 +14,12 @@ module Gitlab
@review = review
@project = project
@client = client
@merge_request = project.merge_requests.find_by_id(review.merge_request_id)
@merge_request = project.merge_requests.find_by_iid(review.merge_request_iid)
end
def execute
def execute(options = {})
options = { add_reviewer: true }.merge(options)
user_finder = GithubImport::UserFinder.new(project, client)
gitlab_user_id = user_finder.user_id_for(review.author)
@ -25,7 +27,7 @@ module Gitlab
if gitlab_user_id
add_review_note!(gitlab_user_id)
add_approval!(gitlab_user_id)
add_reviewer!(gitlab_user_id)
add_reviewer!(gitlab_user_id) if options[:add_reviewer]
else
add_complementary_review_note!(project.creator_id)
end

View File

@ -0,0 +1,60 @@
# frozen_string_literal: true
module Gitlab
module GithubImport
module Importer
# Replays timeline events that were buffered in EventsCache while an
# issuable's events were being imported. Rather than applying each
# review_requested / review_request_removed event one by one, it computes
# the net set of requested reviewers and imports only the survivors.
class ReplayEventsImporter
SUPPORTED_EVENTS = %w[review_request_removed review_requested].freeze
# replay_event - An instance of `Gitlab::GithubImport::Representation::ReplayEvent`.
# project - An instance of `Project`
# client - An instance of `Gitlab::GithubImport::Client`
def initialize(replay_event, project, client)
@project = project
@client = client
@replay_event = replay_event
end
# Looks up the issuable named by the replay event, processes its cached
# events, then clears the cache entry. Only 'MergeRequest' issuable
# types are handled; anything else (or a missing record) is a no-op.
def execute
association = case replay_event.issuable_type
when 'MergeRequest'
project.merge_requests.find_by_iid(replay_event.issuable_iid)
end
return unless association
events_cache = EventsCache.new(project)
handle_review_requests(association, events_cache.events(association))
events_cache.delete(association)
end
private
attr_reader :project, :client, :replay_event
# Folds the ordered event list into the final reviewer set: a later
# review_request_removed cancels an earlier review_requested for the
# same login (hash value set to nil, dropped by `compact`). The
# surviving users are handed to ReviewRequestImporter.
def handle_review_requests(association, events)
reviewers = {}
events.each do |event|
case event.event
when 'review_requested'
reviewers[event.requested_reviewer.login] = event.requested_reviewer.to_hash
when 'review_request_removed'
reviewers[event.requested_reviewer.login] = nil
end
end
representation = Representation::PullRequests::ReviewRequests.from_json_hash(
merge_request_id: association.id,
merge_request_iid: association.iid,
users: reviewers.values.compact
)
Importer::PullRequests::ReviewRequestImporter.new(representation, project, client).execute
end
end
end
end
end

View File

@ -30,7 +30,9 @@ module Gitlab
compose_associated_id!(parent_record, associated)
return if already_imported?(associated) || importer_class::SUPPORTED_EVENTS.exclude?(associated[:event])
return if already_imported?(associated) || supported_events.exclude?(associated[:event])
cache_event(parent_record, associated)
Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched)
@ -98,6 +100,43 @@ module Gitlab
event[:id] = "cross-reference##{issuable.iid}-in-#{event.dig(:source, :issue, :id)}"
end
def import_settings
@import_settings ||= Gitlab::GithubImport::Settings.new(project)
end
def after_batch_processed(parent)
return unless import_settings.extended_events?
events = events_cache.events(parent)
return if events.empty?
hash = Representation::ReplayEvent.new(issuable_type: parent.class.name.to_s, issuable_iid: parent.iid)
.to_hash.deep_stringify_keys
ReplayEventsWorker.perform_async(project.id, hash, job_waiter.key.to_s)
job_waiter.jobs_remaining = Gitlab::Cache::Import::Caching.increment(job_waiter_remaining_cache_key)
end
def supported_events
return importer_class::EXTENDED_SUPPORTED_EVENTS if import_settings.extended_events?
importer_class::SUPPORTED_EVENTS
end
def cache_event(parent_record, associated)
return unless import_settings.extended_events?
return if Importer::ReplayEventsImporter::SUPPORTED_EVENTS.exclude?(associated[:event])
representation = representation_class.from_api_response(associated)
events_cache.add(parent_record, representation)
end
def events_cache
@events_cache ||= EventsCache.new(project)
end
end
end
end

View File

@ -8,7 +8,8 @@ module Gitlab
expose_attribute :id, :actor, :event, :commit_id, :label_title, :old_title, :new_title,
:milestone_title, :issue, :source, :assignee, :review_requester,
:requested_reviewer, :created_at
:requested_reviewer, :created_at, :updated_at, :submitted_at,
:state, :body
# attributes - A Hash containing the event details. The keys of this
# Hash (and any nested hashes) must be symbols.
@ -51,7 +52,11 @@ module Gitlab
assignee: user_representation(event[:assignee]),
requested_reviewer: user_representation(event[:requested_reviewer]),
review_requester: user_representation(event[:review_requester]),
created_at: event[:created_at]
created_at: event[:created_at],
updated_at: event[:updated_at],
submitted_at: event[:submitted_at],
state: event[:state],
body: event[:body]
)
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true

module Gitlab
  module GithubImport
    module Representation
      # Lightweight value object naming the issuable whose cached timeline
      # events should be replayed (see Importer::ReplayEventsImporter).
      class ReplayEvent
        include ToHash
        include ExposeAttribute

        attr_reader :attributes

        expose_attribute :issuable_type, :issuable_iid

        # Builds a representation from a JSON-derived hash; keys are
        # symbolized first so string-keyed payloads are accepted.
        def self.from_json_hash(raw_hash)
          new(Representation.symbolize_hash(raw_hash))
        end

        # attributes - Hash with symbol keys (:issuable_type, :issuable_iid).
        def initialize(attributes)
          @attributes = attributes
        end

        # Identifiers used when logging import progress or failures.
        def github_identifiers
          { issuable_type: issuable_type, issuable_iid: issuable_iid }
        end
      end
    end
  end
end

View File

@ -38,8 +38,13 @@ module Gitlab
}
}.freeze
def self.stages_array
OPTIONAL_STAGES.map do |stage_name, data|
def self.stages_array(current_user)
deprecated_options = %i[single_endpoint_issue_events_import]
OPTIONAL_STAGES.filter_map do |stage_name, data|
next if deprecated_options.include?(stage_name) &&
Feature.enabled?(:github_import_extended_events, current_user)
{
name: stage_name.to_s,
label: s_(format("GitHubImport|%{text}", text: data[:label])),
@ -61,7 +66,8 @@ module Gitlab
import_data = project.build_or_assign_import_data(
data: {
optional_stages: optional_stages,
timeout_strategy: user_settings[:timeout_strategy]
timeout_strategy: user_settings[:timeout_strategy],
extended_events: user_settings[:extended_events]
},
credentials: project.import_data&.credentials
)
@ -77,6 +83,10 @@ module Gitlab
!enabled?(stage_name)
end
def extended_events?
!!project.import_data&.data&.dig('extended_events')
end
private
attr_reader :project

View File

@ -85,6 +85,7 @@ module Gitlab
yield parent_record, page
end
after_batch_processed(parent_record)
mark_parent_imported(parent_record)
end
end
@ -96,6 +97,8 @@ module Gitlab
)
end
def after_batch_processed(_parent); end
def already_imported_parents
Gitlab::Cache::Import::Caching.values_from_set(parent_imported_cache_key)
end

View File

@ -42722,7 +42722,7 @@ msgstr ""
msgid "ScanResultPolicy|Protected branch settings"
msgstr ""
msgid "ScanResultPolicy|Recommended setting"
msgid "ScanResultPolicy|Recommended settings"
msgstr ""
msgid "ScanResultPolicy|Remove all approvals with new commit"
@ -42782,7 +42782,7 @@ msgstr ""
msgid "ScanResultPolicy|When enabled, users who have contributed code to the MR are ineligible for approval."
msgstr ""
msgid "ScanResultPolicy|You have selected any protected branch option as a condition. To better protect your project, it is recommended to enable the protect branch settings. %{linkStart}Learn more.%{linkEnd}"
msgid "ScanResultPolicy|You have selected all protected branches in this policy's rules. To better protect your project, you should leave this setting enabled. %{linkStart}What are the risks of allowing pushing and force pushing?%{linkEnd}"
msgstr ""
msgid "ScanResultPolicy|any commits"

View File

@ -191,7 +191,7 @@
"remark-rehype": "^10.1.0",
"scrollparent": "^2.0.1",
"semver": "^7.3.4",
"sentrybrowser": "npm:@sentry/browser@7.81.1",
"sentrybrowser": "npm:@sentry/browser@7.88.0",
"sentrybrowser5": "npm:@sentry/browser@5.30.0",
"sortablejs": "^1.10.2",
"string-hash": "1.1.3",

View File

@ -185,7 +185,7 @@ RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
end
end
describe 'assignees' do
shared_examples 'autocomplete user mentions' do
it 'does not wrap with quotes for assignee values' do
fill_in 'Comment', with: "@#{user.username}"
@ -252,6 +252,16 @@ RSpec.describe 'GFM autocomplete', :js, feature_category: :team_planning do
end
end
it_behaves_like 'autocomplete user mentions'
context 'when mention_autocomplete_backend_filtering is disabled' do
before do
stub_feature_flags(mention_autocomplete_backend_filtering: false)
end
it_behaves_like 'autocomplete user mentions'
end
context 'if a selected value has special characters' do
it 'wraps the result in double quotes' do
fill_in 'Comment', with: "~#{label.title[0..2]}"

View File

@ -92,7 +92,7 @@ describe('content_editor/extensions/copy_paste', () => {
return Object.assign(new Event(eventName), {
clipboardData: {
types,
getData: jest.fn((type) => data[type] || defaultData[type]),
getData: jest.fn((type) => data[type] ?? defaultData[type]),
setData: jest.fn(),
clearData: jest.fn(),
},
@ -190,6 +190,17 @@ describe('content_editor/extensions/copy_paste', () => {
});
});
it('does not handle pasting when textContent is empty (eg. images)', async () => {
expect(
await triggerPasteEventHandler(
buildClipboardEvent({
types: ['text/plain'],
data: { 'text/plain': '' },
}),
),
).toBe(false);
});
describe('when pasting raw markdown source', () => {
it('shows a loading indicator while markdown is being processed', async () => {
await triggerPasteEventHandler(buildClipboardEvent());

View File

@ -224,4 +224,56 @@ RSpec.describe Gitlab::Cache::Import::Caching, :clean_gitlab_redis_cache, :clean
subject { described_class.write_if_greater('foo', value) }
end
end
# Redis-backed list append; stored values come back as strings.
describe '.list_add' do
it 'adds a value to a list' do
described_class.list_add('foo', 10)
described_class.list_add('foo', 20)
key = described_class.cache_key_for('foo')
values = Gitlab::Redis::Cache.with { |r| r.lrange(key, 0, -1) }
expect(values).to eq(%w[10 20])
end
# When a limit is given, the oldest entries are trimmed so that only the
# most recent `limit` values remain.
context 'when a limit is provided' do
it 'limits the size of the list to the number of items defined by the limit' do
described_class.list_add('foo', 10, limit: 3)
described_class.list_add('foo', 20, limit: 3)
described_class.list_add('foo', 30, limit: 3)
described_class.list_add('foo', 40, limit: 3)
key = described_class.cache_key_for('foo')
values = Gitlab::Redis::Cache.with { |r| r.lrange(key, 0, -1) }
expect(values).to eq(%w[20 30 40])
end
end
it_behaves_like 'validated redis value' do
subject { described_class.list_add('foo', value) }
end
end
describe '.values_from_list' do
# Fixed example description: the method returns an Array, not a Hash.
it 'returns an empty array when the list is empty' do
expect(described_class.values_from_list('foo')).to eq([])
end
# Insertion order is preserved, including duplicate values.
it 'returns the items stored in the list in order' do
described_class.list_add('foo', 10)
described_class.list_add('foo', 20)
described_class.list_add('foo', 10)
expect(described_class.values_from_list('foo')).to eq(%w[10 20 10])
end
end
# Deleting a key makes subsequent reads return nil.
describe '.del' do
it 'deletes the key' do
described_class.write('foo', 'value')
expect { described_class.del('foo') }.to change { described_class.read('foo') }.from('value').to(nil)
end
end
end

View File

@ -21,6 +21,15 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
]
end
# Some reasons to exempt a table:
# 1. It has no foreign key for performance reasons
# 2. It does not yet have a foreign key as the index is still being backfilled
let(:allowed_to_be_missing_foreign_key) do
[
'p_catalog_resource_sync_events.project_id'
]
end
let(:starting_from_milestone) { 16.6 }
let(:allowed_sharding_key_referenced_tables) { %w[projects namespaces organizations] }
@ -38,6 +47,19 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
expect(column_exists?(table_name, column_name)).to eq(true),
"Could not find sharding key column #{table_name}.#{column_name}"
expect(referenced_table_name).to be_in(allowed_sharding_key_referenced_tables)
if allowed_to_be_missing_foreign_key.include?("#{table_name}.#{column_name}")
expect(has_foreign_key?(table_name, column_name)).to eq(false),
"The column `#{table_name}.#{column_name}` has a foreign key so cannot be " \
"allowed_to_be_missing_foreign_key. " \
"If this is a foreign key referencing the specified table #{referenced_table_name} " \
"then you must remove it from allowed_to_be_missing_foreign_key"
else
expect(has_foreign_key?(table_name, column_name, to_table_name: referenced_table_name)).to eq(true),
"Missing a foreign key constraint for `#{table_name}.#{column_name}` " \
"referencing #{referenced_table_name}. " \
"All sharding keys must have a foreign key constraint"
end
end
end
end
@ -137,6 +159,25 @@ RSpec.describe 'new tables missing sharding_key', feature_category: :cell do
result.count > 0
end
def has_foreign_key?(from_table_name, column_name, to_table_name: nil)
where_clause = {
constrained_table_name: from_table_name,
constrained_columns: [column_name]
}
where_clause[:referenced_table_name] = to_table_name if to_table_name
fk = ::Gitlab::Database::PostgresForeignKey.where(where_clause).first
lfk = ::Gitlab::Database::LooseForeignKeys.definitions.find do |d|
d.from_table == from_table_name &&
(to_table_name.nil? || d.to_table == to_table_name) &&
d.options[:column] == column_name
end
fk.present? || lfk.present?
end
def column_exists?(table_name, column_name)
sql = <<~SQL
SELECT 1

View File

@ -0,0 +1,79 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for the Redis-backed cache of GitHub timeline events, keyed by
# project id + issuable class + issuable iid (e.g. github-importer/events/1/Issue/2).
RSpec.describe Gitlab::GithubImport::EventsCache, :clean_gitlab_redis_cache, feature_category: :importers do
let(:project) { build_stubbed(:project, id: 1) }
let(:issue) { build_stubbed(:issue, iid: 2) }
let(:event_cache) { described_class.new(project) }
# Wraps a raw hash in the IssueEvent representation the cache stores.
def build_event(event)
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(event)
end
describe '#add' do
it 'adds event to cache' do
expect(Gitlab::Cache::Import::Caching).to receive(:list_add).with(
'github-importer/events/1/Issue/2',
an_instance_of(String),
limit: described_class::MAX_NUMBER_OF_EVENTS
)
event_cache.add(issue, build_event({ event: 'closed' }))
end
# Oversized events are dropped (with a warning) instead of cached.
context 'when events is too large to cache' do
before do
stub_const("#{described_class}::MAX_EVENT_SIZE", 1.byte)
end
it 'does not add event to cache' do
expect(Gitlab::Cache::Import::Caching).not_to receive(:list_add)
expect(Gitlab::GithubImport::Logger).to receive(:warn).with(
message: 'Event too large to cache',
project_id: project.id,
github_identifiers: {
event: 'closed',
id: '99',
issuable_iid: '2'
}
)
event_cache.add(issue, build_event({ event: 'closed', id: '99', issue: { number: '2' } }))
end
end
end
describe '#events' do
# Events come back sorted by created_at ascending, regardless of the
# order in which they were added.
it 'retrieves the list of events from the cache in the correct order' do
key = 'github-importer/events/1/Issue/2'
Gitlab::Cache::Import::Caching.list_add(key, { event: 'merged', created_at: '2023-01-02T00:00:00Z' }.to_json)
Gitlab::Cache::Import::Caching.list_add(key, { event: 'closed', created_at: '2023-01-03T00:00:00Z' }.to_json)
Gitlab::Cache::Import::Caching.list_add(key, { event: 'commented', created_at: '2023-01-01T00:00:00Z' }.to_json)
events = event_cache.events(issue).map(&:to_hash)
expect(events).to match([
a_hash_including(event: 'commented', created_at: '2023-01-01 00:00:00 UTC'),
a_hash_including(event: 'merged', created_at: '2023-01-02 00:00:00 UTC'),
a_hash_including(event: 'closed', created_at: '2023-01-03 00:00:00 UTC')
])
end
context 'when no event was added' do
it 'returns an empty array' do
expect(event_cache.events(issue)).to eq([])
end
end
end
describe '#delete' do
it 'deletes the list' do
expect(Gitlab::Cache::Import::Caching).to receive(:del).with('github-importer/events/1/Issue/2')
event_cache.delete(issue)
end
end
end

View File

@ -0,0 +1,69 @@
# frozen_string_literal: true
require 'spec_helper'
# Verifies that a GitHub `commented` timeline event is imported as a Note on
# both Issues and MergeRequests, and only when extended events is enabled.
RSpec.describe Gitlab::GithubImport::Importer::Events::Commented, feature_category: :importers do
subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:issuable) { create(:issue, project: project) }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.new(
id: 1196850910,
actor: { id: user.id, login: user.username },
event: 'commented',
created_at: '2022-07-27T14:41:11Z',
updated_at: '2022-07-27T14:41:11Z',
body: 'This is my note',
issue: { number: issuable.iid, pull_request: issuable.is_a?(MergeRequest) }
)
end
let(:extended_events) { true }
before do
# Bypass the finders so no GitHub API lookups are attempted.
allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
allow(finder).to receive(:database_id).and_return(issuable.id)
end
allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
end
allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
allow(setting).to receive(:extended_events?).and_return(extended_events)
end
end
# Shared between the Issue and MergeRequest contexts below; `issuable`
# is overridden per context.
shared_examples 'new note' do
it 'creates a note' do
expect { importer.execute(issue_event) }.to change { Note.count }.by(1)
expect(issuable.notes.last).to have_attributes(
note: 'This is my note',
author_id: user.id,
noteable_type: issuable.class.name.to_s
)
end
context 'when extended_events is disabled' do
let(:extended_events) { false }
it 'does not create a note' do
expect { importer.execute(issue_event) }.not_to change { Note.count }
end
end
end
context 'with Issue' do
it_behaves_like 'new note'
end
context 'with MergeRequest' do
let(:issuable) { create(:merge_request, source_project: project, target_project: project) }
it_behaves_like 'new note'
end
end

View File

@ -11,6 +11,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:commit_id) { nil }
let(:extended_events) { false }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
@ -32,6 +33,9 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
end
allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
allow(setting).to receive(:extended_events?).and_return(extended_events)
end
end
it 'creates expected event and state event' do
@ -71,4 +75,27 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Merged, feature_category:
expect(state_event.source_commit).to eq commit_id[0..40]
end
end
describe 'extended events' do
context 'when using extended events' do
let(:extended_events) { true }
it 'creates a merged by note' do
expect { importer.execute(issue_event) }.to change { Note.count }.by(1)
last_note = merge_request.notes.last
expect(last_note.created_at).to eq(issue_event.created_at)
expect(last_note.author).to eq(project.owner)
expect(last_note.note).to eq("*Merged by: #{user.username} at #{issue_event.created_at}*")
end
end
context 'when not using extended events' do
let(:extended_events) { false }
it 'does not create a merged by note' do
expect { importer.execute(issue_event) }.not_to change { Note.count }
end
end
end
end

View File

@ -0,0 +1,85 @@
# frozen_string_literal: true

require 'spec_helper'

# Verifies that a GitHub `reviewed` timeline event is imported as a review
# note (plus an approval when the review state is 'approved'), without adding
# the author to the MR's reviewer list, and only when extended events is on.
RSpec.describe Gitlab::GithubImport::Importer::Events::Reviewed, feature_category: :importers do
  subject(:importer) { described_class.new(project, client) }

  let_it_be(:project) { create(:project, :repository) }
  let_it_be(:user) { create(:user) }

  let(:client) { instance_double('Gitlab::GithubImport::Client') }
  let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
  let(:extended_events) { true }
  let(:issue_event) do
    Gitlab::GithubImport::Representation::IssueEvent.new(
      id: 1196850910,
      actor: { id: user.id, login: user.username },
      event: 'reviewed',
      submitted_at: '2022-07-27T14:41:11Z',
      body: 'This is my review',
      state: state,
      issue: { number: merge_request.iid, pull_request: true }
    )
  end

  let(:state) { 'commented' }

  before do
    # Bypass the finders so no GitHub API lookups are attempted.
    allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
      allow(finder).to receive(:database_id).and_return(merge_request.id)
    end
    allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
      allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
    end
    allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
      allow(setting).to receive(:extended_events?).and_return(extended_events)
    end
  end

  it 'creates a review note', :aggregate_failures do
    expect { importer.execute(issue_event) }.to change { Note.count }.by(1)

    last_note = merge_request.notes.last
    expect(last_note.note).to include("This is my review")
    expect(last_note.author).to eq(user)
    expect(last_note.created_at).to eq(issue_event.submitted_at)
  end

  it 'does not create a reviewer for the Merge Request', :aggregate_failures do
    expect { importer.execute(issue_event) }.not_to change { MergeRequestReviewer.count }
  end

  # Fixed description typo: the condition under test is the review *state*
  # (`let(:state)`), not "stage".
  context 'when state is approved' do
    let(:state) { 'approved' }

    it 'creates an approval for the Merge Request', :aggregate_failures do
      expect { importer.execute(issue_event) }.to change { Approval.count }.by(1).and change { Note.count }.by(2)

      expect(merge_request.approved_by_users.reload).to include(user)
      expect(merge_request.approvals.last.created_at).to eq(issue_event.submitted_at)

      note = merge_request.notes.where(system: false).last
      expect(note.note).to include("This is my review")
      expect(note.author).to eq(user)
      expect(note.created_at).to eq(issue_event.submitted_at)

      system_note = merge_request.notes.where(system: true).last
      expect(system_note.note).to eq('approved this merge request')
      expect(system_note.author).to eq(user)
      expect(system_note.created_at).to eq(issue_event.submitted_at)
      expect(system_note.system_note_metadata.action).to eq('approved')
    end
  end

  context 'when extended events is false' do
    let(:extended_events) { false }

    it 'does nothing' do
      expect { importer.execute(issue_event) }
        .to not_change { Note.count }
        .and not_change { Approval.count }
    end
  end
end

View File

@ -115,6 +115,18 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Merged
end
context "when it's commented issue event" do
let(:event_name) { 'commented' }
it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Commented
end
context "when it's reviewed issue event" do
let(:event_name) { 'reviewed' }
it_behaves_like 'triggers specific event importer', Gitlab::GithubImport::Importer::Events::Reviewed
end
context "when it's unknown issue event" do
let(:event_name) { 'fake' }

View File

@ -13,7 +13,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter, feature_cate
struct = Struct.new(
:id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone, :source,
:assignee, :assigner, :review_requester, :requested_reviewer, :issue, :created_at, :performed_via_github_app,
keyword_init: true
:body, :updated_at, :submitted_at, :state, keyword_init: true
)
struct.new(id: rand(10), event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
end

View File

@ -30,6 +30,12 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter,
expect(merge_request.reviewers).to contain_exactly(author)
end
context 'when add_reviewer option is false' do
it 'does not change Merge Request reviewers' do
expect { subject.execute(add_reviewer: false) }.not_to change { MergeRequestReviewer.count }
end
end
context 'when reviewer already exists' do
before do
create(
@ -309,6 +315,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequests::ReviewImporter,
extra.reverse_merge(
author: { id: 999, login: 'author' },
merge_request_id: merge_request.id,
merge_request_iid: merge_request.iid,
review_type: type,
note: 'note',
submitted_at: submitted_at.to_s

View File

@ -0,0 +1,108 @@
# frozen_string_literal: true

require 'spec_helper'

# Verifies that ReplayEventsImporter reduces cached review_requested /
# review_request_removed events to the net reviewer set and imports it.
RSpec.describe Gitlab::GithubImport::Importer::ReplayEventsImporter, feature_category: :importers do
  let_it_be(:association) { create(:merged_merge_request) }
  let_it_be(:project) { association.project }

  # Fixed: the original referenced non-existent factories (:user1, :user2,
  # :user3); all three are plain users.
  let(:user1) { build(:user) }
  let(:user2) { build(:user) }
  let(:user3) { build(:user) }
  let(:client) { instance_double(Gitlab::GithubImport::Client) }
  let(:representation) do
    Gitlab::GithubImport::Representation::ReplayEvent.new(
      issuable_type: association.class.name.to_s, issuable_iid: association.iid
    )
  end

  let(:importer) { described_class.new(representation, project, client) }

  describe '#execute' do
    before do
      # user1 is requested then removed; user2 is requested, removed, then
      # requested again; user3 is only requested. Net result: user2 + user3.
      events = [
        {
          requested_reviewer: { id: 1, login: 'user1' },
          event: 'review_requested'
        },
        {
          requested_reviewer: { id: 1, login: 'user1' },
          event: 'review_request_removed'
        },
        {
          requested_reviewer: { id: 2, login: 'user2' },
          event: 'review_requested'
        },
        {
          requested_reviewer: { id: 2, login: 'user2' },
          event: 'review_request_removed'
        },
        {
          requested_reviewer: { id: 2, login: 'user2' },
          event: 'review_requested'
        },
        {
          requested_reviewer: { id: 3, login: 'user3' },
          event: 'review_requested'
        }
      ]

      representations = events.map { |e| Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(e) }

      allow_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
        allow(events_cache).to receive(:events).with(association).and_return(representations)
      end
    end

    context 'when association is a MergeRequest' do
      it 'imports reviewers' do
        representation = instance_double(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests)

        expect(Gitlab::GithubImport::Representation::PullRequests::ReviewRequests).to receive(:from_json_hash).with(
          merge_request_id: association.id,
          merge_request_iid: association.iid,
          users: [
            { id: 2, login: 'user2' },
            { id: 3, login: 'user3' }
          ]
        ).and_return(representation)

        # Fixed variable typo: review_impoter -> review_importer.
        expect_next_instance_of(
          Gitlab::GithubImport::Importer::PullRequests::ReviewRequestImporter, anything, project, client
        ) do |review_importer|
          expect(review_importer).to receive(:execute)
        end

        importer.execute
      end
    end

    context 'when association is not found' do
      let(:representation) do
        Gitlab::GithubImport::Representation::ReplayEvent.new(
          issuable_type: association.class.name.to_s, issuable_iid: -1
        )
      end

      it 'does not read events' do
        expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)

        importer.execute
      end
    end

    # Fixed description typo: issueable -> issuable.
    context 'when issuable type is not supported' do
      let(:representation) do
        Gitlab::GithubImport::Representation::ReplayEvent.new(
          issuable_type: 'Issue', issuable_iid: association.iid
        )
      end

      it 'does not read events' do
        expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)

        importer.execute
      end
    end
  end
end

View File

@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter, feature_category: :importers do
let(:client) { double }
let(:client) { Gitlab::GithubImport::Client.new('token') }
let_it_be(:project) { create(:project, :import_started, import_source: 'http://somegithub.com') }
let_it_be(:project) { create(:project, :import_started, import_source: 'foo/bar') }
let!(:issuable) { create(:issue, project: project) }
@ -88,23 +88,32 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:issue_event) do
struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
struct.new(id: 1, event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
struct.new(id: 1, event: event_name, created_at: '2022-04-26 18:30:53 UTC')
end
let(:event_name) { 'closed' }
let(:page_events) { [issue_event] }
let(:page) do
instance_double(
Gitlab::GithubImport::Client::Page,
number: 1, objects: [issue_event]
number: 1, objects: page_events
)
end
let(:page_counter) { instance_double(Gitlab::GithubImport::PageCounter) }
let(:extended_events) { true }
before do
allow(Gitlab::Redis::SharedState).to receive(:with).and_return('OK')
allow(client).to receive(:each_page).once.with(:issue_timeline,
project.import_source, issuable.iid, { state: 'all', sort: 'created', direction: 'asc', page: 1 }
).and_yield(page)
allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
allow(setting).to receive(:extended_events?).and_return(extended_events)
end
end
context 'with issues' do
@ -190,10 +199,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
end
context 'when event is not supported' do
let(:issue_event) do
struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
struct.new(id: 1, event: 'not_supported_event', created_at: '2022-04-26 18:30:53 UTC')
end
let(:event_name) { 'not_supported_event' }
it "doesn't process this event" do
counter = 0
@ -201,5 +207,156 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
expect(counter).to eq 0
end
end
describe 'save events' do
shared_examples 'saves event' do
it 'saves event' do
expect(Gitlab::GithubImport::Representation::IssueEvent).to receive(:from_api_response).with(issue_event.to_h)
.and_call_original
expect_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
expect(events_cache).to receive(:add).with(
issuable,
an_instance_of(Gitlab::GithubImport::Representation::IssueEvent)
)
end
subject.each_object_to_import { |event| event }
end
end
context 'when event is review_requested' do
let(:event_name) { 'review_requested' }
it_behaves_like 'saves event'
end
context 'when event is review_request_removed' do
let(:event_name) { 'review_request_removed' }
it_behaves_like 'saves event'
end
context 'when event is closed' do
let(:event_name) { 'closed' }
it 'does not save event' do
expect_next_instance_of(Gitlab::GithubImport::EventsCache) do |events_cache|
expect(events_cache).not_to receive(:add)
end
subject.each_object_to_import { |event| event }
end
end
context 'when extended_events is disabled' do
let(:event_name) { 'review_requested' }
let(:extended_events) { false }
it 'does not save event' do
expect(Gitlab::GithubImport::EventsCache).not_to receive(:new)
subject.each_object_to_import { |event| event }
end
end
end
describe 'after batch processed' do
context 'when events should be replayed' do
let(:event_name) { 'review_requested' }
it 'enqueues worker to replay events' do
allow(Gitlab::JobWaiter).to receive(:generate_key).and_return('job_waiter_key')
expect(Gitlab::GithubImport::ReplayEventsWorker).to receive(:perform_async)
.with(
project.id,
{ 'issuable_type' => issuable.class.name.to_s, 'issuable_iid' => issuable.iid },
'job_waiter_key'
)
subject.each_object_to_import { |event| event }
end
end
context 'when events are not relevant' do
let(:event_name) { 'closed' }
it 'does not replay events' do
expect(Gitlab::GithubImport::ReplayEventsWorker).not_to receive(:perform_async)
subject.each_object_to_import { |event| event }
end
end
context 'when extended_events is disabled' do
let(:extended_events) { false }
it 'does not replay events' do
expect(Gitlab::GithubImport::ReplayEventsWorker).not_to receive(:perform_async)
subject.each_object_to_import { |event| event }
end
end
end
end
describe '#execute', :clean_gitlab_redis_cache do
let(:extended_events) { false }
before do
allow_next_instance_of(Gitlab::GithubImport::Settings) do |setting|
allow(setting).to receive(:extended_events?).and_return(extended_events)
end
stub_request(:get, 'https://api.github.com/rate_limit')
.to_return(status: 200, headers: { 'X-RateLimit-Limit' => 5000, 'X-RateLimit-Remaining' => 5000 })
events = [
{
id: 1,
event: 'review_requested',
created_at: '2022-04-26 18:30:53 UTC',
issue: {
number: issuable.iid,
pull_request: true
}
}
]
endpoint = 'https://api.github.com/repos/foo/bar/issues/1/timeline' \
'?direction=asc&page=1&per_page=100&sort=created&state=all'
stub_request(:get, endpoint)
.to_return(status: 200, body: events.to_json, headers: { 'Content-Type' => 'application/json' })
end
context 'when extended_events is disabled' do
it 'enqueues importer worker' do
expect { subject.execute }.to change { Gitlab::GithubImport::ReplayEventsWorker.jobs.size }.by(0)
.and change { Gitlab::GithubImport::ImportIssueEventWorker.jobs.size }.by(1)
end
it 'returns job waiter with the correct remaining jobs count' do
job_waiter = subject.execute
expect(job_waiter.jobs_remaining).to eq(1)
end
end
context 'when extended_events is enabled' do
let(:extended_events) { true }
it 'enqueues importer worker and replay worker' do
expect { subject.execute }.to change { Gitlab::GithubImport::ReplayEventsWorker.jobs.size }.by(1)
.and change { Gitlab::GithubImport::ImportIssueEventWorker.jobs.size }.by(1)
end
it 'returns job waiter with the correct remaining jobs count' do
job_waiter = subject.execute
expect(job_waiter.jobs_remaining).to eq(2)
end
end
end
end

View File

@ -168,8 +168,8 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
describe '.from_api_response' do
let(:response) do
event_resource = Struct.new(
:id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone,
:source, :assignee, :requested_reviewer, :review_requester, :issue, :created_at,
:id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone, :state, :body,
:source, :assignee, :requested_reviewer, :review_requester, :issue, :created_at, :updated_at, :submitted_at,
:performed_via_github_app,
keyword_init: true
)

View File

@ -0,0 +1,24 @@
# frozen_string_literal: true

require 'fast_spec_helper'

RSpec.describe Gitlab::GithubImport::Representation::ReplayEvent, feature_category: :importers do
  describe '.from_json_hash' do
    it 'returns an instance of ReplayEvent' do
      # Building from a JSON-style hash must yield the representation class itself.
      expect(described_class.from_json_hash(issuable_iid: 1, issuable_type: 'MergeRequest'))
        .to be_an_instance_of(described_class)
    end
  end

  describe '#github_identifiers' do
    it 'returns a hash with needed identifiers' do
      representation = described_class.new(issuable_type: 'MergeRequest', issuable_iid: 1)

      # The identifiers hash mirrors the attributes the representation was built with.
      expect(representation.github_identifiers).to eq(issuable_type: 'MergeRequest', issuable_iid: 1)
    end
  end
end

View File

@ -20,12 +20,6 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
let(:expected_list) do
stages = described_class::OPTIONAL_STAGES
[
{
name: 'single_endpoint_issue_events_import',
label: stages[:single_endpoint_issue_events_import][:label],
selected: false,
details: stages[:single_endpoint_issue_events_import][:details]
},
{
name: 'single_endpoint_notes_import',
label: stages[:single_endpoint_notes_import][:label],
@ -48,7 +42,31 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
end
it 'returns stages list as array' do
expect(described_class.stages_array).to match_array(expected_list)
expect(described_class.stages_array(project.owner)).to match_array(expected_list)
end
context 'when `github_import_extended_events` feature flag is disabled' do
let(:expected_list_with_deprecated_options) do
stages = described_class::OPTIONAL_STAGES
expected_list.concat(
[
{
name: 'single_endpoint_issue_events_import',
label: stages[:single_endpoint_issue_events_import][:label],
selected: false,
details: stages[:single_endpoint_issue_events_import][:details]
}
])
end
before do
stub_feature_flags(github_import_extended_events: false)
end
it 'returns stages list as array' do
expect(described_class.stages_array(project.owner)).to match_array(expected_list_with_deprecated_options)
end
end
end
@ -99,4 +117,24 @@ RSpec.describe Gitlab::GithubImport::Settings, feature_category: :importers do
expect(settings.disabled?(:collaborators_import)).to eq true
end
end
# Verifies that Settings#extended_events? reads the flag from the project's
# import_data and defaults to false when the key is absent.
describe '#extended_events?' do
it 'when extended_events is set to true' do
project.build_or_assign_import_data(data: { extended_events: true })
expect(settings.extended_events?).to eq(true)
end
it 'when extended_events is set to false' do
project.build_or_assign_import_data(data: { extended_events: false })
expect(settings.extended_events?).to eq(false)
end
it 'when extended_events is not present' do
# Missing key must be treated as disabled, not nil.
project.build_or_assign_import_data(data: {})
expect(settings.extended_events?).to eq(false)
end
end
end

View File

@ -1130,6 +1130,28 @@ RSpec.describe Namespace, feature_category: :groups_and_projects do
end
end
# Covers matching and result ordering of the Namespace autocomplete scope:
# partial full-path matches, cross-word name matches, prefix-priority sorting,
# and the full-path tiebreak.
describe '.gfm_autocomplete_search' do
let_it_be(:parent_group) { create(:group, path: 'parent', name: 'Parent') }
let_it_be(:group_1) { create(:group, parent: parent_group, path: 'somepath', name: 'Your Group') }
let_it_be(:group_2) { create(:group, path: 'noparent', name: 'My Group') }
it 'returns partial matches on full path' do
expect(described_class.gfm_autocomplete_search('parent/som')).to eq([group_1])
end
it 'returns matches on full name across multiple words' do
# 'yourgr' spans the word boundary in the name "Your Group".
expect(described_class.gfm_autocomplete_search('yourgr')).to eq([group_1])
end
it 'prioritizes sorting of matches that start with the query' do
expect(described_class.gfm_autocomplete_search('pare')).to eq([parent_group, group_1, group_2])
end
it 'falls back to sorting by full path' do
# Neither match starts with 'group', so ordering is by full path.
expect(described_class.gfm_autocomplete_search('group')).to eq([group_2, group_1])
end
end
describe '.with_statistics' do
let_it_be(:namespace) { create(:namespace) }

View File

@ -3381,6 +3381,27 @@ RSpec.describe User, feature_category: :user_profile do
end
end
# Mirrors the Namespace autocomplete spec for User: username partials,
# cross-word name matches, prefix-priority sorting, and the username tiebreak.
describe '.gfm_autocomplete_search' do
let_it_be(:user_1) { create(:user, username: 'someuser', name: 'John Doe') }
let_it_be(:user_2) { create(:user, username: 'userthomas', name: 'Thomas Person') }
it 'returns partial matches on username' do
expect(described_class.gfm_autocomplete_search('some')).to eq([user_1])
end
it 'returns matches on name across multiple words' do
# 'johnd' spans the word boundary in the name "John Doe".
expect(described_class.gfm_autocomplete_search('johnd')).to eq([user_1])
end
it 'prioritizes sorting of matches that start with the query' do
expect(described_class.gfm_autocomplete_search('user')).to eq([user_2, user_1])
end
it 'falls back to sorting by username' do
# Neither username starts with 'ser', so ordering is by username.
expect(described_class.gfm_autocomplete_search('ser')).to eq([user_1, user_2])
end
end
describe '.user_search_minimum_char_limit' do
it 'returns true' do
expect(described_class.user_search_minimum_char_limit).to be(true)

View File

@ -10,7 +10,8 @@ RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projec
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:subproject) { create(:project, group: subgroup) }
let(:service) { described_class.new(group, developer) }
let(:params) { {} }
let(:service) { described_class.new(group, developer, params) }
subject(:service_result) { service.execute(nil) }
@ -74,6 +75,19 @@ RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projec
it { is_expected.to include(private_group_member.username) }
end
# Verifies that a `search` param filters the returned participants to
# matching members only.
context 'when search param is given' do
let(:params) { { search: 'johnd' } }
let_it_be(:member_1) { create(:user, name: 'John Doe').tap { |u| group.add_guest(u) } }
# NOTE(review): trailing space in 'Jane Doe ' looks accidental — confirm it
# is not load-bearing for the non-matching control user.
let_it_be(:member_2) { create(:user, name: 'Jane Doe ').tap { |u| group.add_guest(u) } }
it 'only returns matching members' do
users = service_result.select { |hash| hash[:type].eql?('User') }
expect(users.pluck(:username)).to eq([member_1.username])
end
end
end
def user_to_autocompletable(user)

View File

@ -31,6 +31,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
allow(settings)
.to receive(:write)
.with(
extended_events: true,
optional_stages: optional_stages,
timeout_strategy: timeout_strategy
)
@ -92,6 +93,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
expect(settings)
.to have_received(:write)
.with(optional_stages: nil,
extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@ -117,6 +119,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@ -149,6 +152,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: nil,
extended_events: true,
timeout_strategy: timeout_strategy
)
expect_snowplow_event(
@ -185,6 +189,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
extended_events: true,
timeout_strategy: timeout_strategy
)
end
@ -200,6 +205,7 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
extended_events: true,
timeout_strategy: timeout_strategy
)
end
@ -213,10 +219,25 @@ RSpec.describe Import::GithubService, feature_category: :importers do
.to have_received(:write)
.with(
optional_stages: optional_stages,
extended_events: true,
timeout_strategy: timeout_strategy
)
end
end
# When the feature flag is off, the service must persist extended_events: false
# in the project's import_data.
# Fixes two description typos: a stray double backtick after the flag name and
# 'extend_events' -> 'extended_events'.
context 'when `github_import_extended_events` feature flag is disabled' do
  before do
    stub_feature_flags(github_import_extended_events: false)
  end

  it 'saves extended_events to import_data' do
    expect(settings)
      .to receive(:write)
      .with(a_hash_including(extended_events: false))

    subject.execute(access_params, :github)
  end
end
end
context 'when import source is disabled' do

View File

@ -62,13 +62,20 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
it 'sets issuables with new promoted milestone' do
issue = create(:issue, milestone: milestone, project: project)
create(:resource_milestone_event, issue: issue, milestone: milestone)
merge_request = create(:merge_request, milestone: milestone, source_project: project)
create(:resource_milestone_event, merge_request: merge_request, milestone: milestone)
promoted_milestone = service.execute(milestone)
expect(promoted_milestone).to be_group_milestone
expect(issue.reload.milestone).to eq(promoted_milestone)
expect(merge_request.reload.milestone).to eq(promoted_milestone)
expect(ResourceMilestoneEvent.where(milestone_id: promoted_milestone).count).to eq(2)
expect(ResourceMilestoneEvent.where(milestone_id: milestone).count).to eq(0)
end
end
@ -101,9 +108,14 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
it 'sets all issuables with new promoted milestone' do
issue = create(:issue, milestone: milestone, project: project)
create(:resource_milestone_event, issue: issue, milestone: milestone)
issue_2 = create(:issue, milestone: milestone_2, project: project_2)
create(:resource_milestone_event, issue: issue_2, milestone: milestone_2)
merge_request = create(:merge_request, milestone: milestone, source_project: project)
create(:resource_milestone_event, merge_request: merge_request, milestone: milestone)
merge_request_2 = create(:merge_request, milestone: milestone_2, source_project: project_2)
create(:resource_milestone_event, merge_request: merge_request_2, milestone: milestone_2)
promoted_milestone = service.execute(milestone)
@ -111,6 +123,10 @@ RSpec.describe Milestones::PromoteService, feature_category: :team_planning do
expect(issue_2.reload.milestone).to eq(promoted_milestone)
expect(merge_request.reload.milestone).to eq(promoted_milestone)
expect(merge_request_2.reload.milestone).to eq(promoted_milestone)
expect(ResourceMilestoneEvent.where(milestone_id: promoted_milestone).count).to eq(4)
expect(ResourceMilestoneEvent.where(milestone_id: milestone).count).to eq(0)
expect(ResourceMilestoneEvent.where(milestone_id: milestone_2).count).to eq(0)
end
end
end

View File

@ -8,14 +8,18 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
let_it_be(:project) { create(:project, :public) }
let_it_be(:noteable) { create(:issue, project: project) }
let(:params) { {} }
before_all do
project.add_developer(user)
end
before do
stub_feature_flags(disable_all_mention: false)
end
def run_service
described_class.new(project, user).execute(noteable)
described_class.new(project, user, params).execute(noteable)
end
it 'returns results in correct order' do
@ -129,6 +133,16 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
group_1.full_path, subgroup.full_path, group_2.full_path
])
end
context 'when search param is given' do
let(:params) { { search: 'bb' } }
it 'only returns matching groups' do
expect(group_items.pluck(:username)).to eq([
group_1.full_path, subgroup.full_path
])
end
end
end
end
@ -229,5 +243,17 @@ RSpec.describe Projects::ParticipantsService, feature_category: :groups_and_proj
end
end
end
# Verifies that a `search` param passed to the service filters project
# participants to matching members only.
context 'when search param is given' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:member_1) { create(:user, name: 'John Doe').tap { |u| project.add_guest(u) } }
# NOTE(review): trailing space in 'Jane Doe ' looks accidental — confirm it
# is not load-bearing for the non-matching control user.
let_it_be(:member_2) { create(:user, name: 'Jane Doe ').tap { |u| project.add_guest(u) } }
let(:service) { described_class.new(project, create(:user), search: 'johnd') }
it 'only returns matching members' do
expect(usernames).to eq([member_1.username])
end
end
end
end

View File

@ -280,6 +280,7 @@ RSpec.describe 'Every Sidekiq worker', feature_category: :shared do
'Gitlab::GithubImport::PullRequests::ImportMergedByWorker' => 5,
'Gitlab::GithubImport::ImportPullRequestWorker' => 5,
'Gitlab::GithubImport::RefreshImportJidWorker' => 5,
'Gitlab::GithubImport::ReplayEventsWorker' => 5,
'Gitlab::GithubImport::Stage::FinishImportWorker' => 6,
'Gitlab::GithubImport::Stage::ImportBaseDataWorker' => 6,
'Gitlab::GithubImport::Stage::ImportIssuesAndDiffNotesWorker' => 6,

View File

@ -0,0 +1,32 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::GithubImport::ReplayEventsWorker, feature_category: :importers do
  let_it_be(:project) { create(:project, import_state: create(:import_state, :started)) }

  let(:client) { instance_double(Gitlab::GithubImport::Client) }
  let(:worker) { described_class.new }

  describe '#import' do
    it 'call replay events importer' do
      event_hash = { 'issuable_iid' => 1, 'issuable_type' => 'Issue' }

      # The worker must hand the deserialized representation to the importer.
      expect_next_instance_of(
        Gitlab::GithubImport::Importer::ReplayEventsImporter,
        an_instance_of(Gitlab::GithubImport::Representation::ReplayEvent),
        project,
        client
      ) do |importer|
        expect(importer).to receive(:execute)
      end

      # Replay events are bookkeeping and must not inflate import counters.
      expect(Gitlab::GithubImport::ObjectCounter).not_to receive(:increment)

      worker.import(project, client, event_hash)
    end
  end

  describe '#object_type' do
    it { expect(worker.object_type).to eq(:replay_event) }
  end
end

View File

@ -9,9 +9,12 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker, feature_cat
let!(:group) { create(:group, projects: [project]) }
let(:settings) { ::Gitlab::GithubImport::Settings.new(project) }
let(:stage_enabled) { true }
let(:extended_events) { false }
before do
settings.write({ optional_stages: { single_endpoint_issue_events_import: stage_enabled } })
settings.write({
optional_stages: { single_endpoint_issue_events_import: stage_enabled }, extended_events: extended_events
})
end
it_behaves_like Gitlab::GithubImport::StageMethods
@ -48,6 +51,18 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker, feature_cat
worker.import(client, project)
end
context 'when extended_events is enabled' do
let(:extended_events) { true }
it 'does not skip the stage' do
expect_next_instance_of(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter) do |importer|
expect(importer).to receive(:execute).and_return(Gitlab::JobWaiter.new)
end
worker.import(client, project)
end
end
end
end
end

View File

@ -1973,14 +1973,23 @@
resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.4.1.tgz#8311b77e6cce322865ba12ada8c3779369610d18"
integrity sha512-eAhItDX9yQtZVM3yvXS/VR3qPqcnXvnLyx1pLXl4JzyNMBNO3KC986t/iAg2zcMzpAp9JSvxB5VZGnBiNoA98w==
"@sentry-internal/tracing@7.81.1":
version "7.81.1"
resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.81.1.tgz#1180365cd8a9e18cb0f92e1ea970161840ec0e2e"
integrity sha512-E5xm27xrLXL10knH2EWDQsQYh5nb4SxxZzJ3sJwDGG9XGKzBdlp20UUhKqx00wixooVX9uCj3e4Jg8SvNB1hKg==
"@sentry-internal/feedback@7.88.0":
version "7.88.0"
resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-7.88.0.tgz#fa4db4a27d1fa7fe51dc67af185b13519d7fbc76"
integrity sha512-lbK6jgO1I0M96nZQ99mcLSZ55ebwPAP6LhEWhkmc+eAfy97VpiY+qsbmgsmOzCEPqMmEUCEcI0rEZ7fiye2v2Q==
dependencies:
"@sentry/core" "7.81.1"
"@sentry/types" "7.81.1"
"@sentry/utils" "7.81.1"
"@sentry/core" "7.88.0"
"@sentry/types" "7.88.0"
"@sentry/utils" "7.88.0"
"@sentry-internal/tracing@7.88.0":
version "7.88.0"
resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.88.0.tgz#c820bde835c4af576781f8b818eed5085e417927"
integrity sha512-xXQdcYhsS+ourzJHjXNjZC9zakuc97udmpgaXRjEP7FjPYclIx+YXwgFBdHM2kzAwZLFOsEce5dr46GVXUDfZw==
dependencies:
"@sentry/core" "7.88.0"
"@sentry/types" "7.88.0"
"@sentry/utils" "7.88.0"
"@sentry/core@5.30.0":
version "5.30.0"
@ -1993,13 +2002,13 @@
"@sentry/utils" "5.30.0"
tslib "^1.9.3"
"@sentry/core@7.81.1":
version "7.81.1"
resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.81.1.tgz#082fd9122bf9a488c8e05b1754724ddbc2d5cf30"
integrity sha512-tU37yAmckOGCw/moWKSwekSCWWJP15O6luIq+u7wal22hE88F3Vc5Avo8SeF3upnPR+4ejaOFH+BJTr6bgrs6Q==
"@sentry/core@7.88.0":
version "7.88.0"
resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.88.0.tgz#46f1526e9b98de96a0e93fd69917a990db5d5a37"
integrity sha512-Jzbb7dcwiCO7kI0a1w+32UzWxbEn2OcZWzp55QMEeAh6nZ/5CXhXwpuHi0tW7doPj+cJdmxMTMu9LqMVfdGkzQ==
dependencies:
"@sentry/types" "7.81.1"
"@sentry/utils" "7.81.1"
"@sentry/types" "7.88.0"
"@sentry/utils" "7.88.0"
"@sentry/hub@5.30.0":
version "5.30.0"
@ -2019,25 +2028,25 @@
"@sentry/types" "5.30.0"
tslib "^1.9.3"
"@sentry/replay@7.81.1":
version "7.81.1"
resolved "https://registry.yarnpkg.com/@sentry/replay/-/replay-7.81.1.tgz#a656d55e2a00b34e42be6eeb79018d21efc223af"
integrity sha512-4ueT0C4bYjngN/9p0fEYH10dTMLovHyk9HxJ6zSTgePvGVexhg+cSEHXisoBDwHeRZVnbIvsVM0NA7rmEDXJJw==
"@sentry/replay@7.88.0":
version "7.88.0"
resolved "https://registry.yarnpkg.com/@sentry/replay/-/replay-7.88.0.tgz#a9158af527db0cce91659f9a87b938040c21fdaa"
integrity sha512-em5dPKLPG7c/HGDbpIj3aHrWbA4iMwqjevqTzn+++KNO1YslkOosCaGsb1whU3AL1T9c3aIFIhZ4u3rNo+DxcA==
dependencies:
"@sentry-internal/tracing" "7.81.1"
"@sentry/core" "7.81.1"
"@sentry/types" "7.81.1"
"@sentry/utils" "7.81.1"
"@sentry-internal/tracing" "7.88.0"
"@sentry/core" "7.88.0"
"@sentry/types" "7.88.0"
"@sentry/utils" "7.88.0"
"@sentry/types@5.30.0":
version "5.30.0"
resolved "https://registry.yarnpkg.com/@sentry/types/-/types-5.30.0.tgz#19709bbe12a1a0115bc790b8942917da5636f402"
integrity sha512-R8xOqlSTZ+htqrfteCWU5Nk0CDN5ApUTvrlvBuiH1DyP6czDZ4ktbZB0hAgBlVcK0U+qpD3ag3Tqqpa5Q67rPw==
"@sentry/types@7.81.1":
version "7.81.1"
resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.81.1.tgz#2b2551fc291e1089651fd574a68f7c4175878bd5"
integrity sha512-dvJvGyctiaPMIQqa46k56Re5IODWMDxiHJ1UjBs/WYDLrmWFPGrEbyJ8w8CYLhYA+7qqrCyIZmHbWSTRIxstHw==
"@sentry/types@7.88.0":
version "7.88.0"
resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.88.0.tgz#b3a09733a7bfad3634687b77764c5767d646d6e7"
integrity sha512-FvwvmX1pWAZKicPj4EpKyho8Wm+C4+r5LiepbbBF8oKwSPJdD2QV1fo/LWxsrzNxWOllFIVIXF5Ed3nPYQWpTw==
"@sentry/utils@5.30.0":
version "5.30.0"
@ -2047,12 +2056,12 @@
"@sentry/types" "5.30.0"
tslib "^1.9.3"
"@sentry/utils@7.81.1":
version "7.81.1"
resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.81.1.tgz#42f3e77baf90205cec1f8599eb8445a6918030bd"
integrity sha512-gq+MDXIirHKxNZ+c9/lVvCXd6y2zaZANujwlFggRH2u9SRiPaIXVilLpvMm4uJqmqBMEcY81ArujExtHvkbCqg==
"@sentry/utils@7.88.0":
version "7.88.0"
resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.88.0.tgz#704e79f14047080564c3e5231028f1cef8824e9f"
integrity sha512-ukminfRmdBXTzk49orwJf3Lu3hR60ZRHjE2a4IXwYhyDT6JJgJqgsq1hzGXx0AyFfyS4WhfZ6QUBy7fu3BScZQ==
dependencies:
"@sentry/types" "7.81.1"
"@sentry/types" "7.88.0"
"@sinclair/typebox@^0.24.1":
version "0.24.40"
@ -11846,16 +11855,17 @@ send@0.17.2:
"@sentry/utils" "5.30.0"
tslib "^1.9.3"
"sentrybrowser@npm:@sentry/browser@7.81.1":
version "7.81.1"
resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-7.81.1.tgz#5ee6ae3679ee80f444d2e8c5662430e7a734ae50"
integrity sha512-DNtS7bZEnFPKVoGazKs5wHoWC0FwsOFOOMNeDvEfouUqKKbjO7+RDHbr7H6Bo83zX4qmZWRBf8V+3n3YPIiJFw==
"sentrybrowser@npm:@sentry/browser@7.88.0":
version "7.88.0"
resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-7.88.0.tgz#80e3afe00e19bffbed09be392061e64dd6196796"
integrity sha512-il4x3PB99nuU/OJQw2RltgYYbo8vtnYoIgneOeEiw4m0ppK1nKkMkd3vDRipGL6E/0i7IUmQfYYy6U10J5Rx+g==
dependencies:
"@sentry-internal/tracing" "7.81.1"
"@sentry/core" "7.81.1"
"@sentry/replay" "7.81.1"
"@sentry/types" "7.81.1"
"@sentry/utils" "7.81.1"
"@sentry-internal/feedback" "7.88.0"
"@sentry-internal/tracing" "7.88.0"
"@sentry/core" "7.88.0"
"@sentry/replay" "7.88.0"
"@sentry/types" "7.88.0"
"@sentry/utils" "7.88.0"
serialize-javascript@^2.1.2:
version "2.1.2"