Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-07-04 15:28:22 +00:00
parent b72c0274e9
commit 587d38cfdd
39 changed files with 1072 additions and 370 deletions

View File

@ -16,4 +16,5 @@ variables:
QA_OMNIBUS_MR_TESTS: "only-smoke"
# Retry failed specs in separate process
QA_RETRY_FAILED_SPECS: "true"
GITLAB_HELM_CHART_REF: "b085a68939fa12d40f9ff933c5af35fec209b1cd" # helm chart ref used by test-on-cng pipeline
# helm chart ref used by test-on-cng pipeline
GITLAB_HELM_CHART_REF: "b085a68939fa12d40f9ff933c5af35fec209b1cd"

View File

@ -41,6 +41,7 @@
RUN_WITH_BUNDLE: "true" # instructs pipeline to install and run gitlab-qa gem via bundler
QA_PATH: qa # sets the optional path for bundler to run from
DYNAMIC_PIPELINE_YML: package-and-test-pipeline.yml # yml files are generated by scripts/generate-e2e-pipeline script
QA_EXPORT_TEST_METRICS: "true" # Export test metrics directly to influxdb by default
inherit:
variables:
- CHROME_VERSION

View File

@ -2220,7 +2220,6 @@ Gitlab/BoundedContexts:
- 'ee/app/events/namespace_settings/ai_related_settings_changed_event.rb'
- 'ee/app/experiments/ee/application_experiment.rb'
- 'ee/app/experiments/project_templates_during_registration_experiment.rb'
- 'ee/app/experiments/signup_intent_step_one_experiment.rb'
- 'ee/app/finders/app_sec/fuzzing/coverage/corpuses_finder.rb'
- 'ee/app/finders/approval_rules/group_finder.rb'
- 'ee/app/finders/audit_event_finder.rb'

View File

@ -1,6 +1,7 @@
<script>
import { GlCard, GlButton, GlSprintf } from '@gitlab/ui';
import { GlAlert, GlCard, GlButton, GlSprintf } from '@gitlab/ui';
import { objectToQuery, visitUrl } from '~/lib/utils/url_utility';
import { s__ } from '~/locale';
import {
UPDATE_SETTINGS_ERROR_MESSAGE,
SHOW_SETUP_SUCCESS_ALERT,
@ -29,6 +30,7 @@ import ExpirationToggle from './expiration_toggle.vue';
export default {
components: {
GlAlert,
GlCard,
GlButton,
GlSprintf,
@ -71,6 +73,9 @@ export default {
NAME_REGEX_DESCRIPTION,
CADENCE_LABEL,
EXPIRATION_POLICY_FOOTER_NOTE,
EXPIRATION_POLICY_REGEX_NOTE: s__(
'ContainerRegistry|Both keep and remove regex patterns are automatically surrounded with %{codeStart}\\A%{codeEnd} and %{codeStart}\\Z%{codeEnd} anchors, so you do not need to include them. However, make sure to take this into account when choosing and testing your regex patterns.',
),
},
data() {
return {
@ -214,7 +219,14 @@ export default {
class="gl-mb-0!"
/>
</div>
<gl-card class="gl-mt-7">
<gl-alert class="gl-mt-7" :dismissible="false">
<gl-sprintf :message="$options.i18n.EXPIRATION_POLICY_REGEX_NOTE">
<template #code="{ content }">
<code>{{ content }}</code>
</template>
</gl-sprintf>
</gl-alert>
<gl-card class="gl-mt-4">
<template #header>
{{ $options.i18n.KEEP_HEADER_TEXT }}
</template>

View File

@ -46,6 +46,7 @@ export default {
import('ee_component/work_items/components/work_item_rolledup_dates.vue'),
},
mixins: [glFeatureFlagMixin()],
inject: ['hasSubepicsFeature'],
props: {
fullPath: {
type: String,
@ -109,6 +110,9 @@ export default {
this.glFeatures.workItemsRolledupDates && this.workItemType === WORK_ITEM_TYPE_VALUE_EPIC
);
},
showParent() {
return this.workItemType === WORK_ITEM_TYPE_VALUE_EPIC ? this.hasSubepicsFeature : true;
},
workItemParent() {
return this.isWidgetPresent(WIDGET_TYPE_HIERARCHY)?.parent;
},
@ -255,7 +259,7 @@ export default {
@error="$emit('error', $event)"
/>
</template>
<template v-if="workItemHierarchy">
<template v-if="workItemHierarchy && showParent">
<work-item-parent
class="gl-mb-5 gl-pt-5 gl-border-t gl-border-gray-50"
:can-update="canUpdate"

View File

@ -25,6 +25,7 @@ import {
LINKED_ITEMS_ANCHOR,
WORK_ITEM_REFERENCE_CHAR,
WORK_ITEM_TYPE_VALUE_TASK,
WORK_ITEM_TYPE_VALUE_EPIC,
} from '../constants';
import workItemUpdatedSubscription from '../graphql/work_item_updated.subscription.graphql';
@ -80,7 +81,7 @@ export default {
WorkItemLoading,
},
mixins: [glFeatureFlagMixin()],
inject: ['fullPath', 'isGroup', 'reportAbusePath', 'groupPath'],
inject: ['fullPath', 'isGroup', 'reportAbusePath', 'groupPath', 'hasSubepicsFeature'],
props: {
isModal: {
type: Boolean,
@ -230,9 +231,17 @@ export default {
showAncestors() {
// TODO: This is a temporary check till the issue work item migration is completed
// Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/468114
return this.workItemType === WORK_ITEM_TYPE_VALUE_TASK
? this.glFeatures.namespaceLevelWorkItems && this.parentWorkItem
: this.parentWorkItem;
const { workItemType, glFeatures, parentWorkItem, hasSubepicsFeature } = this;
if (workItemType === WORK_ITEM_TYPE_VALUE_TASK) {
return glFeatures.namespaceLevelWorkItems && parentWorkItem;
}
if (workItemType === WORK_ITEM_TYPE_VALUE_EPIC) {
return hasSubepicsFeature;
}
return parentWorkItem;
},
parentWorkItemConfidentiality() {
return this.parentWorkItem?.confidential;

View File

@ -32,6 +32,7 @@ export default {
WorkItemTreeActions,
GlToggle,
},
inject: ['hasSubepicsFeature'],
props: {
fullPath: {
type: String,
@ -103,9 +104,18 @@ export default {
.some((hierarchy) => hierarchy.hasChildren);
},
addItemsActions() {
const reorderedChildTypes = this.allowedChildTypes
.slice()
.sort((a, b) => a.id.localeCompare(b.id));
let childTypes = this.allowedChildTypes;
// To remove EPICS actions when subepics are not available
if (
this.workItemType.toUpperCase() === WORK_ITEM_TYPE_ENUM_EPIC &&
!this.hasSubepicsFeature
) {
childTypes = childTypes.filter((type) => {
return type.name.toUpperCase() !== WORK_ITEM_TYPE_ENUM_EPIC;
});
}
const reorderedChildTypes = childTypes.slice().sort((a, b) => a.id.localeCompare(b.id));
return reorderedChildTypes.map((type) => {
const enumType = WORK_ITEM_TYPE_VALUE_MAP[type.name];
return {

View File

@ -35,6 +35,7 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType } = {}) => {
signInPath,
hasIterationsFeature,
hasOkrsFeature,
hasSubepicsFeature,
hasIssuableHealthStatusFeature,
newCommentTemplatePaths,
reportAbusePath,
@ -54,6 +55,7 @@ export const initWorkItemsRoot = ({ workItemType, workspaceType } = {}) => {
isGroup,
hasIssueWeightsFeature: parseBoolean(hasIssueWeightsFeature),
hasOkrsFeature: parseBoolean(hasOkrsFeature),
hasSubepicsFeature: parseBoolean(hasSubepicsFeature),
issuesListPath,
labelsManagePath,
registerPath,

View File

@ -47,7 +47,7 @@ module VulnerabilityFindingHelpers
return Vulnerabilities::Finding.new unless report_finding
finding_data = report_finding.to_hash.except(
:compare_key, :identifiers, :location, :scanner, :links, :signatures, :flags, :evidence
:compare_key, :identifiers, :location, :scanner, :links, :signatures, :flags, :evidence, :confidence
)
identifiers = report_finding.identifiers.uniq(&:fingerprint).map do |identifier|

View File

@ -222,7 +222,19 @@ class Group < Namespace
end
scope :excluding_restricted_visibility_levels_for_user, ->(user) do
user.can_admin_all_resources? ? all : where.not(visibility_level: Gitlab::CurrentSettings.restricted_visibility_levels)
return all if user.can_admin_all_resources?
case Gitlab::CurrentSettings.restricted_visibility_levels.sort
when [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::PUBLIC],
[Gitlab::VisibilityLevel::PRIVATE]
where.not(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
when [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
where.not(visibility_level: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL])
when Gitlab::VisibilityLevel.values
none
else
all
end
end
scope :project_creation_allowed, ->(user) do

View File

@ -22,7 +22,7 @@ class WorkItem < Issue
has_many :child_links, class_name: '::WorkItems::ParentLink', foreign_key: :work_item_parent_id
has_many :work_item_children, through: :child_links, class_name: 'WorkItem',
foreign_key: :work_item_id, source: :work_item
has_many :work_item_children_by_relative_position, -> { work_item_children_keyset_order },
has_many :work_item_children_by_relative_position, ->(work_item) { work_item_children_keyset_order(work_item) },
through: :child_links, class_name: 'WorkItem',
foreign_key: :work_item_id, source: :work_item
@ -65,8 +65,8 @@ class WorkItem < Issue
)
end
def work_item_children_keyset_order
keyset_order = Gitlab::Pagination::Keyset::Order.build(
def work_item_children_keyset_order_config
Gitlab::Pagination::Keyset::Order.build(
[
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'parent_link_relative_position',
@ -81,6 +81,10 @@ class WorkItem < Issue
)
]
)
end
def work_item_children_keyset_order(_work_item)
keyset_order = work_item_children_keyset_order_config
keyset_order.apply_cursor_conditions(includes(:parent_link)).reorder(keyset_order)
end

View File

@ -56,20 +56,24 @@ module Members
existing_members: existing_members
}.merge(parsed_args(args))
members = emails.map do |email|
new(invitee: email, builder: InviteMemberBuilder, **common_arguments).execute
end
members += users.map do |user|
new(invitee: user, **common_arguments).execute
end
members
build_members(emails, users, common_arguments)
end
end
end
end
def build_members(emails, users, common_arguments)
members = emails.map do |email|
new(invitee: email, builder: InviteMemberBuilder, **common_arguments).execute
end
members += users.map do |user|
new(invitee: user, **common_arguments).execute
end
members
end
def add_member(source, invitee, access_level, **args)
add_members(source, [invitee], access_level, **args).first
end

View File

@ -2,7 +2,7 @@
- add_page_specific_style 'page_bundles/labels'
- page_description = s_('AdminLabels|Labels created here will be automatically added to new projects.')
%div{ data: { event_tracking_load: 'true', event_tracking: 'view_admin_labels_pageload' } })
%div{ data: { event_tracking_load: 'true', event_tracking: 'view_admin_labels_pageload' } }
- if @labels.present?
= render ::Layouts::CrudComponent.new(_('Labels'), description: page_description, count: @labels.count, count_class: 'js-admin-labels-count', icon: 'label', options: { class: 'labels other-labels gl-mt-5 js-admin-labels-container' }) do |c|

View File

@ -15,23 +15,6 @@
= f.hidden_field :email, value: @invite_email
= hidden_field_tag :invite_email, @invite_email
- experiment(:signup_intent_step_one, actor: current_user) do |e|
- e.candidate do
.form-group
= label_tag :signup_intent, s_('SignUp|I want to...')
= select_tag :signup_intent,
options_for_select([[s_('SignUp|Set up a new team'),
:select_signup_intent_dropdown_new_team_registration_step_one],
[s_('SignUp|Set up a new personal account'),
:select_signup_intent_dropdown_new_personal_account_registration_step_one],
[s_('SignUp|Join an existing team'),
:select_signup_intent_dropdown_join_existing_team_registration_step_one],
[s_('SignUp|Contribute to a public project on GitLab'),
:select_signup_intent_dropdown_contribute_public_project_registration_step_one]]),
prompt: s_('SignUp|Please select an option...'),
class: 'gl-form-select custom-select',
required: true
.name.form-row
.col.form-group
= f.label :first_name, _('First name'), for: 'new_user_first_name'

View File

@ -0,0 +1,9 @@
---
# Feature flag definition: gates the resumable (page-keyset based) Bitbucket
# pull request import worker path. `type: wip` with `default_enabled: false`
# means the flag is off unless explicitly enabled.
name: bitbucket_import_resumable_worker
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/466231
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/156797
rollout_issue_url:
milestone: '17.2'
group: group::import and integrate
type: wip
default_enabled: false

View File

@ -13,6 +13,12 @@ MSG
BLUEPRINT_LONG_MESSAGE = <<~MSG
## Architecture Evolution Review
:exclamation: We plan to [move the architecture and design documents](https://gitlab.com/gitlab-com/content-sites/handbook/-/issues/279)
to the [public GitLab handbook](https://handbook.gitlab.com/handbook/engineering/architecture/).
This will change how you contribute to the documents. Follow the
[migration issue](https://gitlab.com/gitlab-com/content-sites/handbook/-/issues/279)
and join the `#architecture` and `#handbook` Slack channels for updates.
#{BLUEPRINT_SHORT_MESSAGE}
The following files, which might require the additional review, have been changed:

View File

@ -29,12 +29,45 @@ module Bitbucket
values.links
values.summary
values.reviewers
next
].freeze
def initialize(options = {})
@connection = Connection.new(options)
end
# Fetches data from the Bitbucket API and yields a Page object for every page
# of data, without loading all of them into memory.
#
# method - The method name used for getting the data.
# representation_type - The representation type name used to wrap the result
# args - Arguments to pass to the method.
def each_page(method, representation_type, *args)
options =
if args.last.is_a?(Hash)
args.last
else
{}
end
loop do
parsed_response = fetch_data(method, *args)
object = Page.new(parsed_response, representation_type)
yield object
break unless object.next?
options[:next_url] = object.next
if args.last.is_a?(Hash)
args[-1] = options
else
args.push(options)
end
end
end
def last_issue(repo)
parsed_response = connection.get("/repositories/#{repo}/issues?pagelen=1&sort=-created_on&state=ALL")
Bitbucket::Representation::Issue.new(parsed_response['values'].first)
@ -50,9 +83,15 @@ module Bitbucket
get_collection(path, :comment)
end
def pull_requests(repo)
def pull_requests(repo, options = {})
path = "/repositories/#{repo}/pullrequests?state=ALL&sort=created_on&fields=#{pull_request_values}"
get_collection(path, :pull_request)
if options[:raw]
path = options[:next_url] if options[:next_url]
connection.get(path)
else
get_collection(path, :pull_request)
end
end
def pull_request_comments(repo, pull_request)
@ -91,6 +130,14 @@ module Bitbucket
private
def fetch_data(method, *args)
case method
when :pull_requests then pull_requests(*args)
else
raise ArgumentError, "Unknown data method #{method}"
end
end
def get_collection(path, type, page_number: nil, limit: nil)
paginator = Paginator.new(connection, path, type, page_number: page_number, limit: limit)
Collection.new(paginator)

View File

@ -7,6 +7,31 @@ module Gitlab
include ParallelScheduling
def execute
bitbucket_import_resumable_worker =
project.import_data&.data&.dig('bitbucket_import_resumable_worker')
if bitbucket_import_resumable_worker
resumable_execute
else
non_resumable_execute
end
end
private
def resumable_execute
log_info(import_stage: 'import_pull_requests', message: 'importing pull requests')
each_object_to_import do |object|
job_delay = calculate_job_delay(job_waiter.jobs_remaining)
sidekiq_worker_class.perform_in(job_delay, project.id, object.to_hash, job_waiter.key)
end
job_waiter
end
def non_resumable_execute
log_info(import_stage: 'import_pull_requests', message: 'importing pull requests')
pull_requests = client.pull_requests(project.import_source)
@ -29,8 +54,6 @@ module Gitlab
job_waiter
end
private
def sidekiq_worker_class
ImportPullRequestWorker
end
@ -39,8 +62,22 @@ module Gitlab
:pull_requests
end
def collection_options
{ raw: true }
end
def representation_type
:pull_request
end
def id_for_already_enqueued_cache(object)
object.iid
if object.is_a?(Hash)
# used for `resumable_execute`
object[:iid]
else
# used for `non_resumable_execute`
object.iid
end
end
# To avoid overloading Gitaly, we use a smaller limit for pull requests than the one defined in the

View File

@ -6,7 +6,8 @@ module Gitlab
include Loggable
include ErrorTracking
attr_reader :project, :already_enqueued_cache_key, :job_waiter_cache_key
attr_reader :project, :already_enqueued_cache_key, :job_waiter_cache_key, :job_waiter_remaining_cache_key,
:page_keyset
# The base cache key to use for tracking already enqueued objects.
ALREADY_ENQUEUED_CACHE_KEY =
@ -16,6 +17,10 @@ module Gitlab
JOB_WAITER_CACHE_KEY =
'bitbucket-importer/job-waiter/%{project}/%{collection}'
# The base cache key to use for storing job waiter remaining jobs
JOB_WAITER_REMAINING_CACHE_KEY =
'bitbucket-importer/job-waiter-remaining/%{project}/%{collection}'
# project - An instance of `Project`.
def initialize(project)
@project = project
@ -24,10 +29,45 @@ module Gitlab
format(ALREADY_ENQUEUED_CACHE_KEY, project: project.id, collection: collection_method)
@job_waiter_cache_key =
format(JOB_WAITER_CACHE_KEY, project: project.id, collection: collection_method)
@job_waiter_remaining_cache_key = format(JOB_WAITER_REMAINING_CACHE_KEY, project: project.id,
collection: collection_method)
@page_keyset = Gitlab::Import::PageKeyset.new(project, collection_method, 'bitbucket-importer')
end
# The method that will be called for traversing through all the objects to
# import, yielding them to the supplied block.
def each_object_to_import
repo = project.import_source
options = collection_options.merge(next_url: page_keyset.current)
client.each_page(collection_method, representation_type, repo, options) do |page|
page.items.each do |object|
job_waiter.jobs_remaining = Gitlab::Cache::Import::Caching.increment(job_waiter_remaining_cache_key)
object = object.to_hash
next if already_enqueued?(object)
yield object
# We mark the object as imported immediately so we don't end up
# scheduling it multiple times.
mark_as_enqueued(object)
end
page_keyset.set(page.next) if page.next?
end
end
private
# Any options to be passed to the method used for retrieving the data to
# import.
def collection_options
{}
end
def client
@client ||= Bitbucket::Client.new(project.import_data.credentials)
end
@ -51,12 +91,18 @@ module Gitlab
raise NotImplementedError
end
# The name of the method to call to retrieve the representation object
def representation_type
raise NotImplementedError
end
def job_waiter
@job_waiter ||= begin
key = Gitlab::Cache::Import::Caching.read(job_waiter_cache_key)
key ||= Gitlab::Cache::Import::Caching.write(job_waiter_cache_key, JobWaiter.generate_key)
jobs_remaining = Gitlab::Cache::Import::Caching.read(job_waiter_remaining_cache_key).to_i || 0
JobWaiter.new(0, key)
JobWaiter.new(jobs_remaining, key)
end
end

View File

@ -14,6 +14,9 @@ module Gitlab
end
def execute
bitbucket_import_resumable_worker =
Feature.enabled?(:bitbucket_import_resumable_worker, current_user)
::Projects::CreateService.new(
current_user,
name: name,
@ -25,7 +28,12 @@ module Gitlab
import_type: 'bitbucket',
import_source: repo.full_name,
import_url: clone_url,
import_data: { credentials: credentials },
import_data: {
credentials: credentials,
data: {
bitbucket_import_resumable_worker: bitbucket_import_resumable_worker
}
},
skip_wiki: skip_wiki
).execute
end

View File

@ -0,0 +1,41 @@
# frozen_string_literal: true

module Gitlab
  module Import
    # PageKeyset can be used to keep track of the last imported page of a
    # collection, allowing workers to resume where they left off in the event of
    # an error.
    #
    # The value is persisted via Gitlab::Cache::Import::Caching under a key
    # derived from the import type, the owning object's id and the collection
    # name (see CACHE_KEY).
    class PageKeyset
      # The fully formatted cache key for this (import_type, object, collection)
      # triple.
      attr_reader :cache_key

      # The base cache key to use for storing the last key.
      CACHE_KEY = '%{import_type}/page-keyset/%{object}/%{collection}'

      # object      - Any object responding to #id (presumably a Project —
      #               TODO(review): confirm against callers); only its id is
      #               embedded in the cache key.
      # collection  - Name of the collection being imported (e.g. a method name
      #               such as :pull_requests).
      # import_type - String namespacing the key per importer
      #               (e.g. 'bitbucket-importer').
      def initialize(object, collection, import_type)
        @cache_key = format(CACHE_KEY, import_type: import_type, object: object.id, collection: collection)
      end

      # Set the key to the given value.
      #
      # @param value [String]
      # @return [String]
      def set(value)
        Gitlab::Cache::Import::Caching.write(cache_key, value)
      end

      # Get the current value from the cache
      #
      # @return [String] the stored value, or nil when the key is absent —
      #   NOTE(review): nil-on-miss is assumed from Caching.read semantics;
      #   verify.
      def current
        Gitlab::Cache::Import::Caching.read(cache_key)
      end

      # Expire the key
      #
      # Sets the key's TTL to 0, effectively removing it.
      #
      # @return [Boolean]
      def expire!
        Gitlab::Cache::Import::Caching.expire(cache_key, 0)
      end
    end
  end
end

View File

@ -14189,6 +14189,9 @@ msgstr ""
msgid "ContainerRegistry|Add rule"
msgstr ""
msgid "ContainerRegistry|Both keep and remove regex patterns are automatically surrounded with %{codeStart}\\A%{codeEnd} and %{codeStart}\\Z%{codeEnd} anchors, so you do not need to include them. However, make sure to take this into account when choosing and testing your regex patterns."
msgstr ""
msgid "ContainerRegistry|Build an image"
msgstr ""
@ -49994,33 +49997,15 @@ msgstr ""
msgid "SignUp|By signing in you accept the %{link_start}Terms of Use and acknowledge the Privacy Statement and Cookie Policy%{link_end}."
msgstr ""
msgid "SignUp|Contribute to a public project on GitLab"
msgstr ""
msgid "SignUp|First name is too long (maximum is %{max_length} characters)."
msgstr ""
msgid "SignUp|I want to..."
msgstr ""
msgid "SignUp|Join an existing team"
msgstr ""
msgid "SignUp|Last name is too long (maximum is %{max_length} characters)."
msgstr ""
msgid "SignUp|Minimum length is %{minimum_password_length} characters."
msgstr ""
msgid "SignUp|Please select an option..."
msgstr ""
msgid "SignUp|Set up a new personal account"
msgstr ""
msgid "SignUp|Set up a new team"
msgstr ""
msgid "SignUp|Username is too long (maximum is %{max_length} characters)."
msgstr ""

View File

@ -41,11 +41,9 @@ variables:
GITLAB_QA_CACHE_KEY: "$qa_cache_key"
GITLAB_SEMVER_VERSION: "$(cat VERSION)"
FEATURE_FLAGS: "${QA_FEATURE_FLAGS}"
QA_EXPORT_TEST_METRICS: "${QA_EXPORT_TEST_METRICS:-true}"
QA_FRAMEWORK_CHANGES: "${QA_FRAMEWORK_CHANGES:-false}"
QA_RUN_ALL_TESTS: "${QA_RUN_ALL_TESTS:-false}"
QA_RUN_ALL_E2E_LABEL: "${QA_RUN_ALL_E2E_LABEL:-false}"
QA_SAVE_TEST_METRICS: "${QA_SAVE_TEST_METRICS:-false}"
QA_SUITES: "$QA_SUITES"
QA_TESTS: "$QA_TESTS"
KNAPSACK_TEST_FILE_PATTERN: "$KNAPSACK_TEST_FILE_PATTERN"

View File

@ -28,7 +28,7 @@ class ReleaseEnvironmentNotification
def set_required_env_vars?
# List of required environment variables.
# CI_PIPELINE_ID supposes to be set by the CI pipeline, so we don't check it.
# CI_PIPELINE_URL supposes to be set by the CI pipeline, so we don't check it.
required_env_vars = %w[ENVIRONMENT VERSIONS OPS_RELEASE_TOOLS_PIPELINE_TOKEN RELEASE_ENVIRONMENT_NOTIFICATION_TYPE]
required_env_vars.each do |var|
@ -56,7 +56,7 @@ class ReleaseEnvironmentNotification
data = {
"variables[RELEASE_ENVIRONMENT_PIPELINE]" => "true",
"variables[RELEASE_ENVIRONMENT_NOTIFICATION_TYPE]" => ENV.fetch('RELEASE_ENVIRONMENT_NOTIFICATION_TYPE', nil),
"variables[RELEASE_ENVIRONMENT_CI_PIPELINE_ID]" => ENV.fetch('CI_PIPELINE_ID', nil),
"variables[RELEASE_ENVIRONMENT_CI_PIPELINE_URL]" => ENV.fetch('CI_PIPELINE_URL', nil),
"variables[RELEASE_ENVIRONMENT_NAME]" => ENV.fetch('ENVIRONMENT', nil),
"variables[RELEASE_ENVIRONMENT_VERSION]" => version,
"token" => ENV.fetch('OPS_RELEASE_TOOLS_PIPELINE_TOKEN', nil),

View File

@ -84,5 +84,42 @@ RSpec.describe ForkTargetsFinder, feature_category: :source_code_management do
end
end
end
context 'with restricted visibility levels' do
using RSpec::Parameterized::TableSyntax
let_it_be(:private_group) { create(:group, :private, owners: user) }
let_it_be(:internal_group) { create(:group, :internal, owners: user) }
let_it_be(:public_groups) do
[maintained_group, owned_group, developer_group, project.namespace, shared_group_to_group_with_owner_access]
end
let(:private_vis) { Gitlab::VisibilityLevel::PRIVATE }
let(:internal_vis) { Gitlab::VisibilityLevel::INTERNAL }
let(:public_vis) { Gitlab::VisibilityLevel::PUBLIC }
subject(:execute_finder) { finder.execute(only_groups: true) }
context 'with table syntax' do
where(:restricted_visibility_levels, :expected_groups_and_namespaces) do
[] | lazy { [private_group, internal_group, *public_groups] }
[private_vis] | lazy { [internal_group, *public_groups] }
[internal_vis] | lazy { [private_group, internal_group, *public_groups] }
[public_vis] | lazy { [private_group, internal_group, *public_groups] }
[private_vis, internal_vis] | lazy { [*public_groups] }
[private_vis, public_vis] | lazy { [internal_group, *public_groups] }
[internal_vis, public_vis] | lazy { [private_group, internal_group, *public_groups] }
[private_vis, internal_vis, public_vis] | []
end
with_them do
before do
stub_application_setting(restricted_visibility_levels: restricted_visibility_levels)
end
it { is_expected.to match_array(expected_groups_and_namespaces) }
end
end
end
end
end

View File

@ -1,3 +1,4 @@
import { GlAlert, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import Vue, { nextTick } from 'vue';
@ -46,6 +47,8 @@ describe('Container Expiration Policy Settings Form', () => {
const findOlderThanDropdown = () => wrapper.find('[data-testid="older-than-dropdown"]');
const findRemoveRegexInput = () => wrapper.find('[data-testid="remove-regex-input"]');
const findAlert = () => wrapper.findComponent(GlAlert);
const submitForm = () => {
findForm().trigger('submit');
return waitForPromises();
@ -61,6 +64,7 @@ describe('Container Expiration Policy Settings Form', () => {
stubs: {
GlCard,
GlLoadingIcon,
GlSprintf,
},
propsData: { ...props },
provide,
@ -120,6 +124,22 @@ describe('Container Expiration Policy Settings Form', () => {
});
};
describe('alert', () => {
beforeEach(() => {
mountComponent();
});
it('is not dismissible', () => {
expect(findAlert().props('dismissible')).toBe(false);
});
it('contains right text', () => {
expect(findAlert().text()).toMatchInterpolatedText(
'Both keep and remove regex patterns are automatically surrounded with %{codeStart}\\A%{codeEnd} and %{codeStart}\\Z%{codeEnd} anchors, so you do not need to include them. However, make sure to take this into account when choosing and testing your regex patterns.',
);
});
});
describe.each`
model | finder | fieldName | type | defaultValue
${'enabled'} | ${findEnableToggle} | ${'Enable'} | ${'toggle'} | ${false}

View File

@ -43,6 +43,7 @@ describe('WorkItemAttributesWrapper component', () => {
hasOkrsFeature: true,
hasIssuableHealthStatusFeature: true,
projectNamespace: 'namespace',
hasSubepicsFeature: true,
glFeatures: {
workItemsAlpha,
},

View File

@ -111,6 +111,7 @@ describe('WorkItemDetail component', () => {
workItemsAlphaEnabled = false,
workItemsBeta = false,
namespaceLevelWorkItems = true,
hasSubepicsFeature = true,
} = {}) => {
wrapper = shallowMountExtended(WorkItemDetail, {
apolloProvider: createMockApollo([
@ -141,6 +142,7 @@ describe('WorkItemDetail component', () => {
hasIssueWeightsFeature: true,
hasIterationsFeature: true,
hasOkrsFeature: true,
hasSubepicsFeature,
hasIssuableHealthStatusFeature: true,
projectNamespace: 'namespace',
fullPath: 'group/project',
@ -359,6 +361,22 @@ describe('WorkItemDetail component', () => {
});
});
describe('`subepics` is unavailable', () => {
it('does not show ancestors widget and shows title in the header', async () => {
const epicWorkItem = workItemByIidResponseFactory({
workItemType: epicType,
});
const epicHandler = jest.fn().mockResolvedValue(epicWorkItem);
createComponent({ hasSubepicsFeature: false, handler: epicHandler });
await waitForPromises();
expect(findAncestors().exists()).toBe(false);
expect(findWorkItemType().classes()).toEqual(['sm:!gl-block', 'gl-w-full']);
});
});
describe('with parent', () => {
beforeEach(() => {
const parentResponse = workItemByIidResponseFactory(mockParent);

View File

@ -12,6 +12,8 @@ import {
FORM_TYPES,
WORK_ITEM_TYPE_ENUM_OBJECTIVE,
WORK_ITEM_TYPE_ENUM_KEY_RESULT,
WORK_ITEM_TYPE_ENUM_EPIC,
WORK_ITEM_TYPE_ENUM_ISSUE,
WORK_ITEM_TYPE_VALUE_EPIC,
WORK_ITEM_TYPE_VALUE_OBJECTIVE,
} from '~/work_items/constants';
@ -38,6 +40,7 @@ describe('WorkItemTree', () => {
children = childrenWorkItems,
canUpdate = true,
canUpdateChildren = true,
hasSubepicsFeature = true,
} = {}) => {
wrapper = shallowMountExtended(WorkItemTree, {
propsData: {
@ -51,6 +54,9 @@ describe('WorkItemTree', () => {
canUpdate,
canUpdateChildren,
},
provide: {
hasSubepicsFeature,
},
stubs: { WidgetWrapper },
});
};
@ -114,6 +120,52 @@ describe('WorkItemTree', () => {
},
);
describe('when subepics are not available', () => {
it.each`
option | formType | childType
${'New issue'} | ${FORM_TYPES.create} | ${WORK_ITEM_TYPE_ENUM_ISSUE}
${'Existing issue'} | ${FORM_TYPES.add} | ${WORK_ITEM_TYPE_ENUM_ISSUE}
`(
'when triggering action $option, renders the form passing $formType and $childType',
async ({ formType, childType }) => {
createComponent({ hasSubepicsFeature: false, workItemType: 'Epic' });
wrapper.vm.showAddForm(formType, childType);
await nextTick();
expect(findForm().exists()).toBe(true);
expect(findForm().props()).toMatchObject({
formType,
childrenType: childType,
});
},
);
});
describe('when subepics are available', () => {
it.each`
option | formType | childType
${'New issue'} | ${FORM_TYPES.create} | ${WORK_ITEM_TYPE_ENUM_ISSUE}
${'Existing issue'} | ${FORM_TYPES.add} | ${WORK_ITEM_TYPE_ENUM_ISSUE}
${'New epic'} | ${FORM_TYPES.create} | ${WORK_ITEM_TYPE_ENUM_EPIC}
${'Existing epic'} | ${FORM_TYPES.add} | ${WORK_ITEM_TYPE_ENUM_EPIC}
`(
'when triggering action $option, renders the form passing $formType and $childType',
async ({ formType, childType }) => {
createComponent({ hasSubepicsFeature: true, workItemType: 'Epic' });
wrapper.vm.showAddForm(formType, childType);
await nextTick();
expect(findForm().exists()).toBe(true);
expect(findForm().props()).toMatchObject({
formType,
childrenType: childType,
});
},
);
});
describe('when no permission to update', () => {
beforeEach(() => {
createComponent({

View File

@ -57,6 +57,7 @@ describe('Work items router', () => {
hasIssueWeightsFeature: false,
hasIterationsFeature: false,
hasOkrsFeature: false,
hasSubepicsFeature: false,
hasIssuableHealthStatusFeature: false,
labelsManagePath: 'test-project-path/labels',
reportAbusePath: '/report/abuse/path',

View File

@ -14,6 +14,76 @@ RSpec.describe Bitbucket::Client, feature_category: :importers do
subject(:client) { described_class.new(options) }
describe '#each_page' do
let_it_be(:item1) do
{ 'username' => 'Ben' }
end
let_it_be(:item2) do
{ 'username' => 'Affleck' }
end
let_it_be(:item3) do
{ 'username' => 'Jane' }
end
let_it_be(:response1) do
{ 'values' => [item1], 'next' => 'https://example.com/next' }
end
let_it_be(:response2) do
{ 'values' => [item2], 'next' => 'https://example.com/next2' }
end
let_it_be(:response3) do
{ 'values' => [item3], 'next' => nil }
end
before do
allow(client)
.to receive(:pull_requests)
.with('repo')
.and_return(response1)
allow(client)
.to receive(:pull_requests)
.with('repo', { next_url: 'https://example.com/next' })
.and_return(response2)
allow(client)
.to receive(:pull_requests)
.with('repo', { next_url: 'https://example.com/next2' })
.and_return(response3)
end
it 'yields every retrieved page to the supplied block' do
pages = []
client.each_page(:pull_requests, :pull_request, 'repo') { |page| pages << page }
expect(pages[0]).to be_an_instance_of(Bitbucket::Page)
expect(pages[0].items.count).to eq(1)
expect(pages[0].items.first.raw).to eq(item1)
expect(pages[0].attrs[:next]).to eq('https://example.com/next')
expect(pages[1].items.count).to eq(1)
expect(pages[1].items.first.raw).to eq(item2)
expect(pages[1].attrs[:next]).to eq('https://example.com/next2')
expect(pages[2].items.count).to eq(1)
expect(pages[2].items.first.raw).to eq(item3)
expect(pages[2].attrs[:next]).to eq(nil)
end
context 'when fetch_data not defined' do
it 'raises argument error' do
expect { client.each_page(:foo, :pull_request, 'repo') }
.to raise_error(ArgumentError, 'Unknown data method foo')
end
end
end
describe '#last_issue' do
let(:url) { "#{root_url}/repositories/#{repo}/issues?pagelen=1&sort=-created_on&state=ALL" }
@ -59,6 +129,18 @@ RSpec.describe Bitbucket::Client, feature_category: :importers do
client.pull_requests(repo)
end
context 'with options raw' do
let(:url) { "#{root_url}#{path}" }
it 'returns raw result' do
stub_request(:get, url).to_return(status: 200, headers: headers, body: '{}')
client.pull_requests(repo, raw: true)
expect(WebMock).to have_requested(:get, url)
end
end
end
describe '#pull_request_comments' do

View File

@ -3,29 +3,20 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsImporter, :clean_gitlab_redis_shared_state, feature_category: :importers do
let_it_be(:project) do
create(:project, :import_started,
import_data_attributes: {
data: { 'project_key' => 'key', 'repo_slug' => 'slug' },
credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
}
)
end
subject(:importer) { described_class.new(project) }
describe '#execute' do
before do
allow_next_instance_of(Bitbucket::Client) do |client|
allow(client).to receive(:pull_requests).and_return(
[
Bitbucket::Representation::PullRequest.new({ 'id' => 1, 'state' => 'OPENED' }),
Bitbucket::Representation::PullRequest.new({ 'id' => 2, 'state' => 'DECLINED' }),
Bitbucket::Representation::PullRequest.new({ 'id' => 3, 'state' => 'MERGED' })
],
[]
)
end
shared_examples 'import bitbucket PullRequestsImporter' do |bitbucket_import_resumable_worker|
let_it_be(:project) do
create(:project, :import_started,
import_data_attributes: {
data: {
'project_key' => 'key',
'repo_slug' => 'slug',
'bitbucket_import_resumable_worker' => bitbucket_import_resumable_worker
},
credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
}
)
end
it 'imports each pull request in parallel' do
@ -39,20 +30,6 @@ RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsImporter, :clean_
.to match_array(%w[1 2 3])
end
context 'when the client raises an error' do
before do
allow_next_instance_of(Bitbucket::Client) do |client|
allow(client).to receive(:pull_requests).and_raise(StandardError)
end
end
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track).once
expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
context 'when pull request was already enqueued' do
before do
Gitlab::Cache::Import::Caching.set_add(importer.already_enqueued_cache_key, 1)
@ -68,4 +45,69 @@ RSpec.describe Gitlab::BitbucketImport::Importers::PullRequestsImporter, :clean_
end
end
end
describe '#resumable_execute' do
before do
allow_next_instance_of(Bitbucket::Client) do |client|
page = instance_double('Bitbucket::Page', attrs: [], items: [
Bitbucket::Representation::PullRequest.new({ 'id' => 1, 'state' => 'OPENED' }),
Bitbucket::Representation::PullRequest.new({ 'id' => 2, 'state' => 'DECLINED' }),
Bitbucket::Representation::PullRequest.new({ 'id' => 3, 'state' => 'MERGED' })
])
allow(client).to receive(:each_page).and_yield(page)
allow(page).to receive(:next?).and_return(true)
allow(page).to receive(:next).and_return('https://example.com/next')
end
end
it_behaves_like 'import bitbucket PullRequestsImporter', true do
context 'when the client raises an error' do
before do
allow_next_instance_of(Bitbucket::Client) do |client|
allow(client).to receive(:pull_requests).and_raise(StandardError.new('error fetching PRs'))
end
end
it 'raises the error' do
expect { importer.execute }.to raise_error(StandardError, 'error fetching PRs')
end
end
end
end
describe '#non_resumable_execute' do
before do
allow_next_instance_of(Bitbucket::Client) do |client|
allow(client).to receive(:pull_requests).and_return(
[
Bitbucket::Representation::PullRequest.new({ 'id' => 1, 'state' => 'OPENED' }),
Bitbucket::Representation::PullRequest.new({ 'id' => 2, 'state' => 'DECLINED' }),
Bitbucket::Representation::PullRequest.new({ 'id' => 3, 'state' => 'MERGED' })
],
[]
)
end
end
it_behaves_like 'import bitbucket PullRequestsImporter', false do
context 'when the client raises an error' do
let(:exception) { StandardError.new('error fetching PRs') }
before do
allow_next_instance_of(Bitbucket::Client) do |client|
allow(client).to receive(:pull_requests).and_raise(exception)
end
end
it 'tracks the failure and does not fail' do
expect(Gitlab::Import::ImportFailureService).to receive(:track)
.once
.with(a_hash_including(exception: exception))
expect(importer.execute).to be_a(Gitlab::JobWaiter)
end
end
end
end
end

View File

@ -3,19 +3,29 @@
require 'spec_helper'
RSpec.describe Gitlab::BitbucketImport::ParallelScheduling, feature_category: :importers do
let_it_be(:project) { build(:project) }
let_it_be(:project) do
create(:project, :import_started, import_source: 'foo/bar',
import_data_attributes: {
data: {
'project_key' => 'key',
'repo_slug' => 'slug'
},
credentials: { 'base_uri' => 'http://bitbucket.org/', 'user' => 'bitbucket', 'password' => 'password' }
}
)
end
describe '#calculate_job_delay' do
let(:importer_class) do
Class.new do
include Gitlab::BitbucketImport::ParallelScheduling
let(:importer_class) do
Class.new do
include Gitlab::BitbucketImport::ParallelScheduling
def collection_method
:issues
end
def collection_method
:issues
end
end
end
describe '#calculate_job_delay' do
let(:importer) { importer_class.new(project) }
before do
@ -34,4 +44,120 @@ RSpec.describe Gitlab::BitbucketImport::ParallelScheduling, feature_category: :i
expect(importer.send(:calculate_job_delay, 100)).to eq(50.minutes)
end
end
describe '#each_object_to_import' do
let_it_be(:opened_issue) { Bitbucket::Representation::Issue.new({ 'id' => 1, 'state' => 'OPENED' }) }
let_it_be(:object) { opened_issue.to_hash }
let(:importer) { importer_class.new(project) }
context 'without representation_type' do
it 'raises NotImplementedError' do
expect { importer_class.new(project).each_object_to_import }.to raise_error(NotImplementedError)
end
end
context 'with representation_type' do
before do
allow(importer)
.to receive(:representation_type)
.and_return(:issue)
end
it 'yields every object to import' do
page = instance_double('Bitbucket::Page', attrs: [], items: [opened_issue])
allow(page).to receive(:next?).and_return(true)
allow(page).to receive(:next).and_return('https://example.com/next')
allow_next_instance_of(Bitbucket::Client) do |client|
expect(client)
.to receive(:each_page)
.with(:issues, :issue, 'foo/bar', { next_url: nil })
.and_yield(page)
end
expect(importer.page_keyset)
.to receive(:set)
.with('https://example.com/next')
.and_return(true)
expect(importer)
.to receive(:already_enqueued?)
.with(object)
.and_return(false)
expect(importer)
.to receive(:mark_as_enqueued)
.with(object)
expect { |b| importer.each_object_to_import(&b) }
.to yield_with_args(object)
end
it 'resumes from the last page' do
page = instance_double('Bitbucket::Page', attrs: [], items: [opened_issue])
allow(page).to receive(:next?).and_return(true)
allow(page).to receive(:next).and_return('https://example.com/next2')
expect(importer.page_keyset)
.to receive(:current)
.and_return('https://example.com/next')
allow_next_instance_of(Bitbucket::Client) do |client|
expect(client)
.to receive(:each_page)
.with(:issues, :issue, 'foo/bar', {
next_url: 'https://example.com/next'
})
.and_yield(page)
end
expect(importer.page_keyset)
.to receive(:set)
.with('https://example.com/next2')
.and_return(true)
expect(importer)
.to receive(:already_enqueued?)
.with(object)
.and_return(false)
expect(importer)
.to receive(:mark_as_enqueued)
.with(object)
expect { |b| importer.each_object_to_import(&b) }
.to yield_with_args(object)
end
it 'does not yield the object if it was already imported' do
page = instance_double('Bitbucket::Page', attrs: [], items: [opened_issue])
allow(page).to receive(:next?).and_return(true)
allow(page).to receive(:next).and_return('https://example.com/next')
allow_next_instance_of(Bitbucket::Client) do |client|
expect(client)
.to receive(:each_page)
.with(:issues, :issue, 'foo/bar', { next_url: nil })
.and_yield(page)
end
expect(importer.page_keyset)
.to receive(:set)
.with('https://example.com/next')
.and_return(true)
expect(importer)
.to receive(:already_enqueued?)
.with(object)
.and_return(true)
expect(importer)
.not_to receive(:mark_as_enqueued)
expect { |b| importer.each_object_to_import(&b) }
.not_to yield_control
end
end
end
end

View File

@ -0,0 +1,38 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Import::PageKeyset, :clean_gitlab_redis_shared_state, feature_category: :importers do
let(:project) { instance_double(Project, id: 1) }
let(:keyset) { described_class.new(project, :issues, 'bitbucket-import') }
describe '#initialize' do
it 'sets the initial next url to be nil when no value is cached' do
expect(keyset.current).to eq(nil)
end
it 'sets the initial next url to the cached value when one is present' do
Gitlab::Cache::Import::Caching.write(keyset.cache_key, 'https://example.com/nextpresent')
expect(described_class.new(project, :issues, 'bitbucket-import').current).to eq('https://example.com/nextpresent')
end
end
describe '#set' do
it 'sets the next url' do
keyset.set('https://example.com/next')
expect(keyset.current).to eq('https://example.com/next')
end
end
describe '#expire!' do
it 'expires the current next url' do
keyset.set('https://example.com/next')
keyset.expire!
expect(Gitlab::Cache::Import::Caching.read(keyset.cache_key)).to be_nil
expect(keyset.current).to eq(nil)
end
end
end

View File

@ -1183,42 +1183,37 @@ RSpec.describe Group, feature_category: :groups_and_projects do
describe '.excluding_restricted_visibility_levels_for_user' do
let_it_be(:admin_user) { create(:admin) }
context 'when restricted_visibility_level is not configured' do
context 'when user is an admin', :enable_admin_mode do
it 'returns all groups' do
expect(described_class.excluding_restricted_visibility_levels_for_user(admin_user)).to contain_exactly(
private_group, internal_group, group
)
let(:private_vis) { Gitlab::VisibilityLevel::PRIVATE }
let(:internal_vis) { Gitlab::VisibilityLevel::INTERNAL }
let(:public_vis) { Gitlab::VisibilityLevel::PUBLIC }
subject { described_class.excluding_restricted_visibility_levels_for_user(user1) }
context 'with table syntax' do
using RSpec::Parameterized::TableSyntax
where(:restricted_visibility_levels, :expected_groups) do
[] | lazy { [private_group, internal_group, group] }
[private_vis] | lazy { [internal_group, group] }
[internal_vis] | lazy { [private_group, internal_group, group] }
[public_vis] | lazy { [private_group, internal_group, group] }
[private_vis, internal_vis] | lazy { [group] }
[private_vis, public_vis] | lazy { [internal_group, group] }
[internal_vis, public_vis] | lazy { [private_group, internal_group, group] }
[private_vis, internal_vis, public_vis] | lazy { [] }
end
with_them do
before do
stub_application_setting(restricted_visibility_levels: restricted_visibility_levels)
end
end
context 'when user is not an admin' do
it 'returns all groups' do
expect(described_class.excluding_restricted_visibility_levels_for_user(user1)).to contain_exactly(
private_group, internal_group, group
)
end
end
end
it { is_expected.to match_array(expected_groups) }
context 'when restricted_visibility_level is set to private' do
before do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE])
end
context 'with admin mode enabled', :enable_admin_mode do
subject { described_class.excluding_restricted_visibility_levels_for_user(admin_user) }
context 'and user is an admin', :enable_admin_mode do
it 'returns all groups' do
expect(described_class.excluding_restricted_visibility_levels_for_user(admin_user)).to contain_exactly(
private_group, internal_group, group
)
end
end
context 'and user is not an admin' do
it 'excludes private groups' do
expect(described_class.excluding_restricted_visibility_levels_for_user(user1)).to contain_exactly(
internal_group, group
)
it { is_expected.to match_array([private_group, internal_group, group]) }
end
end
end

View File

@ -28,7 +28,7 @@ RSpec.describe ReleaseEnvironmentNotification, feature_category: :delivery do
context 'when all environment variables are provided' do
before do
stub_env('CI_PIPELINE_ID', '1')
stub_env('CI_PIPELINE_URL', '1')
stub_env('ENVIRONMENT', 'my-env')
stub_env('VERSIONS', '{"gitlab": "12.7.0"}')
stub_env('OPS_RELEASE_TOOLS_PIPELINE_TOKEN', 'token')
@ -44,7 +44,7 @@ RSpec.describe ReleaseEnvironmentNotification, feature_category: :delivery do
describe '#execute' do
context 'when all environment variables are provided' do
before do
stub_env('CI_PIPELINE_ID', '1')
stub_env('CI_PIPELINE_URL', '1')
stub_env('ENVIRONMENT', 'my-env')
stub_env('VERSIONS', '{"gitlab": "12.7.0"}')
stub_env('OPS_RELEASE_TOOLS_PIPELINE_TOKEN', 'token')

View File

@ -66,95 +66,116 @@ RSpec.describe Tooling::Danger::CiJobsDependencyValidation, feature_category: :t
allow(ci_jobs_dependency_validation.helper).to receive(:mr_target_branch).and_return('master')
allow(ci_jobs_dependency_validation.helper).to receive(:mr_source_project_id).and_return('1')
allow(ci_jobs_dependency_validation.helper).to receive(:mr_target_project_id).and_return('1')
allow($stdout).to receive(:puts)
end
describe '#output_message' do
shared_examples 'empty message' do |output, num_of_jobs_in_target_branch, num_of_jobs_in_source_branch|
it 'returns empty string and prints the correct messages to stdout' do
default_output = <<~OUTPUT
Initializing #{num_of_jobs_in_target_branch} jobs from master ci config...
Initializing #{num_of_jobs_in_source_branch} jobs from feature_branch ci config...
Looking for misconfigured dependent jobs for setup-test-env...
Detected 0 dependent jobs with misconfigured rules.
Looking for misconfigured dependent jobs for compile-test-assets...
Detected 0 dependent jobs with misconfigured rules.
Looking for misconfigured dependent jobs for retrieve-tests-metadata...
Detected 0 dependent jobs with misconfigured rules.
Looking for misconfigured dependent jobs for build-gdk-image...
Detected 0 dependent jobs with misconfigured rules.
OUTPUT
expect { expect(ci_jobs_dependency_validation.output_message).to eq('') }.tap do |expectation|
expected_output = output == :default_stdout_output ? default_output : output
expectation.to output(expected_output).to_stdout unless expected_output.nil?
shared_examples 'output message' do |warning|
it 'outputs messages' do
if warning
expect(ci_jobs_dependency_validation).to receive(:warn).with(described_class::FAILED_VALIDATION_WARNING)
else
expect(ci_jobs_dependency_validation).not_to receive(:warn)
end
expect(ci_jobs_dependency_validation.output_message).to eq(expected_message)
end
end
context 'when not in ci environment' do
let(:ci) { false }
let(:expected_message) { '' }
it_behaves_like 'empty message', nil
it_behaves_like 'output message'
end
context 'when in ci environment' do
context 'with no ci changes' do
let(:expected_message) { '' }
before do
allow(ci_jobs_dependency_validation.helper).to receive(:has_ci_changes?).and_return(false)
end
it_behaves_like 'empty message'
it_behaves_like 'output message'
end
context 'when target branch jobs is empty' do
let(:source_branch_merged_yaml) { YAML.dump({}) }
context 'with api fails to retrieve jobs from target branch' do
let(:error_msg) { '404 not found' }
it_behaves_like 'empty message'
end
context 'when retrieving target branch jobs fails' do
before do
allow(ci_jobs_dependency_validation).to receive_message_chain(:gitlab, :api, :get)
.with("/projects/1/ci/lint", query: query).and_raise('404 Not Found')
allow(
ci_jobs_dependency_validation
).to receive_message_chain(:gitlab, :api, :get).with("/projects/1/ci/lint", query: {}).and_raise(error_msg)
end
it 'prints the failure but does not break' do
it 'warns validation is skipped and outputs empty message' do
expect(ci_jobs_dependency_validation).to receive(:warn).with(
"#{described_class::SKIPPED_VALIDATION_WARNING}: #{error_msg}"
)
expect { expect(ci_jobs_dependency_validation.output_message).to eq('') }.tap do |expectation|
expectation
.to output(<<~MSG).to_stdout
Initializing 1 jobs from master ci config...
404 Not Found
Initializing 0 jobs from feature_branch ci config...
MSG
end
end
end
context 'when source branch jobs is empty' do
let(:master_merged_yaml) { YAML.dump({}) }
it_behaves_like 'empty message'
end
context 'when retrieving source branch jobs fails' do
before do
allow(ci_jobs_dependency_validation).to receive_message_chain(:gitlab, :api, :get)
.with("/projects/1/ci/lint", query: {}).and_raise('404 Not Found')
end
it 'prints the failure but does not break' do
expect { expect(ci_jobs_dependency_validation.output_message).to eq('') }.tap do |expectation|
expectation
.to output(<<~MSG).to_stdout
404 Not Found
Initializing 0 jobs from master ci config...
MSG
end
end
end
context 'with api fails to retrieve jobs from source branch' do
let(:error_msg) { '404 not found' }
before do
allow(
ci_jobs_dependency_validation
).to receive_message_chain(:gitlab, :api, :get).with("/projects/1/ci/lint", query: query).and_raise(error_msg)
end
it 'warns validation is skipped and outputs empty message' do
expect(ci_jobs_dependency_validation).to receive(:warn).with(
"#{described_class::SKIPPED_VALIDATION_WARNING}: #{error_msg}"
)
expect { expect(ci_jobs_dependency_validation.output_message).to eq('') }.tap do |expectation|
expectation
.to output(<<~MSG).to_stdout
Initializing 1 jobs from master ci config...
Initializing 0 jobs from feature_branch ci config...
MSG
end
end
end
context 'with api returns nil for merged yaml' do
let(:source_branch_merged_yaml) { nil }
it 'warns validation is skipped and outputs empty message' do
expect(ci_jobs_dependency_validation).to receive(:warn).with(
"#{described_class::SKIPPED_VALIDATION_WARNING}: no implicit conversion of nil into String"
)
expect(ci_jobs_dependency_validation.output_message).to eq('')
end
end
context 'when target branch jobs is empty' do
let(:source_branch_merged_yaml) { YAML.dump({}) }
let(:expected_message) { '' }
it_behaves_like 'output message'
end
context 'when source branch jobs is empty' do
let(:master_merged_yaml) { YAML.dump({}) }
let(:expected_message) { '' }
it_behaves_like 'output message'
end
context 'when jobs do not have dependencies' do
it_behaves_like 'empty message', :default_stdout_output, 1, 4
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'output message'
end
context 'when needed jobs are missing is source branch' do
@ -162,23 +183,51 @@ RSpec.describe Tooling::Danger::CiJobsDependencyValidation, feature_category: :t
YAML.dump({ 'job1' => job_config_base.merge({ 'rules' => rules_with_new_condition }) })
end
it 'returns warning for the missing jobs' do
expect(ci_jobs_dependency_validation.output_message).to eq(
<<~MARKDOWN.chomp
Unable to find job setup-test-env in feature_branch. Skipping.
Unable to find job compile-test-assets in feature_branch. Skipping.
Unable to find job retrieve-tests-metadata in feature_branch. Skipping.
Unable to find job build-gdk-image in feature_branch. Skipping.
MARKDOWN
)
let(:expected_message) do
<<~MARKDOWN
### CI Jobs Dependency Validation
| name | validation status |
| ------ | --------------- |
| `setup-test-env` | :warning: Skipped |
| `compile-test-assets` | :warning: Skipped |
| `retrieve-tests-metadata` | :warning: Skipped |
| `build-gdk-image` | :warning: Skipped |
| `build-assets-image` | :warning: Skipped |
| `build-qa-image` | :warning: Skipped |
- :warning: Unable to find job `setup-test-env` in branch `feature_branch`.
If this job has been removed, please delete it from `Tooling::Danger::CiJobsDependencyValidation::VALIDATED_JOB_NAMES`.
Validation skipped.
- :warning: Unable to find job `compile-test-assets` in branch `feature_branch`.
If this job has been removed, please delete it from `Tooling::Danger::CiJobsDependencyValidation::VALIDATED_JOB_NAMES`.
Validation skipped.
- :warning: Unable to find job `retrieve-tests-metadata` in branch `feature_branch`.
If this job has been removed, please delete it from `Tooling::Danger::CiJobsDependencyValidation::VALIDATED_JOB_NAMES`.
Validation skipped.
- :warning: Unable to find job `build-gdk-image` in branch `feature_branch`.
If this job has been removed, please delete it from `Tooling::Danger::CiJobsDependencyValidation::VALIDATED_JOB_NAMES`.
Validation skipped.
- :warning: Unable to find job `build-assets-image` in branch `feature_branch`.
If this job has been removed, please delete it from `Tooling::Danger::CiJobsDependencyValidation::VALIDATED_JOB_NAMES`.
Validation skipped.
- :warning: Unable to find job `build-qa-image` in branch `feature_branch`.
If this job has been removed, please delete it from `Tooling::Danger::CiJobsDependencyValidation::VALIDATED_JOB_NAMES`.
Validation skipped.
MARKDOWN
end
it_behaves_like 'output message', true
end
context 'when job1 in branch needs one other job to run' do
let(:job_name) { 'job1' }
let(:needed_job_name) { 'setup-test-env' }
let(:needed_job_config) { job_config_base }
let(:needed_job) { { needed_job_name => needed_job_config } }
let(:source_branch_merged_yaml) do
YAML.dump(source_branch_jobs_base.merge(
@ -193,62 +242,90 @@ RSpec.describe Tooling::Danger::CiJobsDependencyValidation, feature_category: :t
context 'with a hidden job' do
let(:job_name) { '.job1' }
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'empty message', :default_stdout_output, 1, 5
it_behaves_like 'output message'
end
context 'with a ignored job' do
context 'with a global keyword' do
let(:job_name) { 'default' }
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'empty message', :default_stdout_output, 1, 5
it_behaves_like 'output message'
end
context 'when the dependent job config has not changed (identical in master and in branch)' do
let(:master_merged_yaml) { source_branch_merged_yaml }
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'empty message', :default_stdout_output, 5, 5
it_behaves_like 'output message'
end
context 'when VALIDATED_JOB_NAMES does not contain the needed job' do
let(:needed_job_name) { 'not-recognized' }
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'empty message', :default_stdout_output, 1, 5
it_behaves_like 'output message'
end
context 'when VALIDATED_JOB_NAMES contains the needed job and dependent job config changed' do
context 'when the added rule is also present in its needed job' do
let(:needed_job_config) { job_config_base.merge({ 'rules' => rules_with_new_condition }) }
let(:source_branch_merged_yaml) do
YAML.dump(source_branch_jobs_base.merge({
job_name => job_config_base.merge({
'rules' => rules_with_new_condition,
'needs' => [needed_job_name]
}),
needed_job_name => { 'rules' => rules_with_new_condition, 'needs' => [] }
}))
end
it_behaves_like 'empty message'
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'output message'
end
context 'when an added rule is missing in its needed job' do
it 'returns warning' do
expect(ci_jobs_dependency_validation.output_message).to eq(
<<~MARKDOWN
**This MR adds new rules to the following dependent jobs for `setup-test-env`:**
let(:expected_message) do
<<~MARKDOWN
### CI Jobs Dependency Validation
`job1`:
| name | validation status |
| ------ | --------------- |
| `setup-test-env` | 🚨 Failed (1) |
| `compile-test-assets` | :white_check_mark: Passed |
| `retrieve-tests-metadata` | :white_check_mark: Passed |
| `build-gdk-image` | :white_check_mark: Passed |
| `build-assets-image` | :white_check_mark: Passed |
| `build-qa-image` | :white_check_mark: Passed |
```yaml
- if: $NEW_VAR == "true"
```
- 🚨 **New rules were detected in the following jobs but missing in `setup-test-env`:**
Please ensure the changes are included in the rules for `setup-test-env` to avoid yaml syntax error!
<details><summary>Click to expand details</summary>
<details><summary>Click to expand rules for setup-test-env to confirm if the new conditions are present</summary>
`job1`:
```yaml
- if: $CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE
== "detached"
changes:
- doc/index.md
```
```yaml
- if: $NEW_VAR == "true"
```
</details>
MARKDOWN
)
Here are the rules for `setup-test-env`:
```yaml
- if: $CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE
== "detached"
changes:
- doc/index.md
```
</details>
To avoid CI config errors, please verify if the new rules should be added to `setup-test-env`.
Please add a comment if rules should not be added.
MARKDOWN
end
it_behaves_like 'output message', true
end
end
end
@ -264,7 +341,9 @@ RSpec.describe Tooling::Danger::CiJobsDependencyValidation, feature_category: :t
))
end
it_behaves_like 'empty message', :default_stdout_output, 1, 7
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'output message'
end
context 'when dependent job has a rule that is not a hash' do
@ -279,7 +358,9 @@ RSpec.describe Tooling::Danger::CiJobsDependencyValidation, feature_category: :t
)
end
it_behaves_like 'empty message', :default_stdout_output, 1, 5
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'output message'
end
context 'when dependent job have an added rule but condition reads "when: never"' do
@ -295,22 +376,12 @@ RSpec.describe Tooling::Danger::CiJobsDependencyValidation, feature_category: :t
)
end
it_behaves_like 'empty message', <<~OUTPUT
Initializing 1 jobs from master ci config...
Initializing 5 jobs from feature_branch ci config...
Looking for misconfigured dependent jobs for setup-test-env...
Detected 0 jobs with applicable rule changes.
Detected 0 dependent jobs with misconfigured rules.
Looking for misconfigured dependent jobs for compile-test-assets...
Detected 0 dependent jobs with misconfigured rules.
Looking for misconfigured dependent jobs for retrieve-tests-metadata...
Detected 0 dependent jobs with misconfigured rules.
Looking for misconfigured dependent jobs for build-gdk-image...
Detected 0 dependent jobs with misconfigured rules.
OUTPUT
let(:expected_message) { described_class::VALIDATION_PASSED_OUTPUT }
it_behaves_like 'output message'
end
context 'when dependent job have modified rules, but its attributes has nested arrays' do
context 'when dependent job have modified rules, but its attributes have nested arrays' do
let(:source_branch_merged_yaml) do
YAML.dump(
source_branch_jobs_base.merge({
@ -322,37 +393,56 @@ RSpec.describe Tooling::Danger::CiJobsDependencyValidation, feature_category: :t
)
end
it 'correctly parses input yaml and returns warning' do
expected_markdown = %w[setup-test-env compile-test-assets retrieve-tests-metadata].map do |job_name|
let(:message_preview) do
<<~MARKDOWN
### CI Jobs Dependency Validation
| name | validation status |
| ------ | --------------- |
| `setup-test-env` | 🚨 Failed (1) |
| `compile-test-assets` | 🚨 Failed (1) |
| `retrieve-tests-metadata` | 🚨 Failed (1) |
| `build-gdk-image` | :white_check_mark: Passed |
| `build-assets-image` | :white_check_mark: Passed |
| `build-qa-image` | :white_check_mark: Passed |
MARKDOWN
end
let(:expected_message) do
%w[setup-test-env compile-test-assets retrieve-tests-metadata].map do |job_name|
<<~MARKDOWN
**This MR adds new rules to the following dependent jobs for `#{job_name}`:**
- 🚨 **New rules were detected in the following jobs but missing in `#{job_name}`:**
`job1`:
<details><summary>Click to expand details</summary>
```yaml
- if: 'true'
when: always
- if: $NEW_VAR == "true"
```
`job1`:
Please ensure the changes are included in the rules for `#{job_name}` to avoid yaml syntax error!
```yaml
- if: 'true'
when: always
- if: $NEW_VAR == "true"
```
<details><summary>Click to expand rules for #{job_name} to confirm if the new conditions are present</summary>
Here are the rules for `#{job_name}`:
```yaml
- if: $CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE
== "detached"
changes:
- doc/index.md
```
```yaml
- if: $CI_MERGE_REQUEST_EVENT_TYPE == "merged_result" || $CI_MERGE_REQUEST_EVENT_TYPE
== "detached"
changes:
- doc/index.md
```
</details>
</details>
To avoid CI config errors, please verify if the new rules should be added to `#{job_name}`.
Please add a comment if rules should not be added.
MARKDOWN
end.join('').chomp
expect(ci_jobs_dependency_validation.output_message).to eq(expected_markdown)
end.join('').prepend(message_preview).chomp
end
it_behaves_like 'output message', true
end
end
end

View File

@ -1,70 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'devise/shared/_signup_box_form', feature_category: :acquisition do
let(:button_text) { '_button_text_' }
before do
stub_devise
allow(view).to receive(:arkose_labs_enabled?).and_return(false)
allow(view).to receive(:show_omniauth_providers).and_return(false)
allow(view).to receive(:url).and_return('_url_')
allow(view).to receive(:button_text).and_return(button_text)
allow(view).to receive(:signup_username_data_attributes).and_return({})
allow(view).to receive(:tracking_label).and_return('')
stub_template 'devise/shared/_error_messages.html.haml' => ''
end
context 'when signup_intent_step_one experiment is control' do
before do
stub_experiments(signup_intent_step_one: :control)
end
it 'does not render signup_intent select' do
render
expect(rendered).not_to have_css('label[for="signup_intent"]')
expect(rendered).not_to have_css('select[id="signup_intent"]')
end
end
context 'when signup_intent_step_one experiment is candidate' do
before do
stub_experiments(signup_intent_step_one: :candidate)
end
it 'renders signup_intent select' do
render
expect(rendered).to include(s_('SignUp|I want to...'))
expect(rendered).to include(s_('SignUp|Set up a new team'))
expect(rendered).to include(s_('SignUp|Set up a new personal account'))
expect(rendered).to include(s_('SignUp|Join an existing team'))
expect(rendered).to include(s_('SignUp|Contribute to a public project on GitLab'))
expect(rendered).to have_css('select[name="signup_intent"]')
expect(rendered).to have_css(
'option[value="select_signup_intent_dropdown_new_team_registration_step_one"]'
)
expect(rendered).to have_css(
'option[value="select_signup_intent_dropdown_new_personal_account_registration_step_one"]'
)
expect(rendered).to have_css(
'option[value="select_signup_intent_dropdown_join_existing_team_registration_step_one"]'
)
expect(rendered).to have_css(
'option[value="select_signup_intent_dropdown_contribute_public_project_registration_step_one"]'
)
end
end
def stub_devise
allow(view).to receive(:resource).and_return(spy)
allow(view).to receive(:resource_name).and_return(:user)
end
end

View File

@ -3,9 +3,19 @@
module Tooling
module Danger
module CiJobsDependencyValidation
VALIDATED_JOB_NAMES = %w[setup-test-env compile-test-assets retrieve-tests-metadata build-gdk-image].freeze
VALIDATED_JOB_NAMES = %w[
setup-test-env
compile-test-assets
retrieve-tests-metadata
build-gdk-image
build-assets-image
build-qa-image
].freeze
GLOBAL_KEYWORDS = %w[workflow variables stages default].freeze
DEFAULT_BRANCH_NAME = 'master'
FAILED_VALIDATION_WARNING = 'Please review warnings in the *CI Jobs Dependency Validation* section below.'
SKIPPED_VALIDATION_WARNING = 'Job dependency validation is skipped due to error fetching merged CI yaml'
VALIDATION_PASSED_OUTPUT = ':white_check_mark: No warnings found in ci job dependencies.'
Job = Struct.new(:name, :rules, :needs, keyword_init: true) do
def self.parse_rules_from_yaml(name, jobs_yaml)
@ -28,7 +38,6 @@ module Tooling
end
def self.ignore?(job_name)
# hidden jobs are extended by other jobs thus their rules will be verified in the extending jobs
GLOBAL_KEYWORDS.include?(job_name) || job_name.start_with?('.')
end
@ -42,9 +51,27 @@ module Tooling
def output_message
return '' if !helper.ci? || !helper.has_ci_changes? || target_branch_jobs.empty? || source_branch_jobs.empty?
VALIDATED_JOB_NAMES.filter_map do |needed_job_name|
construct_warning_message(needed_job_name)
end.join("\n")
validation_statuses = VALIDATED_JOB_NAMES.to_h do |job_name|
[job_name, { skipped: false, failed: 0 }]
end
output = VALIDATED_JOB_NAMES.filter_map do |needed_job_name|
validate(needed_job_name, validation_statuses)
end.join("\n").chomp
return VALIDATION_PASSED_OUTPUT if output == ''
warn FAILED_VALIDATION_WARNING
<<~MARKDOWN
### CI Jobs Dependency Validation
| name | validation status |
| ------ | --------------- |
#{construct_summary_table(validation_statuses)}
#{output}
MARKDOWN
end
private
@ -70,7 +97,7 @@ module Tooling
YAML.load(api_response['merged_yaml'], aliases: true)
rescue StandardError => e
puts e.message
warn "#{SKIPPED_VALIDATION_WARNING}: #{e.message}"
{}
end
@ -99,43 +126,70 @@ module Tooling
ref == DEFAULT_BRANCH_NAME ? {} : ref_query_params
end
def construct_warning_message(needed_job_name)
def validate(needed_job_name, validation_statuses)
needed_job_in_source_branch = source_branch_jobs.find { |job| job.name == needed_job_name }
needed_job_in_target_branch = target_branch_jobs.find { |job| job.name == needed_job_name }
if needed_job_in_source_branch.nil?
return "Unable to find job #{needed_job_name} in #{source_branch}. Skipping."
validation_statuses[needed_job_name][:skipped] = true
return <<~MARKDOWN
- :warning: Unable to find job `#{needed_job_name}` in branch `#{source_branch}`.
If this job has been removed, please delete it from `Tooling::Danger::CiJobsDependencyValidation::VALIDATED_JOB_NAMES`.
Validation skipped.
MARKDOWN
end
puts "Looking for misconfigured dependent jobs for #{needed_job_name}..."
warnings = changed_jobs_warnings(
failures = validation_failures(
needed_job_in_source_branch: needed_job_in_source_branch,
needed_job_in_target_branch: needed_job_in_target_branch
)
puts "Detected #{warnings.count} dependent jobs with misconfigured rules."
failed_count = failures.count
return if warnings.empty?
return if failed_count == 0
validation_statuses[needed_job_name][:failed] = failed_count
<<~MSG
**This MR adds new rules to the following dependent jobs for `#{needed_job_name}`:**
- 🚨 **New rules were detected in the following jobs but missing in `#{needed_job_name}`:**
#{warnings.join("\n")}
<details><summary>Click to expand details</summary>
Please ensure the changes are included in the rules for `#{needed_job_name}` to avoid yaml syntax error!
#{failures.join("\n")}
<details><summary>Click to expand rules for #{needed_job_name} to confirm if the new conditions are present</summary>
Here are the rules for `#{needed_job_name}`:
```yaml
#{dump_yaml(needed_job_in_source_branch.rules)}
```
</details>
To avoid CI config errors, please verify if the new rules should be added to `#{needed_job_name}`.
Please add a comment if rules should not be added.
MSG
end
def changed_jobs_warnings(needed_job_in_source_branch:, needed_job_in_target_branch:)
# Builds the body rows of the markdown summary table shown in the Danger
# comment: one row per validated job with its outcome.
#
# validation_statuses - Hash of job name => { skipped: Boolean, failed: Integer }
#
# Returns a newline-joined String of markdown table rows.
def construct_summary_table(validation_statuses)
  rows = validation_statuses.map do |name, status|
    outcome =
      if status[:skipped]
        ":warning: Skipped"
      elsif status[:failed] == 0
        ":white_check_mark: Passed"
      else
        "🚨 Failed (#{status[:failed]})"
      end

    "| `#{name}` | #{outcome} |"
  end

  rows.join("\n")
end
def validation_failures(needed_job_in_source_branch:, needed_job_in_target_branch:)
dependent_jobs_new = needed_job_in_source_branch&.dependent_jobs(source_branch_jobs) || []
dependent_jobs_old = needed_job_in_target_branch&.dependent_jobs(target_branch_jobs) || []
@ -150,8 +204,6 @@ module Tooling
dependent_job_with_rule_change.rules - dependent_job_old.rules
end
puts "Detected #{report_candidates.count} jobs with applicable rule changes."
rules_to_report = exact_rules_missing_in_needed_job(
needed_job: needed_job_in_source_branch,
rules: report_candidates