Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2024-08-22 15:09:06 +00:00
parent 4f94a2050c
commit 58e008dcc5
88 changed files with 1365 additions and 238 deletions

View File

@ -229,6 +229,22 @@ e2e:test-on-gdk:
SKIP_MESSAGE: Skipping test-on-gdk due to mr containing only quarantine changes!
GDK_IMAGE: "${CI_REGISTRY_IMAGE}/gitlab-qa-gdk:${CI_COMMIT_SHA}"
e2e:test-product-analytics:
extends:
- .qa:rules:e2e:test-on-gdk
stage: qa
allow_failure: true
variables:
PIPELINE_NAME: E2E Product Analytics
GDK_IMAGE: "${CI_REGISTRY_IMAGE}/gitlab-qa-gdk:${CI_COMMIT_SHA}"
GITLAB_QA_IMAGE: "${CI_REGISTRY_IMAGE}/gitlab-ee-qa:${CI_COMMIT_SHA}"
needs:
- build-gdk-image
- build-qa-image
trigger:
project: gitlab-org/analytics-section/product-analytics/devkit
strategy: depend
e2e:test-on-cng:
extends:
- .e2e-trigger-base

View File

@ -921,13 +921,17 @@
- if: '$ENABLE_BUILD_QA_IMAGE == "true"'
- <<: *if-merge-request-labels-run-all-e2e
- <<: *if-merge-request-labels-run-review-app
- <<: *if-merge-request
changes: *ruby-version-patterns
- <<: *if-merge-request
changes: *qa-patterns
- <<: *if-merge-request
changes: *gitaly-patterns
- <<: *if-merge-request-targeting-stable-branch
changes: *setup-test-env-patterns
- <<: *if-merge-request
changes: *ci-build-images-patterns
- !reference [".prevent-tier-2-and-below", rules]
- !reference [".prevent-tier-1", rules]
- <<: *if-merge-request
changes: *code-qa-patterns
# Rules to support .qa:rules:package-and-test-ee
@ -939,7 +943,12 @@
changes: *nodejs-patterns
- <<: *if-merge-request
changes: *ci-qa-patterns
- <<: *if-merge-request
changes: *gdk-component-patterns
# These rules have to include rules from ".qa:rules:e2e:test-on-gdk" which are also used
# for "e2e:test-product-analytics" job in "qa.gitlab-ci.yml".
# "e2e:test-product-analytics" job requires QA image to be built.
.build-images:rules:build-qa-image:
rules:
- <<: *if-not-canonical-namespace
@ -1785,7 +1794,6 @@
# - <<: *if-merge-request
# changes: *cng-orchestrator-patterns
.qa:rules:package-and-test-nightly:
rules:
- !reference [".qa:rules:package-and-test-never-run", rules]

View File

@ -1378,7 +1378,6 @@ Gitlab/BoundedContexts:
- 'app/policies/project_policy.rb'
- 'app/policies/project_snippet_policy.rb'
- 'app/policies/project_statistics_policy.rb'
- 'app/policies/prometheus_alert_policy.rb'
- 'app/policies/protected_branch_access_policy.rb'
- 'app/policies/protected_branch_policy.rb'
- 'app/policies/release_policy.rb'

View File

@ -385,7 +385,6 @@ Gitlab/NamespacedClass:
- 'app/policies/project_policy.rb'
- 'app/policies/project_snippet_policy.rb'
- 'app/policies/project_statistics_policy.rb'
- 'app/policies/prometheus_alert_policy.rb'
- 'app/policies/protected_branch_access_policy.rb'
- 'app/policies/protected_branch_policy.rb'
- 'app/policies/release_policy.rb'

View File

@ -11,13 +11,13 @@ export default {
FiltersTemplate,
},
computed: {
...mapGetters(['showArchived']),
...mapGetters(['hasProjectContext']),
},
};
</script>
<template>
<filters-template>
<archived-filter v-if="showArchived" class="gl-mb-5" />
<archived-filter v-if="hasProjectContext" class="gl-mb-5" />
</filters-template>
</template>

View File

@ -21,7 +21,7 @@ export default {
},
mixins: [glFeatureFlagsMixin()],
computed: {
...mapGetters(['showArchived']),
...mapGetters(['hasProjectContext']),
...mapState(['searchType']),
showLabelFilter() {
return this.searchType === SEARCH_TYPE_ADVANCED;
@ -35,6 +35,6 @@ export default {
<status-filter class="gl-mb-5" />
<confidentiality-filter class="gl-mb-5" />
<label-filter v-if="showLabelFilter" class="gl-mb-5" />
<archived-filter v-if="showArchived" class="gl-mb-5" />
<archived-filter v-if="hasProjectContext" class="gl-mb-5" />
</filters-template>
</template>

View File

@ -13,7 +13,7 @@ export default {
ArchivedFilter,
},
computed: {
...mapGetters(['showArchived']),
...mapGetters(['hasProjectContext']),
},
};
</script>
@ -21,6 +21,6 @@ export default {
<template>
<filters-template>
<status-filter class="gl-mb-5" />
<archived-filter v-if="showArchived" class="gl-mb-5" />
<archived-filter v-if="hasProjectContext" class="gl-mb-5" />
</filters-template>
</template>

View File

@ -11,13 +11,13 @@ export default {
FiltersTemplate,
},
computed: {
...mapGetters(['showArchived']),
...mapGetters(['hasProjectContext']),
},
};
</script>
<template>
<filters-template>
<archived-filter v-if="showArchived" class="gl-mb-5" />
<archived-filter v-if="hasProjectContext" class="gl-mb-5" />
</filters-template>
</template>

View File

@ -11,13 +11,13 @@ export default {
FiltersTemplate,
},
computed: {
...mapGetters(['showArchived']),
...mapGetters(['hasProjectContext']),
},
};
</script>
<template>
<filters-template>
<archived-filter v-if="showArchived" class="gl-mb-5" />
<archived-filter v-if="hasProjectContext" class="gl-mb-5" />
</filters-template>
</template>

View File

@ -11,13 +11,13 @@ export default {
FiltersTemplate,
},
computed: {
...mapGetters(['showArchived']),
...mapGetters(['hasProjectContext']),
},
};
</script>
<template>
<filters-template>
<archived-filter v-if="showArchived" class="gl-mb-5" />
<archived-filter v-if="hasProjectContext" class="gl-mb-5" />
</filters-template>
</template>

View File

@ -30,6 +30,10 @@ export default {
UpdateEmail,
},
props: {
username: {
type: String,
required: true,
},
obfuscatedEmail: {
type: String,
required: true,
@ -168,6 +172,7 @@ export default {
<gl-form v-else @submit.prevent="verify">
<section class="gl-mb-5">
<gl-sprintf :message="$options.i18n.explanation">
<template #username>{{ username }}</template>
<template #email>
<strong>{{ email }}</strong>
</template>

View File

@ -1,7 +1,7 @@
import { s__, __ } from '~/locale';
export const I18N_EXPLANATION = s__(
"IdentityVerification|For added security, you'll need to verify your identity. We've sent a verification code to %{email}",
"IdentityVerification|You are signed in as %{username}. For added security, you'll need to verify your identity. We've sent a verification code to %{email}",
);
export const I18N_INPUT_LABEL = s__('IdentityVerification|Verification code');
export const I18N_EMAIL_EMPTY_CODE = s__('IdentityVerification|Enter a code.');

View File

@ -9,7 +9,8 @@ export default () => {
return null;
}
const { obfuscatedEmail, verifyPath, resendPath, offerEmailReset, updateEmailPath } = el.dataset;
const { username, obfuscatedEmail, verifyPath, resendPath, offerEmailReset, updateEmailPath } =
el.dataset;
return new Vue({
el,
@ -17,6 +18,7 @@ export default () => {
render(createElement) {
return createElement(EmailVerification, {
props: {
username,
obfuscatedEmail,
verifyPath,
resendPath,

View File

@ -6,20 +6,25 @@ import FrequentItems from './frequent_items.vue';
export default {
name: 'FrequentlyVisitedGroups',
components: {
FrequentItems,
},
inject: ['groupsPath'],
apollo: {
frecentGroups: {
query: currentUserFrecentGroupsQuery,
},
},
components: {
FrequentItems,
},
inject: ['groupsPath'],
i18n: {
groupName: s__('Navigation|Frequently visited groups'),
viewAllText: s__('Navigation|View all my groups'),
emptyStateText: s__('Navigation|Groups you visit often will appear here.'),
},
computed: {
items() {
return this.frecentGroups || [];
},
},
FREQUENTLY_VISITED_GROUPS_HANDLE,
};
</script>
@ -29,7 +34,7 @@ export default {
:loading="$apollo.queries.frecentGroups.loading"
:empty-state-text="$options.i18n.emptyStateText"
:group-name="$options.i18n.groupName"
:items="frecentGroups"
:items="items"
view-all-items-icon="group"
:view-all-items-text="$options.i18n.viewAllText"
:view-all-items-path="groupsPath"

View File

@ -6,20 +6,25 @@ import FrequentItems from './frequent_items.vue';
export default {
name: 'FrequentlyVisitedProjects',
components: {
FrequentItems,
},
inject: ['projectsPath'],
apollo: {
frecentProjects: {
query: currentUserFrecentProjectsQuery,
},
},
components: {
FrequentItems,
},
inject: ['projectsPath'],
i18n: {
groupName: s__('Navigation|Frequently visited projects'),
viewAllText: s__('Navigation|View all my projects'),
emptyStateText: s__('Navigation|Projects you visit often will appear here.'),
},
computed: {
items() {
return this.frecentProjects || [];
},
},
FREQUENTLY_VISITED_PROJECTS_HANDLE,
};
</script>
@ -29,7 +34,7 @@ export default {
:loading="$apollo.queries.frecentProjects.loading"
:empty-state-text="$options.i18n.emptyStateText"
:group-name="$options.i18n.groupName"
:items="frecentProjects"
:items="items"
view-all-items-icon="project"
:view-all-items-text="$options.i18n.viewAllText"
:view-all-items-path="projectsPath"

View File

@ -5,7 +5,6 @@ module Projects
class OperationsController < Projects::ApplicationController
layout 'project_settings'
before_action :authorize_admin_operations!
before_action :authorize_read_prometheus_alerts!, only: [:reset_alerting_token]
before_action do
push_frontend_feature_flag(:integrated_error_tracking, project)

View File

@ -0,0 +1,32 @@
# frozen_string_literal: true
module Resolvers
module Analytics
module CycleAnalytics
module ValueStreams
class StageItemsResolver < BaseResolver
type ::Types::Analytics::CycleAnalytics::ValueStreams::StageItemsType.connection_type, null: true
argument :sort,
Types::Analytics::CycleAnalytics::ValueStreams::StageItemsSortEnum,
description: 'Sort stage items by criteria.',
required: false
# Limit page size to the same value of RecordsFetcher to avoid slow queries
max_page_size Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher::MAX_RECORDS
def resolve(sort: {})
stage = object.stage
params = object.params.merge(sort)
# Create a new data collector with additional sort parameters
data_collector =
Gitlab::Analytics::CycleAnalytics::DataCollector.new(stage: stage, params: params)
offset_pagination(data_collector.records_for_graphql)
end
end
end
end
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
module Types
module Analytics
module CycleAnalytics
module ValueStreams
class StageItemsSortEnum < BaseEnum
graphql_name 'ValueStreamStageItemSort'
description 'Sorting values available to value stream stage items'
value 'DURATION_ASC', 'Duration by ascending order.', value: { sort: :duration, direction: :asc }
value 'DURATION_DESC', 'Duration by descending order.', value: { sort: :duration, direction: :desc }
value 'END_EVENT_ASC', 'Stage end event time by ascending order.',
value: { sort: :end_event, direction: :asc }
value 'END_EVENT_DESC', 'Stage end event time by descending order.',
value: { sort: :end_event, direction: :desc }
end
end
end
end
end

View File

@ -0,0 +1,46 @@
# frozen_string_literal: true
module Types
module Analytics
module CycleAnalytics
module ValueStreams
# rubocop: disable Graphql/AuthorizeTypes -- # Already authorized in parent.
class StageItemsType < BaseObject
graphql_name 'ValueStreamStageItems'
include EntityDateHelper
field :end_event_timestamp,
Types::TimeType,
null: true,
description: 'When exited the stage.'
field :duration,
GraphQL::Types::String,
null: true,
description: 'Duration of the item on the stage.'
field :record,
::Types::IssuableType,
null: true,
description: 'Item record.'
def duration
return unless object.total_time.present?
duration_array = distance_of_time_as_hash(object.total_time.to_f).first
duration_array.reverse.join(' ')
end
def record
object
end
end
# rubocop: enable Graphql/AuthorizeTypes
end
end
end
end
Types::Analytics::CycleAnalytics::ValueStreams::StageItemsType.prepend_mod

View File

@ -22,6 +22,11 @@ module Types
::Types::Analytics::CycleAnalytics::MetricType,
description: 'Median duration in seconds.'
field :items,
description: 'Items in the stage.',
resolver: Resolvers::Analytics::CycleAnalytics::ValueStreams::StageItemsResolver,
alpha: { milestone: '17.4' }
def count
{
value: object.count,

View File

@ -1,11 +1,13 @@
# frozen_string_literal: true
# All references to this type are deprecated and always return nil,
# so this type should no longer be initialized
module Types
class PrometheusAlertType < BaseObject
graphql_name 'PrometheusAlert'
description 'The alert condition for Prometheus'
authorize :read_prometheus_alerts
authorize :admin_operations
field :id, GraphQL::Types::ID, null: false,
description: 'ID of the alert condition.'

View File

@ -27,6 +27,7 @@ module SessionsHelper
def verification_data(user)
{
username: user.username,
obfuscated_email: obfuscated_email(verification_email(user)),
verify_path: session_path(:user),
resend_path: users_resend_verification_code_path,

View File

@ -10,6 +10,7 @@ module Analytics
alias_attribute :state, :state_id
enum state: Issue.available_states, _suffix: true
belongs_to :issuable, class_name: 'Issue', foreign_key: 'issue_id', inverse_of: :issue_stage_events
scope :assigned_to, ->(user) do
assignees_class = IssueAssignee

View File

@ -11,6 +11,8 @@ module Analytics
alias_attribute :state, :state_id
enum state: MergeRequest.available_states, _suffix: true
belongs_to :issuable, class_name: 'MergeRequest', foreign_key: 'merge_request_id', inverse_of: :merge_request_stage_events
scope :assigned_to, ->(user) do
assignees_class = MergeRequestAssignee
condition = assignees_class.where(user_id: user).where(arel_table[:merge_request_id].eq(assignees_class.arel_table[:merge_request_id]))

View File

@ -496,11 +496,6 @@ class ProjectPolicy < BasePolicy
enable :read_deployment
end
rule { ~anonymous & can?(:metrics_dashboard) }.policy do
enable :create_metrics_user_starred_dashboard
enable :read_metrics_user_starred_dashboard
end
rule { packages_disabled }.policy do
prevent(*create_read_update_admin_destroy(:package))
end
@ -600,7 +595,6 @@ class ProjectPolicy < BasePolicy
enable :read_deploy_token
enable :create_deploy_token
enable :destroy_deploy_token
enable :read_prometheus_alerts
enable :admin_terraform_state
enable :create_freeze_period
enable :read_freeze_period

View File

@ -1,5 +0,0 @@
# frozen_string_literal: true
class PrometheusAlertPolicy < ::BasePolicy
delegate { @subject.project }
end

View File

@ -13,7 +13,7 @@ module Packages
end
def body
<<-HTML
<<-HTML.lstrip
<!DOCTYPE html>
<html>
<head>

View File

@ -19,7 +19,7 @@ module Projects
end
def alerting_setting_params
return {} unless can?(current_user, :read_prometheus_alerts, project)
return {} unless can?(current_user, :admin_operations, project)
attr = params[:alerting_setting_attributes]
return {} unless attr

View File

@ -0,0 +1,62 @@
# frozen_string_literal: true
module WorkItems
class BulkUpdateService
def initialize(parent:, current_user:, work_item_ids:, widget_params: {})
@parent = parent
@work_item_ids = work_item_ids
@current_user = current_user
@widget_params = widget_params.dup
end
def execute
unless @current_user.can?(:"read_#{@parent.to_ability_name}", @parent)
return ServiceResponse.error(message: "User can't read parent", reason: :authorization)
end
updated_work_items = scoped_work_items.find_each(batch_size: 100) # rubocop:disable CodeReuse/ActiveRecord -- Implementation would be identical in model
.filter_map do |work_item|
next unless @current_user.can?(:update_work_item, work_item)
update_result = WorkItems::UpdateService.new(
container: work_item.resource_parent,
widget_params: @widget_params,
current_user: @current_user
).execute(work_item)
work_item if update_result[:status] == :success
end
ServiceResponse.success(payload: { updated_work_item_count: updated_work_items.count })
end
private
def scoped_work_items
ids = WorkItem.id_in(@work_item_ids)
cte = Gitlab::SQL::CTE.new(:work_item_ids_cte, ids)
work_item_scope = WorkItem.all
cte.apply_to(work_item_scope).in_namespaces_with_cte(namespaces)
end
def namespaces
relations = [group_namespaces, project_namespaces].compact
Namespace.from_union(relations, remove_duplicates: false)
end
def group_namespaces
return unless @parent.is_a?(Group)
@parent.self_and_descendants.select(:id)
end
def project_namespaces
if @parent.is_a?(Project)
Project.id_in(@parent)
else
Project.in_namespace(@parent.self_and_descendant_ids)
end.select('projects.project_namespace_id as id')
end
end
end

View File

@ -54,7 +54,7 @@ class RemoveCrmContactsWidgetFromWorkItemTypes < Gitlab::Database::Migration[2.2
WidgetDefinition.upsert_all(
widgets,
unique_by: :index_work_item_widget_definitions_on_default_witype_and_name
on_duplicate: :skip
)
end
end

View File

@ -0,0 +1,21 @@
# frozen_string_literal: true
class RemoveNamespaceIdForeignKeyFromVulnerabilityReads < Gitlab::Database::Migration[2.2]
milestone '17.4'
disable_ddl_transaction!
OLD_CONSTRAINT_NAME = 'fk_4f593f6c62'
def up
with_lock_retries do
remove_foreign_key_if_exists(:vulnerability_reads, column: :namespace_id, on_delete: :cascade,
name: OLD_CONSTRAINT_NAME)
end
end
def down
add_concurrent_foreign_key(:vulnerability_reads, :namespaces, column: :namespace_id, on_delete: :cascade,
name: OLD_CONSTRAINT_NAME)
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class RemoveIndexVulnerabilityReadsCommonFinderQueryWNamespaceId < Gitlab::Database::Migration[2.2]
milestone '17.4'
disable_ddl_transaction!
TABLE_NAME = :vulnerability_reads
COLUMNS = %i[namespace_id state report_type severity vulnerability_id dismissal_reason]
INDEX_NAME = 'index_vulnerability_reads_common_finder_query_w_namespace_id'
def up
remove_concurrent_index_by_name TABLE_NAME, INDEX_NAME
end
def down
add_concurrent_index TABLE_NAME, COLUMNS, order: { vulnerability_id: :desc }, name: INDEX_NAME
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true
class RemoveIndexVulnerabilityReadsOnNamespaceTypeSeverityId < Gitlab::Database::Migration[2.2]
milestone '17.4'
disable_ddl_transaction!
TABLE_NAME = :vulnerability_reads
COLUMNS = %i[namespace_id report_type severity vulnerability_id]
INDEX_NAME = 'index_vulnerability_reads_on_namespace_type_severity_id'
def up
remove_concurrent_index_by_name TABLE_NAME, INDEX_NAME
end
def down
add_concurrent_index TABLE_NAME, COLUMNS, name: INDEX_NAME
end
end

View File

@ -0,0 +1,68 @@
# frozen_string_literal: true
class CleanTimeTrackingWidgetDefinitions < Gitlab::Database::Migration[2.2]
milestone '17.4'
restrict_gitlab_migration gitlab_schema: :gitlab_main
WIDGET_NAME = 'Time tracking'
WIDGET_ENUM_VALUE = 21
WORK_ITEM_TYPES = [
"Issue",
"Task",
"Epic",
"Requirement",
"Test Case",
"Ticket",
"Incident"
].freeze
class WorkItemType < MigrationRecord
self.table_name = 'work_item_types'
self.inheritance_column = :_type_disabled
end
def up
# Time tracking widget definition was introduced with the wrong casing initially in the migration
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142329
# so we are recreating them with the correct casing. A new index will be added which ignores casing.
# Casing needs to match in lib/gitlab/database_importers/work_items/base_type_importer.rb
work_item_widget_definitions.where(widget_type: WIDGET_ENUM_VALUE).delete_all
widgets = []
WORK_ITEM_TYPES.each do |type_name|
type = WorkItemType.find_by_name(type_name)
unless type
Gitlab::AppLogger.warn("type #{type_name} is missing, not adding widget")
next
end
widgets << {
work_item_type_id: type.id,
name: WIDGET_NAME,
widget_type: WIDGET_ENUM_VALUE
}
end
return if widgets.empty?
work_item_widget_definitions.upsert_all(
widgets,
unique_by: :index_work_item_widget_definitions_on_default_witype_and_name
)
end
def down
# no-op we don't want to put widget definitions back in the wrong state
end
private
def work_item_widget_definitions
@work_item_widget_definitions ||= define_batchable_model('work_item_widget_definitions')
end
end

View File

@ -0,0 +1,20 @@
# frozen_string_literal: true
class AddWorkItemWidgetDefinitionsNameUniqueIndex < Gitlab::Database::Migration[2.2]
INDEX_NAME = 'index_work_item_widget_definitions_on_type_id_and_name'
disable_ddl_transaction!
milestone '17.4'
def up
add_concurrent_index :work_item_widget_definitions,
'work_item_type_id, TRIM(BOTH FROM LOWER(name))',
name: INDEX_NAME,
unique: true
end
def down
remove_concurrent_index_by_name :work_item_widget_definitions, INDEX_NAME
end
end

View File

@ -0,0 +1,31 @@
# frozen_string_literal: true
class DropWorkItemWidgetDefinitionsNamespaceId < Gitlab::Database::Migration[2.2]
UNIQUE_INDEX_NAME = 'index_work_item_widget_definitions_on_namespace_type_and_name'
UNIQUE_DEFAULT_NAMESPACE_INDEX_NAME = 'index_work_item_widget_definitions_on_default_witype_and_name'
disable_ddl_transaction!
milestone '17.4'
def up
remove_column :work_item_widget_definitions, :namespace_id
end
def down
add_column :work_item_widget_definitions, :namespace_id, :bigint
add_concurrent_index :work_item_widget_definitions,
[:namespace_id, :work_item_type_id, :name],
unique: true,
name: UNIQUE_INDEX_NAME
add_concurrent_index :work_item_widget_definitions,
[:work_item_type_id, :name],
where: "namespace_id is NULL",
unique: true,
name: UNIQUE_DEFAULT_NAMESPACE_INDEX_NAME
add_concurrent_foreign_key :work_item_widget_definitions, :namespaces, column: :namespace_id, on_delete: :cascade
end
end

View File

@ -0,0 +1 @@
27cac3ea014b0d79fe7fa2a60844bed0eb7715ddcb217afc508fdd094e419e25

View File

@ -0,0 +1 @@
fda727d529ebf7198caa961fdb3042e281afd26aeab55d726f228323e7a250b6

View File

@ -0,0 +1 @@
6043a1cdd3148e362511f3b13bf9b376ad029999ef9a90345ef82e75c9bc62ee

View File

@ -0,0 +1 @@
f4f3ab17c341a602ed556c0f2489c98699a4e86c59e7b90a17f6b54787a7fe2d

View File

@ -0,0 +1 @@
f38e5f28d0c53d58462490dcc3e262a123f7c33d4b2e6670ed26868fcb3a9bb9

View File

@ -0,0 +1 @@
be9dc06b9f53698ff73350e2f4ef7458088e53ad5f1613ca265fc72a23ae26dc

View File

@ -20314,7 +20314,6 @@ ALTER SEQUENCE work_item_types_id_seq OWNED BY work_item_types.id;
CREATE TABLE work_item_widget_definitions (
id bigint NOT NULL,
namespace_id bigint,
work_item_type_id bigint NOT NULL,
widget_type smallint NOT NULL,
disabled boolean DEFAULT false,
@ -30427,8 +30426,6 @@ CREATE INDEX index_vulnerability_reads_common_attrs_and_detection_for_groups ON
CREATE INDEX index_vulnerability_reads_common_finder_query_2 ON vulnerability_reads USING btree (project_id, state, report_type, severity, vulnerability_id DESC, dismissal_reason);
CREATE INDEX index_vulnerability_reads_common_finder_query_w_namespace_id ON vulnerability_reads USING btree (namespace_id, state, report_type, severity, vulnerability_id DESC, dismissal_reason);
CREATE INDEX index_vulnerability_reads_for_vulnerability_export ON vulnerability_reads USING btree (traversal_ids, vulnerability_id) WHERE (archived = false);
CREATE INDEX index_vulnerability_reads_on_cluster_agent_id ON vulnerability_reads USING btree (cluster_agent_id) WHERE (report_type = 7);
@ -30439,8 +30436,6 @@ CREATE INDEX index_vulnerability_reads_on_location_image_partial ON vulnerabilit
CREATE INDEX index_vulnerability_reads_on_location_image_trigram ON vulnerability_reads USING gin (location_image gin_trgm_ops) WHERE ((report_type = ANY (ARRAY[2, 7])) AND (location_image IS NOT NULL));
CREATE INDEX index_vulnerability_reads_on_namespace_type_severity_id ON vulnerability_reads USING btree (namespace_id, report_type, severity, vulnerability_id);
CREATE INDEX index_vulnerability_reads_on_project_id_and_vulnerability_id ON vulnerability_reads USING btree (project_id, vulnerability_id);
CREATE INDEX index_vulnerability_reads_on_scanner_id ON vulnerability_reads USING btree (scanner_id);
@ -30535,9 +30530,7 @@ CREATE INDEX index_work_item_types_on_base_type_and_id ON work_item_types USING
CREATE UNIQUE INDEX index_work_item_types_on_name_unique ON work_item_types USING btree (TRIM(BOTH FROM lower(name)));
CREATE UNIQUE INDEX index_work_item_widget_definitions_on_default_witype_and_name ON work_item_widget_definitions USING btree (work_item_type_id, name) WHERE (namespace_id IS NULL);
CREATE UNIQUE INDEX index_work_item_widget_definitions_on_namespace_type_and_name ON work_item_widget_definitions USING btree (namespace_id, work_item_type_id, name);
CREATE UNIQUE INDEX index_work_item_widget_definitions_on_type_id_and_name ON work_item_widget_definitions USING btree (work_item_type_id, TRIM(BOTH FROM lower(name)));
CREATE INDEX index_work_item_widget_definitions_on_work_item_type_id ON work_item_widget_definitions USING btree (work_item_type_id);
@ -33112,9 +33105,6 @@ ALTER TABLE ONLY ml_model_versions
ALTER TABLE ONLY user_achievements
ADD CONSTRAINT fk_4efde02858 FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY vulnerability_reads
ADD CONSTRAINT fk_4f593f6c62 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY approval_group_rules_protected_branches
ADD CONSTRAINT fk_4f85f13b20 FOREIGN KEY (approval_group_rule_id) REFERENCES approval_group_rules(id) ON DELETE CASCADE;
@ -34072,9 +34062,6 @@ ALTER TABLE ONLY workspaces
ALTER TABLE ONLY merge_requests_compliance_violations
ADD CONSTRAINT fk_ec881c1c6f FOREIGN KEY (violating_user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY work_item_widget_definitions
ADD CONSTRAINT fk_ecf57512f7 FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY zoekt_indices
ADD CONSTRAINT fk_ef0e75ac42 FOREIGN KEY (zoekt_replica_id) REFERENCES zoekt_replicas(id) ON DELETE CASCADE;

View File

@ -16238,6 +16238,29 @@ The edge type for [`ValueStream`](#valuestream).
| <a id="valuestreamedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="valuestreamedgenode"></a>`node` | [`ValueStream`](#valuestream) | The item at the end of the edge. |
#### `ValueStreamStageItemsConnection`
The connection type for [`ValueStreamStageItems`](#valuestreamstageitems).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="valuestreamstageitemsconnectionedges"></a>`edges` | [`[ValueStreamStageItemsEdge]`](#valuestreamstageitemsedge) | A list of edges. |
| <a id="valuestreamstageitemsconnectionnodes"></a>`nodes` | [`[ValueStreamStageItems]`](#valuestreamstageitems) | A list of nodes. |
| <a id="valuestreamstageitemsconnectionpageinfo"></a>`pageInfo` | [`PageInfo!`](#pageinfo) | Information to aid in pagination. |
#### `ValueStreamStageItemsEdge`
The edge type for [`ValueStreamStageItems`](#valuestreamstageitems).
##### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="valuestreamstageitemsedgecursor"></a>`cursor` | [`String!`](#string) | A cursor for use in pagination. |
| <a id="valuestreamstageitemsedgenode"></a>`node` | [`ValueStreamStageItems`](#valuestreamstageitems) | The item at the end of the edge. |
#### `VulnerabilitiesCountByDayConnection`
The connection type for [`VulnerabilitiesCountByDay`](#vulnerabilitiescountbyday).
@ -33352,6 +33375,17 @@ Returns [`ValueStreamStageMetrics!`](#valuestreamstagemetrics).
| <a id="valuestreamstagemetricsmilestonetitle"></a>`milestoneTitle` | [`String`](#string) | Milestone applied to the issue or the merge request. |
| <a id="valuestreamstagemetricstimeframe"></a>`timeframe` | [`Timeframe!`](#timeframe) | Aggregation timeframe. Filters the issue or the merge request creation time for FOSS projects, and the end event timestamp for licensed projects or groups. |
### `ValueStreamStageItems`
#### Fields
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="valuestreamstageitemsduration"></a>`duration` | [`String`](#string) | Duration of the item on the stage. |
| <a id="valuestreamstageitemsdurationinmilliseconds"></a>`durationInMilliseconds` | [`BigInt`](#bigint) | Duration of item on stage in milliseconds. |
| <a id="valuestreamstageitemsendeventtimestamp"></a>`endEventTimestamp` | [`Time`](#time) | When exited the stage. |
| <a id="valuestreamstageitemsrecord"></a>`record` | [`Issuable`](#issuable) | Item record. |
### `ValueStreamStageMetrics`
#### Fields
@ -33362,6 +33396,28 @@ Returns [`ValueStreamStageMetrics!`](#valuestreamstagemetrics).
| <a id="valuestreamstagemetricscount"></a>`count` | [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric) | Limited item count. The backend counts maximum 1000 items, for free projects, and maximum 10,000 items for licensed projects or licensed groups. |
| <a id="valuestreamstagemetricsmedian"></a>`median` | [`ValueStreamAnalyticsMetric`](#valuestreamanalyticsmetric) | Median duration in seconds. |
#### Fields with arguments
##### `ValueStreamStageMetrics.items`
Items in the stage.
DETAILS:
**Introduced** in GitLab 17.4.
**Status**: Experiment.
Returns [`ValueStreamStageItemsConnection`](#valuestreamstageitemsconnection).
This field returns a [connection](#connections). It accepts the
four standard [pagination arguments](#pagination-arguments):
`before: String`, `after: String`, `first: Int`, and `last: Int`.
###### Arguments
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="valuestreamstagemetricsitemssort"></a>`sort` | [`ValueStreamStageItemSort`](#valuestreamstageitemsort) | Sort stage items by criteria. |
### `VulnerabilitiesCountByDay`
Represents the count of vulnerabilities by severity on a particular day. This data is retained for 365 days.
@ -37670,6 +37726,17 @@ Stage event identifiers.
| <a id="valuestreamstageeventmerge_request_reviewer_first_assigned"></a>`MERGE_REQUEST_REVIEWER_FIRST_ASSIGNED` | Merge request reviewer first assigned event. |
| <a id="valuestreamstageeventplan_stage_start"></a>`PLAN_STAGE_START` | Plan stage start event. |
### `ValueStreamStageItemSort`
Sorting values available to value stream stage items.
| Value | Description |
| ----- | ----------- |
| <a id="valuestreamstageitemsortduration_asc"></a>`DURATION_ASC` | Duration by ascending order. |
| <a id="valuestreamstageitemsortduration_desc"></a>`DURATION_DESC` | Duration by descending order. |
| <a id="valuestreamstageitemsortend_event_asc"></a>`END_EVENT_ASC` | Stage end event time by ascending order. |
| <a id="valuestreamstageitemsortend_event_desc"></a>`END_EVENT_DESC` | Stage end event time by descending order. |
### `VerificationStateEnum`
| Value | Description |

View File

@ -547,6 +547,7 @@ Example response:
## List a group's invited groups
Get a list of invited groups in the given group. When accessed without authentication, only public invited groups are returned.
This endpoint is rate-limited to 60 requests per minute per user (for authenticated users) or IP (for unauthenticated users).
By default, this request returns 20 results at a time because the API results [are paginated](rest/index.md#pagination).

View File

@ -2108,6 +2108,7 @@ Example response:
## List a project's invited groups
Get a list of invited groups in the given project. When accessed without authentication, only public invited groups are returned.
This endpoint is rate-limited to 60 requests per minute per user (for authenticated users) or IP (for unauthenticated users).
By default, this request returns 20 results at a time because the API results [are paginated](rest/index.md#pagination).

View File

@ -45,7 +45,7 @@ When testing interactions with the AI Gateway, you might want to integrate your
with the deployed staging AI Gateway. To do this:
1. You need a cloud staging license that has the Code Suggestions add-on,
because add-ons are enabled on staging. Drop a note in the `#s_fulfillment` or `s_fulfillment_engineering` internal Slack channel to request an add-on to your license. See this [handbook page](https://handbook.gitlab.com/handbook/developer-onboarding/#working-on-gitlab-ee-developer-licenses) for how to request a license for local development.
because add-ons are enabled on staging. Follow [these instructions](#setup-instructions-to-use-gdk-with-the-code-suggestions-add-on) to add the add-on to your license (you can reach out to `#s_fulfillment_engineering` if you have any problems). See this [handbook page](https://handbook.gitlab.com/handbook/developer-onboarding/#working-on-gitlab-ee-developer-licenses) for how to request a license for local development.
1. Set environment variables to point customers-dot to staging, and the AI Gateway to staging:
```shell

View File

@ -1796,6 +1796,48 @@ If you are stubbing an `ee` feature flag, then use:
stub_licensed_features(my_feature_flag: false)
```
#### Asserting browser console errors
By default, feature specs won't fail if a browser console error is found. Sometimes we want to verify that there are no
unexpected console errors, which could indicate an integration problem.
To set a feature spec to fail if it encounters browser console errors, use `expect_page_to_have_no_console_errors` from
the `BrowserConsoleHelpers` support module:
```ruby
RSpec.describe 'Pipeline', :js do
after do
expect_page_to_have_no_console_errors
end
# ...
end
```
NOTE:
`expect_page_to_have_no_console_errors` will not work on `WEBDRIVER=firefox`. Logs are only captured when
using the Chrome driver.
Sometimes, there are known console errors that we want to ignore. To ignore a set of messages, such that the test
**will not** fail if the message is observed, you can pass an `allow:` parameter to
`expect_page_to_have_no_console_errors`:
```ruby
RSpec.describe 'Pipeline', :js do
after do
expect_page_to_have_no_console_errors(allow: [
"Blow up!",
/Foo.*happens/
])
end
# ...
end
```
Update the `BROWSER_CONSOLE_ERROR_FILTER` constant in `spec/support/helpers/browser_console_helpers.rb` to change
the list of console errors that should be globally ignored.
### Debugging
You can run your spec with the prefix `WEBDRIVER_HEADLESS=0` to open an actual browser. However, the specs go through the commands quickly and leave you no time to look around.

View File

@ -214,12 +214,12 @@ To determine your upgrade path:
- [`15.4.6`](versions/gitlab_15_changes.md#1540).
- [`15.11.13`](versions/gitlab_15_changes.md#15110). The latest GitLab 15.11 release.
- GitLab 16 includes the following required upgrade stops:
- [`16.0.8`](versions/gitlab_16_changes.md#1600). Instances with
- [`16.0.9`](versions/gitlab_16_changes.md#1600). Instances with
[lots of users](versions/gitlab_16_changes.md#long-running-user-type-data-change) or
[large pipeline variables history](versions/gitlab_16_changes.md#1610).
- [`16.1.6`](versions/gitlab_16_changes.md#1610). Instances with NPM packages in their package registry.
- [`16.2.9`](versions/gitlab_16_changes.md#1620). Instances with [large pipeline variables history](versions/gitlab_16_changes.md#1630).
- [`16.3.7`](versions/gitlab_16_changes.md#1630).
- [`16.1.7`](versions/gitlab_16_changes.md#1610). Instances with NPM packages in their package registry.
- [`16.2.10`](versions/gitlab_16_changes.md#1620). Instances with [large pipeline variables history](versions/gitlab_16_changes.md#1630).
- [`16.3.8`](versions/gitlab_16_changes.md#1630).
- [`16.7.z`](versions/gitlab_16_changes.md#1670). The latest GitLab 16.7 release.
- [`16.11.z`](https://gitlab.com/gitlab-org/gitlab/-/releases). The latest GitLab 16.11 release.
- GitLab 17: [`17.y.z`](versions/gitlab_17_changes.md). The latest GitLab 17 release.

View File

@ -83,7 +83,7 @@ DETAILS:
**Offering:** GitLab.com, Self-managed, GitLab Dedicated
- `glab duo ask` helps you discover or recall `git` commands when and where you need them.
- LLM: Vertex AI Codey [`codechat-bison`](https://console.cloud.google.com/vertex-ai/publishers/google/model-garden/codechat-bison)
- LLM: Anthropic [Claude 3 Haiku](https://console.cloud.google.com/vertex-ai/publishers/anthropic/model-garden/claude-3-haiku)
- [View documentation](../../editor_extensions/gitlab_cli/index.md#gitlab-duo-for-the-cli).
### Merge commit message generation

View File

@ -15,7 +15,7 @@ module Gitlab
MAX_COUNT = 10001
delegate :serialized_records, to: :records_fetcher
delegate :serialized_records, :records_for_graphql, to: :records_fetcher
def initialize(stage:, params: {})
@stage = stage

View File

@ -65,6 +65,20 @@ module Gitlab
end
end
# rubocop: disable CodeReuse/ActiveRecord
def records_for_graphql
# Convert duration milliseconds to seconds to be compatible with non-aggregated data format
extra_columns_to_select = ['duration_in_milliseconds / 1000 AS total_time']
preloads_for_issuable = MAPPINGS.fetch(subject_class).fetch(:includes_for_query)
query
.limit(MAX_RECORDS)
.select(stage_event_model.arel_table[Arel.star], extra_columns_to_select)
.preload(issuable: preloads_for_issuable)
end
# rubocop: enable CodeReuse/ActiveRecord
def limited_query
query
.page(page)

View File

@ -14,7 +14,7 @@ module Gitlab
MAX_COUNT = 1001
delegate :serialized_records, to: :records_fetcher
delegate :serialized_records, :records_for_graphql, to: :records_fetcher
def initialize(stage:, params: {})
@stage = stage
@ -57,10 +57,10 @@ module Gitlab
end
end
private
attr_reader :stage, :params
private
def query
query_builder.build
end

View File

@ -42,6 +42,15 @@ module Gitlab
end
end
end
def records_for_graphql
query_with_select = query.select(subject_class.arel_table[Arel.star], *time_columns)
records =
order_by(query_with_select, sort, direction).limit(MAX_RECORDS)
preload_associations(records)
end
# rubocop: enable CodeReuse/ActiveRecord
private

View File

@ -0,0 +1,57 @@
# frozen_string_literal: true

module Gitlab
  module Auth
    module Atlassian
      # Refreshes the OAuth access token stored on an Atlassian identity.
      #
      # Atlassian access tokens are short-lived; this class exchanges the
      # identity's stored refresh token for a fresh access/refresh token pair
      # via the Atlassian OAuth token endpoint and persists the result on the
      # identity record.
      class TokenRefresher
        attr_reader :identity

        REFRESH_TOKEN_URL = 'https://auth.atlassian.com/oauth/token'
        MIN_TIME_ALLOWED_TILL_EXPIRE = 5.minutes

        AtlassianTokenRefreshError = Class.new(StandardError)

        # identity - record exposing #expires_at, #refresh_token, #token,
        # and #update!
        def initialize(identity)
          @identity = identity
        end

        # True when the token has expired, or will expire within
        # MIN_TIME_ALLOWED_TILL_EXPIRE from now.
        def needs_refresh?
          MIN_TIME_ALLOWED_TILL_EXPIRE.from_now > identity.expires_at
        end

        # Requests a new token pair from Atlassian and persists it on the
        # identity. Raises AtlassianTokenRefreshError (with Atlassian's
        # "error" message) when the endpoint responds with a non-success
        # status.
        def refresh!
          response = request_new_token

          raise AtlassianTokenRefreshError, response["error"] unless response.success?

          identity.update!(
            token: response["access_token"],
            refresh_token: response["refresh_token"],
            expires_at: Time.zone.now + response["expires_in"].seconds
          )
        end

        # Refreshes only when the current token is close to expiry; no-op
        # (returns nil) otherwise.
        def refresh_if_needed!
          return unless needs_refresh?

          refresh!
        end

        private

        def request_new_token
          Gitlab::HTTP_V2.post(REFRESH_TOKEN_URL, body: payload.to_json, headers: headers)
        end

        def headers
          { 'Content-Type' => 'application/json' }
        end

        # Standard OAuth2 refresh_token grant parameters.
        def payload
          {
            grant_type: 'refresh_token',
            client_id: config.app_id,
            client_secret: config.app_secret,
            refresh_token: identity.refresh_token
          }
        end

        # OmniAuth provider configuration for the atlassian_oauth2 provider,
        # memoized for the lifetime of this refresher.
        def config
          @config ||= Gitlab::Auth::OAuth::Provider.config_for('atlassian_oauth2')
        end
      end
    end
  end
end

View File

@ -217,7 +217,7 @@ module Gitlab
::WorkItems::WidgetDefinition.upsert_all(
widgets,
unique_by: :index_work_item_widget_definitions_on_default_witype_and_name
unique_by: :index_work_item_widget_definitions_on_type_id_and_name
)
end
end

View File

@ -10150,6 +10150,9 @@ msgstr ""
msgid "Buy now"
msgstr ""
msgid "Buy subscription"
msgstr ""
msgid "By default, all projects and groups use the global notifications setting."
msgstr ""
@ -13095,6 +13098,9 @@ msgstr ""
msgid "CodeSuggestions|A user can be assigned a %{title} seat only once each billable month."
msgstr ""
msgid "CodeSuggestions|Before you can buy GitLab Duo seats, you'll need a Premium or Ultimate subscription."
msgstr ""
msgid "CodeSuggestions|Boost productivity across the software development life cycle by using Code Suggestions and GitLab Duo Chat as part of the %{duoLinkStart}GitLab Duo Pro%{duoLinkEnd} add-on. You can now try GitLab Duo Pro for free for %{days} days, no credit card required."
msgstr ""
@ -13125,6 +13131,12 @@ msgstr ""
msgid "CodeSuggestions|Run health check"
msgstr ""
msgid "CodeSuggestions|To buy GitLab Duo seats and regain access, you'll need a Premium or Ultimate subscription."
msgstr ""
msgid "CodeSuggestions|Your GitLab Duo Pro trial has expired"
msgstr ""
msgid "CodeSuggestions|trial"
msgstr ""
@ -27211,9 +27223,6 @@ msgstr ""
msgid "IdentityVerification|For added security, you'll need to verify your identity."
msgstr ""
msgid "IdentityVerification|For added security, you'll need to verify your identity. We've sent a verification code to %{email}"
msgstr ""
msgid "IdentityVerification|GitLab will not charge or store your payment information, it will only be used for verification."
msgstr ""
@ -27352,6 +27361,9 @@ msgstr ""
msgid "IdentityVerification|You are signed in as %{username}. For added security, you'll need to verify your identity in a few quick steps."
msgstr ""
msgid "IdentityVerification|You are signed in as %{username}. For added security, you'll need to verify your identity. We've sent a verification code to %{email}"
msgstr ""
msgid "IdentityVerification|You will receive a text containing a code. Standard charges may apply."
msgstr ""
@ -48237,6 +48249,11 @@ msgstr ""
msgid "SecurityOrchestration|%{scanners}"
msgstr ""
msgid "SecurityOrchestration|%{scanners} of %{severities} severity level"
msgid_plural "SecurityOrchestration|%{scanners} of %{severities} severity levels"
msgstr[0] ""
msgstr[1] ""
msgid "SecurityOrchestration|%{state} and %{statuses}"
msgstr ""
@ -48294,6 +48311,9 @@ msgstr ""
msgid "SecurityOrchestration|All projects linked to this project except:"
msgstr ""
msgid "SecurityOrchestration|All scanners"
msgstr ""
msgid "SecurityOrchestration|All sources"
msgstr ""
@ -48680,6 +48700,9 @@ msgstr ""
msgid "SecurityOrchestration|Requires no approvals if any of the following occur:"
msgstr ""
msgid "SecurityOrchestration|Resolve the following vulnerabilities that are no longer detected on the default branch:"
msgstr ""
msgid "SecurityOrchestration|Resolved"
msgstr ""

View File

@ -175,7 +175,7 @@ RSpec.describe 'Database schema', feature_category: :database do
vulnerability_scanners: %w[external_id],
security_scans: %w[pipeline_id project_id], # foreign key is not added as ci_pipeline table will be moved into different db soon
dependency_list_exports: %w[pipeline_id], # foreign key is not added as ci_pipeline table is in different db
vulnerability_reads: %w[cluster_agent_id],
vulnerability_reads: %w[cluster_agent_id namespace_id], # namespace_id is a denormalization of `project.namespace`
# See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/87584
# Fixes performance issues with the deletion of web-hooks with many log entries
web_hook_logs: %w[web_hook_id],

View File

@ -4,30 +4,48 @@ require 'spec_helper'
RSpec.describe 'Global search', :js, feature_category: :global_search do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:project) { create(:project, :public, namespace: user.namespace) }
let_it_be(:search_selector) { 'input[type="search"]:focus' }
before do
project.add_maintainer(user)
sign_in(user)
end
describe 'when header search' do
before do
visit dashboard_projects_path
end
after do
expect_page_to_have_no_console_errors
end
shared_examples 'header search' do
it 'renders search button' do
expect(page).to have_button('Search or go to…')
end
it 'opens search modal when shortcut "s" is pressed' do
search_selector = 'input[type="search"]:focus'
expect(page).not_to have_selector(search_selector)
find('body').native.send_key('s')
expect(page).to have_selector(search_selector)
wait_for_requests
end
end
describe 'when signed out' do
before do
visit project_path(project)
end
it_behaves_like 'header search'
end
describe 'when signed in' do
before do
sign_in(user)
visit dashboard_projects_path
end
it_behaves_like 'header search'
end
end

View File

@ -31,7 +31,7 @@ RSpec.describe 'IDE', :js, feature_category: :web_ide do
end
shared_examples "new Web IDE" do
it 'loads new Web IDE', :aggregate_failures, :js_fail_console_error do
it 'loads new Web IDE', :aggregate_failures do
iframe = find(ide_iframe_selector)
page.within_frame(iframe) do
@ -40,6 +40,8 @@ RSpec.describe 'IDE', :js, feature_category: :web_ide do
# Verify that the built-in GitLab Workflow Extension loads
expect(page).to have_css('#GitLab\\.gitlab-workflow\\.gl\\.status\\.code_suggestions')
end
expect_page_to_have_no_console_errors
end
end

View File

@ -75,7 +75,7 @@ describe('Badges store mutations', () => {
store.commit(types.RECEIVE_LOAD_BADGES, badges);
expect(store.state.isLoading).toBe(false);
expect(store.state.badges).toBe(badges);
expect(store.state.badges).toStrictEqual(badges);
});
});
@ -168,7 +168,7 @@ describe('Badges store mutations', () => {
store.commit(types.RECEIVE_RENDERED_BADGE, dummyBadge);
expect(store.state.isRendering).toBe(false);
expect(store.state.renderedBadge).toBe(dummyBadge);
expect(store.state.renderedBadge).toStrictEqual(dummyBadge);
});
});
@ -220,7 +220,7 @@ describe('Badges store mutations', () => {
store.commit(types.RECEIVE_UPDATED_BADGE, newBadge);
expect(store.state.badges.length).toBe(badgeCount);
expect(store.state.badges[badgeIndex]).toBe(newBadge);
expect(store.state.badges[badgeIndex]).toStrictEqual(newBadge);
});
});
@ -389,7 +389,7 @@ describe('Badges store mutations', () => {
store.commit(types.UPDATE_BADGE_IN_FORM, dummyBadge);
expect(store.state.badgeInEditForm).toBe(dummyBadge);
expect(store.state.badgeInEditForm).toStrictEqual(dummyBadge);
});
it('sets badgeInAddForm if isEditing is false', () => {
@ -397,7 +397,7 @@ describe('Badges store mutations', () => {
store.commit(types.UPDATE_BADGE_IN_FORM, dummyBadge);
expect(store.state.badgeInAddForm).toBe(dummyBadge);
expect(store.state.badgeInAddForm).toStrictEqual(dummyBadge);
});
});
@ -412,7 +412,7 @@ describe('Badges store mutations', () => {
it('sets badgeInModal', () => {
store.commit(types.UPDATE_BADGE_IN_MODAL, dummyBadge);
expect(store.state.badgeInModal).toBe(dummyBadge);
expect(store.state.badgeInModal).toStrictEqual(dummyBadge);
});
});
});

View File

@ -93,7 +93,7 @@ describe('Template Selector component', () => {
};
const { template, type } = wrapper.emitted('selected')[0][0];
expect(template).toBe(templates[0]);
expect(template).toStrictEqual(templates[0]);
expect(type).toMatchObject(licenseSelectorType);
});

View File

@ -277,7 +277,7 @@ describe('diffs/components/app', () => {
});
expect(wrapper.findComponent(NoChanges).exists()).toBe(false);
expect(wrapper.findComponent({ name: 'DynamicScroller' }).props('items')).toBe(
expect(wrapper.findComponent({ name: 'DynamicScroller' }).props('items')).toStrictEqual(
store.state.diffs.diffFiles,
);
});
@ -583,7 +583,7 @@ describe('diffs/components/app', () => {
});
expect(wrapper.findComponent({ name: 'DynamicScroller' }).exists()).toBe(true);
expect(wrapper.findComponent({ name: 'DynamicScroller' }).props('items')).toBe(
expect(wrapper.findComponent({ name: 'DynamicScroller' }).props('items')).toStrictEqual(
store.state.diffs.diffFiles,
);
});

View File

@ -407,7 +407,7 @@ describe('IDE store getters', () => {
localState.projects[TEST_PROJECT_ID] = { [projectField]: obj };
expect(callGetter(TEST_PROJECT_ID)).toBe(obj);
expect(callGetter(TEST_PROJECT_ID)).toStrictEqual(obj);
});
});

View File

@ -168,8 +168,8 @@ describe('Code Coverage', () => {
await nextTick();
expect(wrapper.vm.selectedDailyCoverage).not.toBe(originalSelectedData);
expect(wrapper.vm.selectedDailyCoverage).toBe(expectedData);
expect(wrapper.vm.selectedDailyCoverage).not.toStrictEqual(originalSelectedData);
expect(wrapper.vm.selectedDailyCoverage).toStrictEqual(expectedData);
});
});
});

View File

@ -12,7 +12,7 @@ describe('GlobalSearch CommitsFilters', () => {
let wrapper;
const defaultGetters = {
showArchived: () => true,
hasProjectContext: () => true,
};
const createComponent = () => {
@ -41,9 +41,9 @@ describe('GlobalSearch CommitsFilters', () => {
});
});
describe('ShowArchived getter', () => {
describe('hasProjectContext getter', () => {
beforeEach(() => {
defaultGetters.showArchived = () => false;
defaultGetters.hasProjectContext = () => false;
createComponent();
});

View File

@ -17,7 +17,7 @@ describe('GlobalSearch IssuesFilters', () => {
const defaultGetters = {
currentScope: () => 'issues',
showArchived: () => true,
hasProjectContext: () => true,
};
const createComponent = ({ initialState = {} } = {}) => {
@ -83,9 +83,9 @@ describe('GlobalSearch IssuesFilters', () => {
});
});
describe('ShowArchived getter', () => {
describe('hasProjectContext getter', () => {
beforeEach(() => {
defaultGetters.showArchived = () => false;
defaultGetters.hasProjectContext = () => false;
createComponent();
});

View File

@ -15,7 +15,7 @@ describe('GlobalSearch MergeRequestsFilters', () => {
const defaultGetters = {
currentScope: () => 'merge_requests',
showArchived: () => true,
hasProjectContext: () => true,
};
const createComponent = (initialState = {}) => {
@ -64,9 +64,9 @@ describe('GlobalSearch MergeRequestsFilters', () => {
});
});
describe('ShowArchived getter', () => {
describe('hasProjectContext getter', () => {
beforeEach(() => {
defaultGetters.showArchived = () => false;
defaultGetters.hasProjectContext = () => false;
createComponent();
});

View File

@ -12,7 +12,7 @@ describe('GlobalSearch MilestonesFilters', () => {
let wrapper;
const defaultGetters = {
showArchived: () => true,
hasProjectContext: () => true,
};
const findArchivedFilter = () => wrapper.findComponent(ArchivedFilter);
@ -41,9 +41,9 @@ describe('GlobalSearch MilestonesFilters', () => {
});
});
describe('ShowArchived getter', () => {
describe('hasProjectContext getter', () => {
beforeEach(() => {
defaultGetters.showArchived = () => false;
defaultGetters.hasProjectContext = () => false;
createComponent();
});

View File

@ -12,7 +12,7 @@ describe('GlobalSearch ProjectsFilters', () => {
let wrapper;
const defaultGetters = {
showArchived: () => true,
hasProjectContext: () => true,
};
const createComponent = () => {
@ -41,9 +41,9 @@ describe('GlobalSearch ProjectsFilters', () => {
});
});
describe('ShowArchived getter', () => {
describe('hasProjectContext getter', () => {
beforeEach(() => {
defaultGetters.showArchived = () => false;
defaultGetters.hasProjectContext = () => false;
createComponent();
});

View File

@ -12,7 +12,7 @@ describe('GlobalSearch WikiBlobsFilters', () => {
let wrapper;
const defaultGetters = {
showArchived: () => true,
hasProjectContext: () => true,
};
const createComponent = () => {
@ -41,9 +41,9 @@ describe('GlobalSearch WikiBlobsFilters', () => {
});
});
describe('ShowArchived getter', () => {
describe('hasProjectContext getter', () => {
beforeEach(() => {
defaultGetters.showArchived = () => false;
defaultGetters.hasProjectContext = () => false;
createComponent();
});

View File

@ -27,6 +27,7 @@ describe('EmailVerification', () => {
let axiosMock;
const defaultPropsData = {
username: 'al12',
obfuscatedEmail: 'al**@g*****.com',
verifyPath: '/users/sign_in',
resendPath: '/users/resend_verification_code',
@ -63,6 +64,10 @@ describe('EmailVerification', () => {
it('contains the obfuscated email address', () => {
expect(wrapper.text()).toContain(defaultPropsData.obfuscatedEmail);
});
it("contains the user's username", () => {
expect(wrapper.text()).toContain(`You are signed in as ${defaultPropsData.username}`);
});
});
describe('verifying the code', () => {

View File

@ -10,15 +10,13 @@ import { frecentGroupsMock } from '../../../mock_data';
Vue.use(VueApollo);
describe('FrequentlyVisitedGroups', () => {
let wrapper;
const TEST_GROUPS_PATH = '/mock/group/path';
const groupsPath = '/mock/group/path';
const currentUserFrecentGroupsQueryHandler = jest.fn().mockResolvedValue({
data: {
frecentGroups: frecentGroupsMock,
},
});
describe('FrequentlyVisitedGroups', () => {
/** @type {import('@vue/test-utils').Wrapper} */
let wrapper;
/** @type {jest.Mock} */
let currentUserFrecentGroupsQueryHandler;
const createComponent = (options) => {
const mockApollo = createMockApollo([
@ -28,7 +26,7 @@ describe('FrequentlyVisitedGroups', () => {
wrapper = shallowMount(FrequentGroups, {
apolloProvider: mockApollo,
provide: {
groupsPath,
groupsPath: TEST_GROUPS_PATH,
},
...options,
});
@ -40,6 +38,14 @@ describe('FrequentlyVisitedGroups', () => {
...wrapperInstance.vm.$attrs,
});
beforeEach(() => {
currentUserFrecentGroupsQueryHandler = jest.fn().mockResolvedValue({
data: {
frecentGroups: frecentGroupsMock,
},
});
});
it('passes group-specific props', () => {
createComponent();
@ -48,7 +54,7 @@ describe('FrequentlyVisitedGroups', () => {
groupName: 'Frequently visited groups',
viewAllItemsIcon: 'group',
viewAllItemsText: 'View all my groups',
viewAllItemsPath: groupsPath,
viewAllItemsPath: TEST_GROUPS_PATH,
});
});
@ -95,4 +101,22 @@ describe('FrequentlyVisitedGroups', () => {
expect(wrapper.emitted('action')).toStrictEqual([['FREQUENTLY_VISITED_GROUPS_HANDLE']]);
});
});
describe('when query returns null', () => {
beforeEach(async () => {
currentUserFrecentGroupsQueryHandler = jest.fn().mockResolvedValue({
data: {
frecentGroups: null,
},
});
createComponent();
await waitForPromises();
});
it('renders with empty array', () => {
expect(findFrequentItems().props('items')).toEqual([]);
});
});
});

View File

@ -11,15 +11,13 @@ import { frecentProjectsMock } from '../../../mock_data';
Vue.use(VueApollo);
describe('FrequentlyVisitedProjects', () => {
let wrapper;
const TEST_PROJECTS_PATH = '/mock/project/path';
const projectsPath = '/mock/project/path';
const currentUserFrecentProjectsQueryHandler = jest.fn().mockResolvedValue({
data: {
frecentProjects: frecentProjectsMock,
},
});
describe('FrequentlyVisitedProjects', () => {
/** @type {import('@vue/test-utils').Wrapper} */
let wrapper;
/** @type {jest.Mock} */
let currentUserFrecentProjectsQueryHandler;
const createComponent = (options) => {
const mockApollo = createMockApollo([
@ -29,7 +27,7 @@ describe('FrequentlyVisitedProjects', () => {
wrapper = shallowMount(FrequentProjects, {
apolloProvider: mockApollo,
provide: {
projectsPath,
projectsPath: TEST_PROJECTS_PATH,
},
...options,
});
@ -41,6 +39,14 @@ describe('FrequentlyVisitedProjects', () => {
...wrapperInstance.vm.$attrs,
});
beforeEach(() => {
currentUserFrecentProjectsQueryHandler = jest.fn().mockResolvedValue({
data: {
frecentProjects: frecentProjectsMock,
},
});
});
it('passes project-specific props', () => {
createComponent();
@ -49,7 +55,7 @@ describe('FrequentlyVisitedProjects', () => {
groupName: 'Frequently visited projects',
viewAllItemsIcon: 'project',
viewAllItemsText: 'View all my projects',
viewAllItemsPath: projectsPath,
viewAllItemsPath: TEST_PROJECTS_PATH,
});
});
@ -96,4 +102,21 @@ describe('FrequentlyVisitedProjects', () => {
expect(wrapper.emitted('action')).toStrictEqual([[FREQUENTLY_VISITED_PROJECTS_HANDLE]]);
});
});
describe('when query returns null', () => {
beforeEach(async () => {
currentUserFrecentProjectsQueryHandler = jest.fn().mockResolvedValue({
data: {
frecentProjects: null,
},
});
createComponent();
await waitForPromises();
});
it('passes empty array to items', () => {
expect(findFrequentItems().props('items')).toEqual([]);
});
});
});

View File

@ -53,7 +53,7 @@ describe('IssuableForm', () => {
wrapper.vm.handleUpdateSelectedLabels(labels);
expect(wrapper.vm.selectedLabels).toBe(labels);
expect(wrapper.vm.selectedLabels).toStrictEqual(labels);
});
});
});

View File

@ -160,7 +160,7 @@ describe('IssuableItem', () => {
it('returns `issuable.assignees` reference when it is available', () => {
wrapper = createComponent();
expect(wrapper.vm.assignees).toBe(mockIssuable.assignees);
expect(wrapper.vm.assignees).toStrictEqual(mockIssuable.assignees);
});
});

View File

@ -13,5 +13,5 @@ RSpec.describe GitlabSchema.types['PrometheusAlert'] do
expect(described_class).to have_graphql_fields(*expected_fields)
end
specify { expect(described_class).to require_graphql_authorizations(:read_prometheus_alerts) }
specify { expect(described_class).to require_graphql_authorizations(:admin_operations) }
end

View File

@ -69,6 +69,7 @@ RSpec.describe SessionsHelper, feature_category: :system_access do
it 'returns the expected data' do
expect(helper.verification_data(user)).to eq({
username: user.username,
obfuscated_email: obfuscated_email(user.email),
verify_path: helper.session_path(:user),
resend_path: users_resend_verification_code_path,

View File

@ -0,0 +1,106 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Auth::Atlassian::TokenRefresher, feature_category: :integrations do
  subject(:token_refresher) { described_class.new(atlassian_identity) }
  let(:atlassian_identity) { build(:atlassian_identity) }
  # Canned Atlassian token-endpoint response used by the WebMock stubs below.
  let(:refresh_response_headers) { { 'Content-Type' => 'application/json' } }
  let(:refresh_response_body) do
    { refresh_token: 'newrefresh', access_token: 'newaccess', expires_in: 3600 }.to_json
  end
  describe '#needs_refresh?' do
    subject(:needs_refresh?) { token_refresher.needs_refresh? }
    # Threshold is MIN_TIME_ALLOWED_TILL_EXPIRE (5 minutes) before expiry.
    context 'when the token is expiring in more than 5 minutes' do
      before do
        atlassian_identity.expires_at = 6.minutes.from_now
      end
      it { is_expected.to eq(false) }
    end
    context 'when the token is expiring in less than 5 minutes' do
      before do
        atlassian_identity.expires_at = 4.minutes.from_now
      end
      it { is_expected.to eq(true) }
    end
    context 'when the token has already expired' do
      before do
        atlassian_identity.expires_at = 1.hour.ago
      end
      it { is_expected.to eq(true) }
    end
  end
  describe '#refresh!' do
    subject(:refresh!) { token_refresher.refresh! }
    context 'when the response is good' do
      before do
        stub_request(:post, described_class::REFRESH_TOKEN_URL)
          .to_return(
            status: 200,
            headers: refresh_response_headers,
            body: refresh_response_body
          )
      end
      # expires_at is derived from the response's expires_in (3600s), so we
      # only assert it lands within a minute of the expected time.
      it 'changes the identity access_token, refresh_token and expires_at' do
        expect { refresh! }
          .to change { atlassian_identity.refresh_token }.to('newrefresh')
          .and change { atlassian_identity.token }.to('newaccess')
          .and change { atlassian_identity.expires_at }.to be_within(1.minute).of(3600.seconds.from_now)
      end
    end
    context 'when the response is bad' do
      before do
        stub_request(:post, described_class::REFRESH_TOKEN_URL)
          .to_return(status: 500, headers: refresh_response_headers, body: { error: 'Broken' }.to_json)
      end
      # The error message raised comes from the response body's "error" field.
      it 'raises an exception' do
        expect { refresh! }.to raise_exception(described_class::AtlassianTokenRefreshError, 'Broken')
      end
    end
  end
  describe '#refresh_if_needed!' do
    subject(:refresh_if_needed!) { token_refresher.refresh_if_needed! }
    before do
      stub_request(:post, described_class::REFRESH_TOKEN_URL)
        .to_return(
          status: 200, headers: refresh_response_headers,
          body: refresh_response_body
        )
    end
    context 'when a refresh is needed' do
      before do
        atlassian_identity.expires_at = 1.minute.from_now
      end
      it 'refreshes the token' do
        expect { refresh_if_needed! }.to change { atlassian_identity.refresh_token }.to('newrefresh')
      end
    end
    context 'when a refresh is not needed' do
      before do
        atlassian_identity.expires_at = 10.minutes.from_now
      end
      it 'does not refresh the token' do
        expect { refresh_if_needed! }.not_to change { atlassian_identity.refresh_token }
      end
    end
  end
end

View File

@ -44,6 +44,7 @@ issues:
- issuable_severity
- issuable_sla
- issue_assignees
- issue_stage_events
- search_data
- closed_by
- epic_issue
@ -228,6 +229,7 @@ merge_requests:
- merge_head_diff
- merge_request_context_commits
- merge_request_context_commit_diff_files
- merge_request_stage_events
- events
- merge_requests_closing_issues
- cached_closes_issues
@ -823,7 +825,6 @@ project:
- jira_imports
- compliance_framework_settings
- compliance_management_frameworks
- metrics_users_starred_dashboards
- alert_management_alerts
- repository_storage_moves
- freeze_periods

View File

@ -0,0 +1,33 @@
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe CleanTimeTrackingWidgetDefinitions, feature_category: :team_planning, schema: 20240813065105 do
  let(:work_item_definitions) { table(:work_item_widget_definitions) }
  # Constants come from the migration under test.
  let(:widget_name) { described_class::WIDGET_NAME }
  let(:widget_enum_value) { described_class::WIDGET_ENUM_VALUE }
  let(:work_item_types) { described_class::WORK_ITEM_TYPES }
  describe '#up', :migration_with_transaction do
    # 7 widget definitions carry this widget_type (see the `* 7` below);
    # all of them should be renamed back to the canonical widget_name.
    it 'fixes all widget definition names if they had the wrong casing' do
      work_item_definitions.where(widget_type: widget_enum_value).update_all(name: 'wrong name')
      expect do
        migrate!
      end.to change { work_item_definitions.where(widget_type: widget_enum_value).pluck(:name) }
        .from(['wrong name'] * 7).to([widget_name] * 7)
    end
    it 'logs a warning if the type is missing' do
      type_name = work_item_types.first
      # Stub only the first type as missing; all other lookups behave normally.
      allow(described_class::WorkItemType).to receive(:find_by_name).and_call_original
      allow(described_class::WorkItemType).to receive(:find_by_name)
        .with(type_name).and_return(nil)
      expect(Gitlab::AppLogger).to receive(:warn).with("type #{type_name} is missing, not adding widget")
      migrate!
    end
  end
end

View File

@ -1115,56 +1115,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
end
end
describe 'read_prometheus_alerts' do
context 'with admin' do
let(:current_user) { admin }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:read_prometheus_alerts) }
end
context 'when admin mode is disabled' do
it { is_expected.to be_disallowed(:read_prometheus_alerts) }
end
end
context 'with owner' do
let(:current_user) { owner }
it { is_expected.to be_allowed(:read_prometheus_alerts) }
end
context 'with maintainer' do
let(:current_user) { maintainer }
it { is_expected.to be_allowed(:read_prometheus_alerts) }
end
context 'with developer' do
let(:current_user) { developer }
it { is_expected.to be_disallowed(:read_prometheus_alerts) }
end
context 'with reporter' do
let(:current_user) { reporter }
it { is_expected.to be_disallowed(:read_prometheus_alerts) }
end
context 'with guest' do
let(:current_user) { guest }
it { is_expected.to be_disallowed(:read_prometheus_alerts) }
end
context 'with anonymous' do
let(:current_user) { anonymous }
it { is_expected.to be_disallowed(:read_prometheus_alerts) }
end
end
describe 'metrics_dashboard feature' do
context 'public project' do
let(:project) { public_project }
@ -1176,8 +1126,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_allowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_allowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_allowed(:create_metrics_user_starred_dashboard) }
end
context 'with guest' do
@ -1204,8 +1152,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_allowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_allowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_allowed(:create_metrics_user_starred_dashboard) }
end
context 'with guest' do
@ -1214,8 +1160,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_disallowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_allowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_allowed(:create_metrics_user_starred_dashboard) }
end
context 'with anonymous' do
@ -1224,8 +1168,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_disallowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_disallowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_disallowed(:create_metrics_user_starred_dashboard) }
end
end
end
@ -1240,8 +1182,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_allowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_allowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_allowed(:create_metrics_user_starred_dashboard) }
end
context 'with guest' do
@ -1270,8 +1210,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_allowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_allowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_allowed(:create_metrics_user_starred_dashboard) }
end
context 'with guest' do
@ -1280,8 +1218,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_disallowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_allowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_allowed(:create_metrics_user_starred_dashboard) }
end
context 'with anonymous' do
@ -1303,8 +1239,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_allowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_allowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_allowed(:create_metrics_user_starred_dashboard) }
end
context 'with guest' do
@ -1329,8 +1263,6 @@ RSpec.describe ProjectPolicy, feature_category: :system_access do
it { is_expected.to be_allowed(:metrics_dashboard) }
it { is_expected.to be_allowed(:read_prometheus) }
it { is_expected.to be_allowed(:read_deployment) }
it { is_expected.to be_allowed(:read_metrics_user_starred_dashboard) }
it { is_expected.to be_allowed(:create_metrics_user_starred_dashboard) }
end
context 'with guest' do

View File

@ -38,6 +38,10 @@ RSpec.describe ::Packages::Pypi::SimpleIndexPresenter, :aggregate_failures, feat
end
end
it 'strips leading whitespace from the output' do
expect(presenter.first).not_to eq(' ')
end
it 'avoids n+1 database queries', :use_sql_query_cache do
control = ActiveRecord::QueryRecorder.new(skip_cached: false) do
described_class.new(project.packages.reload, project_or_group).body

View File

@ -104,6 +104,17 @@ RSpec.describe 'Project.value_streams', feature_category: :value_stream_manageme
it_behaves_like 'a working graphql query'
context 'when querying related stage items' do
let_it_be(:resource) { create(:project) }
let_it_be(:project) { resource }
let(:stage_id_to_paginate) do
Gitlab::GlobalId.as_global_id('test', model_name: Analytics::CycleAnalytics::Stage.to_s).to_s
end
it_behaves_like 'value stream related stage items query', 'project'
end
it 'returns only `default` value stream' do
expect(graphql_data).to eq(expected_value_stream)
end

View File

@ -0,0 +1,145 @@
# frozen_string_literal: true
require 'spec_helper'
# Specs for WorkItems::BulkUpdateService: applying widget updates (here, label
# additions/removals) to a batch of work items, scoped to a parent group or
# project and gated on the current user's permissions.
RSpec.describe WorkItems::BulkUpdateService, feature_category: :team_planning do
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
# A user with no membership in any group below; used for authorization failures.
let_it_be(:user) { create(:user) }
# An unrelated private group; its work item (work_item4) must never be updated
# even though its id is passed to the service.
let_it_be(:private_group) { create(:group, :private) }
let_it_be(:parent_group) { create(:group, :private, developers: developer, guests: guest) }
let_it_be(:group) { create(:group, :private, parent: parent_group) }
let_it_be(:project) { create(:project, :private, group: group) }
let_it_be(:label1) { create(:group_label, group: parent_group) }
let_it_be(:label2) { create(:group_label, group: parent_group) }
# label3 lives in the unrelated private group, so it should stay untouched.
let_it_be(:label3) { create(:group_label, group: private_group) }
# work_item1: group-level in `group`; work_item2: in `project`;
# work_item3: group-level in `parent_group` (above the scoped hierarchy);
# work_item4: in the unrelated `private_group`;
# work_item5: in `group` but intentionally excluded from the id list.
let_it_be_with_reload(:work_item1) { create(:work_item, :group_level, namespace: group, labels: [label1]) }
let_it_be_with_reload(:work_item2) { create(:work_item, project: project, labels: [label1]) }
let_it_be_with_reload(:work_item3) { create(:work_item, :group_level, namespace: parent_group, labels: [label1]) }
let_it_be_with_reload(:work_item4) { create(:work_item, :group_level, namespace: private_group, labels: [label3]) }
let_it_be_with_reload(:work_item5) { create(:work_item, :group_level, namespace: group, labels: [label1]) }
# Ids handed to the service; note work_item5 is deliberately omitted.
let(:updatable_work_items) { [work_item1, work_item2, work_item3, work_item4] }
let(:updatable_work_item_ids) { updatable_work_items.map(&:id) }
# Swap label1/label3 for label2 on every targeted work item.
let(:widget_params) do
{
labels_widget: {
add_label_ids: [label2.id],
remove_label_ids: [label1.id, label3.id]
}
}
end
subject(:service_result) do
described_class.new(
parent: parent,
current_user: current_user,
work_item_ids: updatable_work_item_ids,
widget_params: widget_params
).execute
end
context 'when parent is a group' do
let(:parent) { group }
context 'when the user can read the parent' do
let(:current_user) { developer }
it { is_expected.to be_success }
# Only work items inside `group`'s hierarchy (work_item1, work_item2) change;
# items above the scope, in unrelated groups, or not in the id list do not.
it 'updates all work items scoped to the group hierarchy' do
expect do
service_result
end.to change { work_item1.reload.label_ids }.from([label1.id]).to([label2.id])
.and change { work_item2.reload.label_ids }.from([label1.id]).to([label2.id])
.and not_change { work_item3.reload.label_ids }.from([label1.id])
.and not_change { work_item4.reload.label_ids }.from([label3.id])
.and not_change { work_item5.reload.label_ids }.from([label1.id])
end
it 'returns update count' do
expect(service_result[:updated_work_item_count]).to eq(2)
end
context 'when the user cannot update the work item' do
# Guests can read the group but lack update permission on the work items.
let(:current_user) { guest }
it 'does not update work items' do
expect do
service_result
end.to not_change { work_item1.reload.label_ids }.from([label1.id])
.and not_change { work_item2.reload.label_ids }.from([label1.id])
.and not_change { work_item3.reload.label_ids }.from([label1.id])
.and not_change { work_item4.reload.label_ids }.from([label3.id])
.and not_change { work_item5.reload.label_ids }.from([label1.id])
end
it 'returns a 0 update count' do
expect(service_result[:updated_work_item_count]).to eq(0)
end
end
end
context 'when the user cannot read the parent' do
let(:current_user) { user }
it { is_expected.to be_error }
it 'returns authorization as the reason for failure' do
expect(service_result.reason).to eq(:authorization)
end
end
end
context 'when parent is a project' do
let(:parent) { project }
context 'when the user can read the parent' do
let(:current_user) { developer }
it { is_expected.to be_success }
# With a project parent only work_item2 (the project-level item) is in scope.
it 'updates all work items scoped to the project' do
expect do
service_result
end.to not_change { work_item1.reload.label_ids }.from([label1.id])
.and change { work_item2.reload.label_ids }.from([label1.id]).to([label2.id])
.and not_change { work_item3.reload.label_ids }.from([label1.id])
.and not_change { work_item4.reload.label_ids }.from([label3.id])
.and not_change { work_item5.reload.label_ids }.from([label1.id])
end
it 'returns update count' do
expect(service_result[:updated_work_item_count]).to eq(1)
end
context 'when the user cannot update the work item' do
let(:current_user) { guest }
it 'does not update work items' do
expect do
service_result
end.to not_change { work_item1.reload.label_ids }.from([label1.id])
.and not_change { work_item2.reload.label_ids }.from([label1.id])
.and not_change { work_item3.reload.label_ids }.from([label1.id])
.and not_change { work_item4.reload.label_ids }.from([label3.id])
.and not_change { work_item5.reload.label_ids }.from([label1.id])
end
it 'returns a 0 update count' do
expect(service_result[:updated_work_item_count]).to eq(0)
end
end
end
context 'when the user cannot read the parent' do
let(:current_user) { user }
it { is_expected.to be_error }
it 'returns authorization as the reason for failure' do
expect(service_result.reason).to eq(:authorization)
end
end
end
end

View File

@ -13,30 +13,6 @@ timeout = ENV['CI'] || ENV['CI_SERVER'] ? 30 : 10
# Support running Capybara on a specific port to allow saving commonly used pages
Capybara.server_port = ENV['CAPYBARA_PORT'] if ENV['CAPYBARA_PORT']
# Define an error class for JS console messages
JSConsoleError = Class.new(StandardError)
# Filter out innocuous JS console messages
JS_CONSOLE_FILTER = Regexp.union(
[
'"[HMR] Waiting for update signal from WDS..."',
'"[WDS] Hot Module Replacement enabled."',
'"[WDS] Live Reloading enabled."',
'Download the Vue Devtools extension',
'Download the Apollo DevTools',
"Unrecognized feature: 'interest-cohort'",
'Does this page need fixes or improvements?',
# Needed after https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60933
# which opts out gitlab from FloC by default
# see https://web.dev/floc/ for more info on FloC
"Origin trial controlled feature not enabled: 'interest-cohort'",
# ERR_CONNECTION error could happen due to automated test session disabling browser network request
'net::ERR_CONNECTION'
]
)
CAPYBARA_WINDOW_SIZE = [1366, 768].freeze
SCREENSHOT_FILENAME_LENGTH = ENV['CI'] || ENV['CI_SERVER'] ? 150 : 99
@ -139,6 +115,7 @@ end
RSpec.configure do |config|
config.include CapybaraHelpers, type: :feature
config.include BrowserConsoleHelpers, type: :feature
config.before(:context, :js) do
# This prevents Selenium from creating thousands of connections while waiting for
@ -159,6 +136,8 @@ RSpec.configure do |config|
end
config.before(:example, :js) do
clear_browser_logs
session = Capybara.current_session
allow(Gitlab::Application.routes).to receive(:default_url_options).and_return(
@ -200,17 +179,8 @@ RSpec.configure do |config|
end
config.after(:example, :js) do |example|
# when a test fails, display any messages in the browser's console
# but fail don't add the message if the failure is a pending test that got
# fixed. If we raised the `JSException` the fixed test would be marked as
# failed again.
if example.exception && !example.exception.is_a?(RSpec::Core::Pending::PendingExampleFixedError)
console = page.driver.browser.logs.get(:browser)&.reject { |log| log.message =~ JS_CONSOLE_FILTER }
if console.present?
message = "Unexpected browser console output:\n" + console.map(&:message).join("\n")
raise JSConsoleError, message
end
raise_if_unexpected_browser_console_output
end
# prevent localStorage from introducing side effects based on test order

View File

@ -0,0 +1,93 @@
# frozen_string_literal: true
# Helpers for collecting and asserting on browser console output in feature
# (Capybara/Selenium) specs. Included via RSpec config for `type: :feature`.
module BrowserConsoleHelpers
# Define an error class for browser console messages
BrowserConsoleError = Class.new(StandardError)
# Filter out noisy browser console messages
#
# This is used when printing out the full console messages in failed tests
BROWSER_CONSOLE_FILTER = Regexp.union(
[
'"[HMR] Waiting for update signal from WDS..."',
'"[WDS] Hot Module Replacement enabled."',
'"[WDS] Live Reloading enabled."',
'Download the Vue Devtools extension',
'Download the Apollo DevTools',
"Unrecognized feature: 'interest-cohort'",
'Does this page need fixes or improvements?',
# Needed after https://gitlab.com/gitlab-org/gitlab/-/merge_requests/60933
# which opts out gitlab from FloC by default
# see https://web.dev/floc/ for more info on FloC
"Origin trial controlled feature not enabled: 'interest-cohort'",
# ERR_CONNECTION error could happen due to automated test session disabling browser network request
'net::ERR_CONNECTION'
]
)
# Filter out noisy browser console **error** messages
#
# This is used for expect_page_to_have_no_console_errors
BROWSER_CONSOLE_ERROR_FILTER = Regexp.union(
[
/gravatar\.com.*Failed to load resource/,
/snowplowanalytics.*Failed to load resource/
]
)
# Returns the full set of browser console logs collected so far in this
# example, accumulated across calls (see notes below on driver behavior).
def browser_logs
@browser_logs ||= []
# note: In chromium, browser logs are *cleared* after fetching them. For us to create the expected behavior of
# returning the *full* set of logs each time this method is called, we need to keep track of a cache of
# @browser_logs and append the new logs to it.
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162499#note_2060667250 for more info.
#
# note: Firefox does not have #logs method, so we need to `try` and check if it's nil
new_browser_logs = page.driver.browser.try(:logs)&.get(:browser)
return @browser_logs if !new_browser_logs || new_browser_logs.empty?
# why: We check for timestamps to determine if the driver is giving us a new set of logs or the same set of logs on
# each call. If it's a new set of logs, we need to append to cache.
if @browser_logs.empty? || @browser_logs.first.timestamp == new_browser_logs.first.timestamp
@browser_logs = new_browser_logs
else
@browser_logs += new_browser_logs
end
@browser_logs
end
# Resets the cached logs and drains the driver's buffer so output from a
# previous example does not leak into the next one.
def clear_browser_logs
@browser_logs = []
# why: We need to clear browser logs from Chromium, otherwise logs will spill over into other examples.
# Chromium has a built-in behavior that clears its logs when requested.
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/162499#note_2060667250 for more info.
page.driver.browser.try(:logs)&.get(:browser)
end
# Raises BrowserConsoleError if any console message survives the
# BROWSER_CONSOLE_FILTER noise filter.
def raise_if_unexpected_browser_console_output
console = browser_logs.reject { |log| log.message =~ BROWSER_CONSOLE_FILTER }
return unless console.present?
message = "Unexpected browser console output:\n#{console.map(&:message).join("\n")}"
raise BrowserConsoleError, message
end
# Asserts there are no SEVERE console messages, ignoring the built-in error
# filter plus any extra patterns passed via `allow:` (array of regexps/strings).
def expect_page_to_have_no_console_errors(allow: nil)
message_regex = if allow
Regexp.union([BROWSER_CONSOLE_ERROR_FILTER] + allow)
else
BROWSER_CONSOLE_ERROR_FILTER
end
console = browser_logs.select { |log| log.level == 'SEVERE' && log.message !~ message_regex }
expect(console).to be_empty, "Unexpected browser console errors:\n#{console.map(&:message).join("\n")}"
end
end

View File

@ -0,0 +1,175 @@
# frozen_string_literal: true
# Shared examples for the GraphQL value-stream stage items query, usable for
# both group and project resources (`group_or_project` is 'group' or 'project').
#
# The including context must define:
#   - `resource`: the group or project being queried (used for `fullPath`)
#   - `project`: the project that merge requests / issues are created in
#   - `stage_id_to_paginate`: a stage global id used by the pagination query
RSpec.shared_examples 'value stream related stage items query' do |group_or_project|
let(:resource_path) { group_or_project.to_sym }
let_it_be(:user) { create(:user) }
# Fixed "now" so the relative timestamps below produce deterministic results.
let_it_be(:current_time) do
Time.zone.parse('2024-07-15')
end
let(:query) do
<<~GQL
query($fullPath: ID!, $from: Date!, $to: Date!, $authorUsername: String) {
#{resource_path}(fullPath: $fullPath) {
id
valueStreams {
#{fields}
}
}
}
GQL
end
let(:fields) do
<<~GRAPHQL
nodes {
stages {
name
metrics(timeframe: { start: $from, end: $to }, authorUsername: $authorUsername) {
items {
nodes {
endEventTimestamp
duration
record {
... on MergeRequest {
id
}
... on Issue {
id
}
}
}
}
}
}
}
GRAPHQL
end
# Two merge requests with build start/finish metrics 7h and 8h apart,
# expected to appear in the 'test' stage.
let_it_be(:merge_request1) do
create(:merge_request, :unique_branches, source_project: project, created_at: current_time - 1.day).tap do |mr|
mr.metrics.update!(latest_build_started_at: current_time - 10.hours,
latest_build_finished_at: current_time - 3.hours)
end
end
let_it_be(:merge_request2) do
create(:merge_request, :unique_branches, source_project: project, created_at: current_time - 1.day).tap do |mr|
mr.metrics.update!(latest_build_started_at: current_time - 12.hours,
latest_build_finished_at: current_time - 4.hours)
end
end
# Two issues with milestone-association metrics (2 days and 30 minutes after
# creation), expected to appear in the 'issue' stage.
let_it_be(:issue1) do
create(:issue, project: project, created_at: current_time - 4.days).tap do |i|
i.metrics.update!(first_associated_with_milestone_at: current_time - 2.days)
end
end
let_it_be(:issue2) do
create(:issue, project: project, created_at: current_time - 1.hour).tap do |i|
i.metrics.update!(first_associated_with_milestone_at: current_time - 30.minutes)
end
end
# Timeframe covers all records created above.
let(:variables) do
{
fullPath: resource.full_path,
from: "2024-07-01",
to: "2024-08-01"
}
end
before_all do
resource.add_developer(user)
end
before do
travel_to(current_time)
end
it 'returns stage related merge requests data' do
post_graphql(query, current_user: user, variables: variables)
data = get_stage_data_by(name: 'test')
expect(data.size).to eq(2)
expect(data).to include({
'endEventTimestamp' => '2024-07-14T21:00:00Z',
'duration' => '7 hours',
'record' => { 'id' => merge_request1.to_global_id.to_s }
})
expect(data).to include({
'endEventTimestamp' => '2024-07-14T20:00:00Z',
'duration' => '8 hours',
'record' => { 'id' => merge_request2.to_global_id.to_s }
})
end
it 'returns stage related issues data' do
post_graphql(query, current_user: user, variables: variables)
data = get_stage_data_by(name: 'issue')
expect(data.size).to eq(2)
expect(data).to include({
'endEventTimestamp' => '2024-07-13T00:00:00Z',
'duration' => '2 days',
'record' => { 'id' => issue1.to_global_id.to_s }
})
expect(data).to include({
'endEventTimestamp' => '2024-07-14T23:30:00Z',
'duration' => '30 mins',
'record' => { 'id' => issue2.to_global_id.to_s }
})
end
context 'when using pagination' do
# Builds a query for a single stage (stage_id_to_paginate) with pagination
# args, selecting only the MergeRequest record id for each item.
def pagination_query(params)
fields =
<<~GRAPHQL
record {
... on MergeRequest {
id
}
}
GRAPHQL
graphql_query_for(resource_path, { full_path: resource.full_path },
<<~QUERY
valueStreams(first: 1) {
nodes {
stages(id: "#{stage_id_to_paginate}") {
metrics(timeframe: { start: "2024-07-01", end: "2024-08-01" }) {
#{query_nodes(:items, fields, include_pagination_info: true, args: params)}
}
}
}
}
QUERY
)
end
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :END_EVENT_ASC }
let(:current_user) { user }
let(:data_path) { [resource_path, :valueStreams, :nodes, 0, :stages, :metrics, :items] }
let(:node_path) { %w[record id] }
let(:first_param) { 1 }
# Ascending end-event order: merge_request2 ends at 20:00, before
# merge_request1 at 21:00.
let(:all_records) { [merge_request2, merge_request1].map(&:to_global_id).map(&:to_s) }
end
end
# Finds the stage node with the given (downcased) name in the first value
# stream of the response and returns its metrics item nodes.
def get_stage_data_by(name:)
data = graphql_data_at(resource_path, :value_streams, :nodes, 0, :stages)
stage_data =
data.find { |node| node['name'].downcase == name }
stage_data.dig('metrics', 'items', 'nodes')
end
end