Add latest changes from gitlab-org/gitlab@master

GitLab Bot 2024-05-14 15:16:45 +00:00
parent 4a0e87faf2
commit 1847ddf46c
108 changed files with 1540 additions and 412 deletions


@ -259,11 +259,42 @@ graphql-schema-dump:
script:
- run_timed_command "yarn jest:ci"
jest-build-cache:
extends:
- .frontend-test-base
- .frontend:rules:jest
needs: []
artifacts:
name: jest-cache
expire_in: 12h
when: always
paths:
- tmp/cache/jest/
script:
- run_timed_command "yarn jest:ci:build-cache"
variables:
# Propagate exit code correctly. See https://gitlab.com/groups/gitlab-org/-/epics/6074.
FF_USE_NEW_BASH_EVAL_STRATEGY: 'true'
FORCE_COLOR: '1'
allow_failure:
# In merge requests, failures exit with 2, so fail the pipeline. Otherwise,
# they exit with 1, so as not to break master and other pipelines.
exit_codes: 1
jest-build-cache-vue3:
extends:
- jest-build-cache
- .frontend:rules:jest-vue3
- .vue3
jest:
extends:
- .jest-base
- .frontend:rules:jest
needs: ["rspec-all frontend_fixture"]
needs:
- "rspec-all frontend_fixture"
- job: jest-build-cache
optional: true
artifacts:
name: coverage-frontend
expire_in: 31d
@ -281,6 +312,10 @@ jest vue3:
- jest
- .frontend:rules:jest-vue3
- .vue3
needs:
- "rspec-all frontend_fixture"
- job: jest-build-cache-vue3
optional: true
jest predictive:
extends:


@ -52,9 +52,6 @@ code_quality cache:
SAST_EXCLUDED_PATHS: "qa, spec, doc, ee/spec, config/gitlab.yml.example, tmp" # GitLab-specific
SAST_EXCLUDED_ANALYZERS: bandit, flawfinder, phpcs-security-audit, pmd-apex, security-code-scan, spotbugs, eslint, nodejs-scan, sobelow
brakeman-sast:
rules: !reference [".reports:rules:brakeman-sast", rules]
semgrep-sast:
rules: !reference [".reports:rules:semgrep-sast", rules]


@ -2634,19 +2634,6 @@
- <<: *if-default-refs
changes: *docs-patterns
.reports:rules:brakeman-sast:
rules:
- <<: *if-merge-request-labels-pipeline-expedite
when: never
- if: $SAST_DISABLED
when: never
- if: $SAST_EXCLUDED_ANALYZERS =~ /brakeman/
when: never
- <<: *if-default-refs
changes:
- '**/*.rb'
- '**/Gemfile'
.reports:rules:semgrep-sast:
rules:
- <<: *if-merge-request-labels-pipeline-expedite


@ -29,7 +29,7 @@ If you have questions about the patch release process, please:
* Refer to the [patch release runbook for engineers and maintainers] for guidance.
* Ask questions on the [`#releases`] Slack channel (internal only).
[severity label]: https://about.gitlab.com/handbook/engineering/quality/issue-triage/#severity
[severity label]: https://handbook.gitlab.com/handbook/engineering/infrastructure/engineering-productivity/issue-triage/#severity
[patch release runbook for engineers and maintainers]: https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/patch/engineers.md
[`#releases`]: https://gitlab.slack.com/archives/C0XM5UU6B


@ -19,7 +19,6 @@ Gettext/StaticIdentifier:
- 'ee/app/models/integrations/github.rb'
- 'ee/app/services/ee/projects/create_from_template_service.rb'
- 'ee/app/services/security/security_orchestration_policies/policy_configuration_validation_service.rb'
- 'ee/app/services/timebox/rollup_report_service.rb'
- 'ee/app/services/timebox_report_service.rb'
- 'ee/spec/controllers/groups/security/policies_controller_spec.rb'
- 'ee/spec/features/registrations/identity_verification_spec.rb'


@ -23,16 +23,3 @@ Layout/MultilineOperationIndentation:
- 'app/services/labels/transfer_service.rb'
- 'app/services/members/approve_access_request_service.rb'
- 'app/services/webauthn/authenticate_service.rb'
- 'app/validators/feature_flag_strategies_validator.rb'
- 'app/workers/container_expiration_policies/cleanup_container_repository_worker.rb'
- 'config/initializers/devise_dynamic_password_length_validation.rb'
- 'danger/utility_css/Dangerfile'
- 'ee/app/controllers/projects/integrations/jira/issues_controller.rb'
- 'ee/app/controllers/smartcard_controller.rb'
- 'ee/app/graphql/resolvers/boards/epic_lists_resolver.rb'
- 'ee/app/helpers/ee/application_settings_helper.rb'
- 'ee/app/helpers/ee/boards_helper.rb'
- 'ee/app/helpers/ee/groups/group_members_helper.rb'
- 'ee/app/helpers/ee/projects/project_members_helper.rb'
- 'ee/app/helpers/groups/security_features_helper.rb'
- 'ee/app/helpers/groups/sso_helper.rb'


@ -15,23 +15,5 @@ Lint/AmbiguousRegexpLiteral:
- 'spec/initializers/validate_database_config_spec.rb'
- 'spec/lib/banzai/filter/references/label_reference_filter_spec.rb'
- 'spec/lib/feature_spec.rb'
- 'spec/lib/gitlab/composer/version_index_spec.rb'
- 'spec/lib/gitlab/config/entry/validator_spec.rb'
- 'spec/lib/gitlab/config/entry/validators_spec.rb'
- 'spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb'
- 'spec/lib/gitlab/database/migration_helpers/v2_spec.rb'
- 'spec/lib/gitlab/database/migration_helpers_spec.rb'
- 'spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb'
- 'spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb'
- 'spec/lib/gitlab/database/partitioning_spec.rb'
- 'spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb'
- 'spec/lib/gitlab/gitaly_client/with_feature_flag_actors_spec.rb'
- 'spec/lib/gitlab/pagination/keyset/in_operator_optimization/array_scope_columns_spec.rb'
- 'spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb'
- 'spec/lib/gitlab/pagination/keyset/iterator_spec.rb'
- 'spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb'
- 'spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb'
- 'spec/lib/gitlab/web_ide/config/entry/global_spec.rb'
- 'spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb'
- 'spec/serializers/commit_entity_spec.rb'


@ -51,3 +51,4 @@ Migration/EnsureFactoryForTable:
- 'db/migrate/20240306121653_create_relation_import_tracker.rb'
- 'db/migrate/20240404192955_create_early_access_program_tracking_events.rb'
- 'db/migrate/20240423064716_create_ci_build_execution_config.rb'
- 'db/migrate/20240419082037_create_ai_self_hosted_models.rb'


@ -3960,7 +3960,6 @@ RSpec/FeatureCategory:
- 'spec/lib/gitlab/usage_data_counters/base_counter_spec.rb'
- 'spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb'
- 'spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb'
- 'spec/lib/gitlab/usage_data_counters/designs_counter_spec.rb'
- 'spec/lib/gitlab/usage_data_counters/gitlab_cli_activity_unique_counter_spec.rb'
- 'spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb'
- 'spec/lib/gitlab/usage_data_counters/ipynb_diff_activity_counter_spec.rb'


@ -508,7 +508,6 @@ Style/ClassAndModuleChildren:
- 'lib/gitlab/instrumentation/elasticsearch_transport.rb'
- 'lib/gitlab/usage_data_counters/base_counter.rb'
- 'lib/gitlab/usage_data_counters/ci_template_unique_counter.rb'
- 'lib/gitlab/usage_data_counters/designs_counter.rb'
- 'lib/gitlab/usage_data_counters/note_counter.rb'
- 'lib/release_highlights/validator/entry.rb'
- 'qa/qa/page/component/project/templates.rb'


@ -1530,7 +1530,6 @@ Style/InlineDisableAnnotation:
- 'ee/app/services/security/update_training_service.rb'
- 'ee/app/services/status_page/publish_base_service.rb'
- 'ee/app/services/timebox/event_aggregation_service.rb'
- 'ee/app/services/timebox/rollup_report_service.rb'
- 'ee/app/services/timebox_report_service.rb'
- 'ee/app/services/vulnerabilities/bulk_dismiss_service.rb'
- 'ee/app/services/vulnerabilities/create_service_base.rb'


@ -50,6 +50,12 @@ export const I18N = {
update: s__('BranchRules|Update'),
edit: s__('BranchRules|Edit'),
updateBranchRuleError: s__('BranchRules|Something went wrong while updating branch rule.'),
allowedToPushDescription: s__(
'BranchRules|Changes require a merge request. The following users can push and merge directly.',
),
allowedToPushEmptyState: s__('BranchRules|No one is allowed to push and merge changes.'),
allowedToMergeEmptyState: s__('BranchRules|No one is allowed to merge changes.'),
statusChecksEmptyState: s__('BranchRules|No status checks have been added.'),
};
export const EDIT_RULE_MODAL_ID = 'editRuleModal';


@ -293,6 +293,8 @@ export default {
:roles="pushAccessLevels.roles"
:users="pushAccessLevels.users"
:groups="pushAccessLevels.groups"
:empty-state-copy="$options.i18n.allowedToPushEmptyState"
:help-text="$options.i18n.allowedToPushDescription"
data-testid="allowed-to-push-content"
/>
@ -304,6 +306,7 @@ export default {
:roles="mergeAccessLevels.roles"
:users="mergeAccessLevels.users"
:groups="mergeAccessLevels.groups"
:empty-state-copy="$options.i18n.allowedToMergeEmptyState"
is-edit-available
data-testid="allowed-to-merge-content"
/>
@ -377,6 +380,7 @@ export default {
:header-link-title="$options.i18n.statusChecksLinkTitle"
:header-link-href="statusChecksPath"
:status-checks="statusChecks"
:empty-state-copy="$options.i18n.statusChecksEmptyState"
/>
</template>
<!-- EE end -->


@ -58,13 +58,34 @@ export default {
required: false,
default: false,
},
emptyStateCopy: {
type: String,
required: true,
},
helpText: {
type: String,
required: false,
default: () => '',
},
},
computed: {
showUsersDivider() {
hasRoles() {
return Boolean(this.roles.length);
},
hasUsers() {
return Boolean(this.users.length);
},
hasStatusChecks() {
return Boolean(this.statusChecks.length);
},
showGroupsDivider() {
return Boolean(this.roles.length || this.users.length);
return this.hasRoles || this.hasUsers;
},
showEmptyState() {
return !this.hasRoles && !this.hasUsers && !this.hasStatusChecks;
},
showHelpText() {
return Boolean(this.helpText.length);
},
},
};
@ -73,8 +94,8 @@ export default {
<template>
<gl-card
class="gl-new-card gl-mb-5"
header-class="gl-new-card-header"
body-class="gl-new-card-body gl-px-5"
header-class="gl-new-card-header gl-flex-wrap"
body-class="gl-new-card-body gl-px-5 gl-pt-4"
>
<template #header>
<strong>{{ header }}</strong>
@ -86,7 +107,14 @@ export default {
>{{ __('Edit') }}</gl-button
>
<gl-link v-else :href="headerLinkHref">{{ headerLinkTitle }}</gl-link>
</template>
<p v-if="showHelpText" class="gl-flex-basis-full gl-mb-0 gl-text-secondary">
{{ helpText }}
</p></template
>
<p v-if="showEmptyState" class="gl-text-secondary" data-testid="protection-empty-state">
{{ emptyStateCopy }}
</p>
<!-- Roles -->
<protection-row v-if="roles.length" :title="$options.i18n.rolesTitle" :access-levels="roles" />
@ -94,7 +122,7 @@ export default {
<!-- Users -->
<protection-row
v-if="users.length"
:show-divider="showUsersDivider"
:show-divider="hasRoles"
:users="users"
:title="$options.i18n.usersTitle"
/>


@ -66,7 +66,7 @@ export default {
<template>
<div
class="gl-display-flex gl-align-items-center gl-border-gray-100 gl-mb-4 gl-pt-4 gl-border-t-1"
class="gl-display-flex gl-align-items-center gl-border-gray-100 gl-mb-4 gl-border-t-1"
:class="{ 'gl-border-t-solid': showDivider }"
>
<div class="gl-display-flex gl-w-full gl-justify-content-space-between gl-align-items-center">


@ -50,6 +50,7 @@ module Integrations
:google_play_protected_refs,
:group_confidential_mention_events,
:group_mention_events,
:hostname,
:incident_events,
:inherit_from_id,
# We're using `issues_events` and `merge_requests_events`


@ -48,8 +48,9 @@ class Projects::PipelinesController < Projects::ApplicationController
feature_category :continuous_integration, [
:charts, :show, :stage, :cancel, :retry,
:builds, :dag, :failures, :status,
:index, :create, :new, :destroy
:index, :new, :destroy
]
feature_category :pipeline_composition, [:create]
feature_category :code_testing, [:test_report]
feature_category :build_artifacts, [:downloadable_artifacts]


@ -0,0 +1,20 @@
# frozen_string_literal: true
module Members
class PendingInvitationsFinder
def initialize(invite_emails)
@invite_emails = invite_emails
end
def execute
Member.with_case_insensitive_invite_emails(invite_emails)
.invite
.distinct_on_source_and_case_insensitive_invite_email
.order_updated_desc
end
private
attr_reader :invite_emails
end
end


@ -3,7 +3,14 @@
module Integrations
class Telegram < BaseChatNotification
include HasAvatar
TELEGRAM_HOSTNAME = "https://api.telegram.org/bot%{token}/sendMessage"
TELEGRAM_HOSTNAME = "%{hostname}/bot%{token}/sendMessage"
field :hostname,
section: SECTION_TYPE_CONNECTION,
help: 'Custom hostname of the Telegram API. The default value is `https://api.telegram.org`.',
placeholder: 'https://api.telegram.org',
exposes_secrets: true,
required: false
field :token,
section: SECTION_TYPE_CONNECTION,
@ -78,7 +85,8 @@ module Integrations
private
def set_webhook
self.webhook = format(TELEGRAM_HOSTNAME, token: token) if token.present?
hostname = self.hostname.presence || 'https://api.telegram.org'
self.webhook = format(TELEGRAM_HOSTNAME, hostname: hostname, token: token) if token.present?
end
def notify(message, _opts)
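For reference, a minimal standalone sketch of how the new `hostname` field feeds into the webhook URL built by `set_webhook`. The token below is a placeholder, and `presence` is approximated with a plain string check outside the model:

```ruby
# Sketch only: mirrors the TELEGRAM_HOSTNAME template and the fallback above.
TELEGRAM_HOSTNAME = "%{hostname}/bot%{token}/sendMessage"

def telegram_webhook(token:, hostname: nil)
  hostname = 'https://api.telegram.org' if hostname.to_s.strip.empty?
  format(TELEGRAM_HOSTNAME, hostname: hostname, token: token)
end

telegram_webhook(token: '123456:ABC-DEF')
# => "https://api.telegram.org/bot123456:ABC-DEF/sendMessage"
telegram_webhook(token: '123456:ABC-DEF', hostname: 'http://localhost:8081')
# => "http://localhost:8081/bot123456:ABC-DEF/sendMessage"
```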


@ -143,6 +143,9 @@ class Member < ApplicationRecord
scope :invite, -> { where.not(invite_token: nil) }
scope :non_invite, -> { where(invite_token: nil) }
scope :with_case_insensitive_invite_emails, ->(emails) do
where(arel_table[:invite_email].lower.in(emails.map(&:downcase)))
end
scope :request, -> { where.not(requested_at: nil) }
scope :non_request, -> { where(requested_at: nil) }
@ -208,6 +211,11 @@ class Member < ApplicationRecord
unscoped.from(distinct_members, :members)
end
scope :distinct_on_source_and_case_insensitive_invite_email, -> do
select('DISTINCT ON (source_id, source_type, LOWER(invite_email)) members.*')
.order('source_id, source_type, LOWER(invite_email)')
end
scope :order_name_asc, -> do
build_keyset_order_on_joined_column(
scope: left_join_users,
@ -288,6 +296,8 @@ class Member < ApplicationRecord
)
end
scope :order_updated_desc, -> { order(updated_at: :desc) }
scope :on_project_and_ancestors, ->(project) { where(source: [project] + project.ancestors) }
before_validation :set_member_namespace_id, on: :create


@ -1644,7 +1644,7 @@ class User < MainClusterwide::ApplicationRecord
end
def pending_invitations
Member.where(invite_email: verified_emails).invite
Members::PendingInvitationsFinder.new(verified_emails).execute
end
def all_emails(include_private_email: true)


@ -4,6 +4,7 @@ module DesignManagement
class DeleteDesignsService < DesignService
include RunsDesignActions
include OnSuccessCallbacks
include Gitlab::InternalEventsTracking
def initialize(project, user, params = {})
super
@ -55,16 +56,12 @@ module DesignManagement
def design_action(design)
on_success do
counter.count(:delete)
track_internal_event('delete_design_management_design', user: current_user, project: project)
end
DesignManagement::DesignAction.new(design, :delete)
end
def counter
::Gitlab::UsageDataCounters::DesignsCounter
end
def formatted_file_list
designs.map { |design| "- #{design.full_path}" }.join("\n")
end


@ -4,6 +4,7 @@ module DesignManagement
class SaveDesignsService < DesignService
include RunsDesignActions
include OnSuccessCallbacks
include Gitlab::InternalEventsTracking
MAX_FILES = 10
@ -133,12 +134,12 @@ module DesignManagement
if action == :update
::Gitlab::UsageDataCounters::IssueActivityUniqueCounter
.track_issue_designs_modified_action(author: current_user, project: project)
track_internal_event('update_design_management_design', user: current_user, project: project)
else
::Gitlab::UsageDataCounters::IssueActivityUniqueCounter
.track_issue_designs_added_action(author: current_user, project: project)
track_internal_event('create_design_management_design', user: current_user, project: project)
end
::Gitlab::UsageDataCounters::DesignsCounter.count(action)
end
end
end


@ -28,9 +28,9 @@ class FeatureFlagStrategiesValidator < ActiveModel::EachValidator
def strategy_validations(record, attribute, strategy)
validate_name(record, attribute, strategy) &&
validate_parameters_type(record, attribute, strategy) &&
validate_parameters_keys(record, attribute, strategy) &&
validate_parameters_values(record, attribute, strategy)
validate_parameters_type(record, attribute, strategy) &&
validate_parameters_keys(record, attribute, strategy) &&
validate_parameters_values(record, attribute, strategy)
end
def validate_name(record, attribute, strategy)
@ -79,13 +79,13 @@ class FeatureFlagStrategiesValidator < ActiveModel::EachValidator
def valid_ids?(user_ids)
user_ids.uniq.length == user_ids.length &&
user_ids.all? { |id| valid_id?(id) }
user_ids.all? { |id| valid_id?(id) }
end
def valid_id?(user_id)
user_id.present? &&
user_id.strip == user_id &&
user_id.length <= USERID_MAX_LENGTH
user_id.strip == user_id &&
user_id.length <= USERID_MAX_LENGTH
end
def error(record, attribute, msg)


@ -2012,7 +2012,7 @@
:tags: []
- :name: pipeline_creation:ci_external_pull_requests_create_pipeline
:worker_name: Ci::ExternalPullRequests::CreatePipelineWorker
:feature_category: :continuous_integration
:feature_category: :pipeline_composition
:has_external_dependencies: false
:urgency: :high
:resource_boundary: :cpu
@ -2021,7 +2021,7 @@
:tags: []
- :name: pipeline_creation:create_pipeline
:worker_name: CreatePipelineWorker
:feature_category: :continuous_integration
:feature_category: :pipeline_composition
:has_external_dependencies: false
:urgency: :high
:resource_boundary: :cpu
@ -2030,7 +2030,7 @@
:tags: []
- :name: pipeline_creation:merge_requests_create_pipeline
:worker_name: MergeRequests::CreatePipelineWorker
:feature_category: :continuous_integration
:feature_category: :pipeline_composition
:has_external_dependencies: false
:urgency: :high
:resource_boundary: :cpu
@ -2039,7 +2039,7 @@
:tags: []
- :name: pipeline_creation:run_pipeline_schedule
:worker_name: RunPipelineScheduleWorker
:feature_category: :continuous_integration
:feature_category: :pipeline_composition
:has_external_dependencies: false
:urgency: :low
:resource_boundary: :unknown


@ -7,7 +7,7 @@ module Ci
data_consistency :always
queue_namespace :pipeline_creation
feature_category :continuous_integration
feature_category :pipeline_composition
urgency :high
worker_resource_boundary :cpu


@ -171,8 +171,8 @@ module ContainerExpirationPolicies
before_truncate_size = result.payload[:cleanup_tags_service_before_truncate_size]
after_truncate_size = result.payload[:cleanup_tags_service_after_truncate_size]
truncated = before_truncate_size &&
after_truncate_size &&
before_truncate_size != after_truncate_size
after_truncate_size &&
before_truncate_size != after_truncate_size
log_extra_metadata_on_done(:cleanup_tags_service_truncated, !!truncated)
end


@ -9,7 +9,7 @@ class CreatePipelineWorker # rubocop:disable Scalability/IdempotentWorker
include PipelineQueue
queue_namespace :pipeline_creation
feature_category :continuous_integration
feature_category :pipeline_composition
urgency :high
worker_resource_boundary :cpu
loggable_arguments 2, 3, 4


@ -10,7 +10,7 @@ module MergeRequests
include PipelineQueue
queue_namespace :pipeline_creation
feature_category :continuous_integration
feature_category :pipeline_composition
urgency :high
worker_resource_boundary :cpu
idempotent!


@ -9,7 +9,7 @@ class RunPipelineScheduleWorker # rubocop:disable Scalability/IdempotentWorker
include PipelineQueue
queue_namespace :pipeline_creation
feature_category :continuous_integration
feature_category :pipeline_composition
deduplicate :until_executed, including_scheduled: true
idempotent!


@ -0,0 +1,20 @@
---
description: A design is created
internal_events: true
action: create_design_management_design
identifiers:
- project
- namespace
- user
product_section: dev
product_stage: plan
product_group: product_planning
milestone: '17.0'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150994
distributions:
- ce
- ee
tiers:
- free
- premium
- ultimate


@ -0,0 +1,20 @@
---
description: A design is deleted
internal_events: true
action: delete_design_management_design
identifiers:
- project
- namespace
- user
product_section: dev
product_stage: plan
product_group: product_planning
milestone: '17.0'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150994
distributions:
- ce
- ee
tiers:
- free
- premium
- ultimate


@ -0,0 +1,20 @@
---
description: A design is updated
internal_events: true
action: update_design_management_design
identifiers:
- project
- namespace
- user
product_section: dev
product_stage: plan
product_group: product_planning
milestone: '17.0'
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/150994
distributions:
- ce
- ee
tiers:
- free
- premium
- ultimate


@ -12,7 +12,7 @@
def length_validator_supports_dynamic_length_checks?(validator)
validator.options[:minimum].is_a?(Proc) &&
validator.options[:maximum].is_a?(Proc)
validator.options[:maximum].is_a?(Proc)
end
# Get the in-built Devise validator on password length.


@ -8,11 +8,9 @@ product_group: product_planning
value_type: number
status: active
time_frame: all
data_source: redis
instrumentation_class: RedisMetric
options:
prefix: design_management_designs
event: create
data_source: internal_events
events:
- name: create_design_management_design
distribution:
- ce
- ee


@ -8,11 +8,9 @@ product_group: product_planning
value_type: number
status: active
time_frame: all
data_source: redis
instrumentation_class: RedisMetric
options:
prefix: design_management_designs
event: update
data_source: internal_events
events:
- name: update_design_management_design
distribution:
- ce
- ee


@ -8,11 +8,9 @@ product_group: product_planning
value_type: number
status: active
time_frame: all
data_source: redis
instrumentation_class: RedisMetric
options:
prefix: design_management_designs
event: delete
data_source: internal_events
events:
- name: delete_design_management_design
distribution:
- ce
- ee


@ -701,6 +701,8 @@
- 1
- - search_zoekt_delete_project
- 1
- - search_zoekt_indexing_task
- 1
- - search_zoekt_namespace_indexer
- 1
- - search_zoekt_project_transfer


@ -1,132 +1,219 @@
# frozen_string_literal: true
FF_SEE_DOC = "See the [feature flag documentation](https://docs.gitlab.com/ee/development/feature_flags#feature-flag-definition-and-validation)."
FEATURE_FLAG_LABEL = "feature flag"
FEATURE_FLAG_EXISTS_LABEL = "#{FEATURE_FLAG_LABEL}::exists"
FEATURE_FLAG_SKIPPED_LABEL = "#{FEATURE_FLAG_LABEL}::skipped"
DEVOPS_LABELS_REQUIRING_FEATURE_FLAG_REVIEW = ["devops::verify"]
module Tooling
class FeatureFlagDangerfile
SEE_DOC = "See the [feature flag documentation](https://docs.gitlab.com/ee/development/feature_flags#feature-flag-definition-and-validation)."
FEATURE_FLAG_LABEL = "feature flag"
FEATURE_FLAG_EXISTS_LABEL = "#{FEATURE_FLAG_LABEL}::exists".freeze
FEATURE_FLAG_SKIPPED_LABEL = "#{FEATURE_FLAG_LABEL}::skipped".freeze
DEVOPS_LABELS_REQUIRING_FEATURE_FLAG_REVIEW = ["devops::verify"].freeze
FF_SUGGEST_MR_COMMENT = <<~SUGGEST_COMMENT
```suggestion
group: "%<group>s"
```
SUGGEST_MR_COMMENT = <<~SUGGEST_COMMENT.freeze
```suggestion
group: "%<group>s"
```
#{FF_SEE_DOC}
SUGGEST_COMMENT
#{SEE_DOC}
SUGGEST_COMMENT
FEATURE_FLAG_ENFORCEMENT_WARNING = <<~WARNING_MESSAGE
There were no new or modified feature flag YAML files detected in this MR.
FEATURE_FLAG_ENFORCEMENT_WARNING = <<~WARNING_MESSAGE.freeze
There were no new or modified feature flag YAML files detected in this MR.
If the changes here are already controlled under an existing feature flag, please add
the ~"#{FEATURE_FLAG_EXISTS_LABEL}". Otherwise, if you think the changes here don't need
to be under a feature flag, please add the label ~"#{FEATURE_FLAG_SKIPPED_LABEL}", and
add a short comment about why we skipped the feature flag.
If the changes here are already controlled under an existing feature flag, please add
the ~"#{FEATURE_FLAG_EXISTS_LABEL}". Otherwise, if you think the changes here don't need
to be under a feature flag, please add the label ~"#{FEATURE_FLAG_SKIPPED_LABEL}", and
add a short comment about why we skipped the feature flag.
For guidance on when to use a feature flag, please see the [documentation](https://about.gitlab.com/handbook/product-development-flow/feature-flag-lifecycle/#when-to-use-feature-flags).
WARNING_MESSAGE
For guidance on when to use a feature flag, please see the [documentation](https://about.gitlab.com/handbook/product-development-flow/feature-flag-lifecycle/#when-to-use-feature-flags).
WARNING_MESSAGE
def check_feature_flag_yaml(feature_flag)
mr_group_label = helper.group_label
def initialize(context:, added_files:, modified_files:, helper:)
@context = context
@added_files = added_files
@modified_files = modified_files
@helper = helper
end
if feature_flag.group.nil?
message_for_feature_flag_missing_group!(feature_flag: feature_flag, mr_group_label: mr_group_label)
else
message_for_feature_flag_with_group!(feature_flag: feature_flag, mr_group_label: mr_group_label)
end
rescue Psych::Exception
# YAML could not be parsed, fail the build.
fail "#{helper.html_link(feature_flag.path)} isn't valid YAML! #{FF_SEE_DOC}"
rescue StandardError => e
warn "There was a problem trying to check the Feature Flag file. Exception: #{e.class.name} - #{e.message}"
end
def check_touched_feature_flag_files
touched_feature_flag_files.each do |feature_flag|
check_feature_flag_yaml(feature_flag)
end
end
def message_for_feature_flag_missing_group!(feature_flag:, mr_group_label:)
if mr_group_label.nil?
warn "Consider setting `group` in #{helper.html_link(feature_flag.path)}. #{FF_SEE_DOC}"
else
mr_line = feature_flag.raw.lines.find_index("group:\n")
def feature_flag_file_touched?
touched_feature_flag_files.any?
end
if mr_line
markdown(format(FF_SUGGEST_MR_COMMENT, group: mr_group_label), file: feature_flag.path, line: mr_line.succ)
else
warn %(Consider setting `group: "#{mr_group_label}"` in #{helper.html_link(feature_flag.path)}. #{FF_SEE_DOC})
def mr_has_backend_or_frontend_changes?
changes = helper.changes_by_category
changes.has_key?(:backend) || changes.has_key?(:frontend)
end
def stage_requires_feature_flag_review?
DEVOPS_LABELS_REQUIRING_FEATURE_FLAG_REVIEW.include?(helper.stage_label)
end
def mr_missing_feature_flag_status_label?
([FEATURE_FLAG_EXISTS_LABEL, FEATURE_FLAG_SKIPPED_LABEL] & helper.mr_labels).none?
end
private
attr_reader :context, :added_files, :modified_files, :helper
def check_feature_flag_yaml(feature_flag)
unless feature_flag.valid?
context.failure("#{helper.html_link(feature_flag.path)} isn't valid YAML! #{SEE_DOC}")
return
end
check_group(feature_flag)
check_feature_issue_url(feature_flag)
# Note: we don't check introduced_by_url as it's already done by danger/config_files/Dangerfile
check_rollout_issue_url(feature_flag)
check_milestone(feature_flag)
check_default_enabled(feature_flag)
end
def touched_feature_flag_files
added_files + modified_files
end
def check_group(feature_flag)
mr_group_label = helper.group_label
if feature_flag.missing_group?
message_for_feature_flag_missing_group!(feature_flag: feature_flag, mr_group_label: mr_group_label)
else
message_for_feature_flag_with_group!(feature_flag: feature_flag, mr_group_label: mr_group_label)
end
end
def message_for_feature_flag_missing_group!(feature_flag:, mr_group_label:)
if mr_group_label.nil?
context.failure("Please specify a valid `group` label in #{helper.html_link(feature_flag.path)}. #{SEE_DOC}")
return
end
add_message_on_line(
feature_flag: feature_flag,
needle: "group:",
note: format(SUGGEST_MR_COMMENT, group: mr_group_label),
fallback_note: %(Please add `group: "#{mr_group_label}"` in #{helper.html_link(feature_flag.path)}. #{SEE_DOC}),
message_method: :failure
)
end
def message_for_feature_flag_with_group!(feature_flag:, mr_group_label:)
return if feature_flag.group_match_mr_label?(mr_group_label)
if mr_group_label.nil?
helper.labels_to_add << feature_flag.group
else
note = <<~FAILURE_MESSAGE
`group` is set to ~"#{feature_flag.group}" in #{helper.html_link(feature_flag.path)},
which does not match ~"#{mr_group_label}" set on the MR!
FAILURE_MESSAGE
add_message_on_line(
feature_flag: feature_flag,
needle: "group:",
note: note,
message_method: :failure
)
end
end
def check_feature_issue_url(feature_flag)
return unless feature_flag.missing_feature_issue_url?
add_message_on_line(
feature_flag: feature_flag,
needle: "feature_issue_url:",
note: "Consider filling `feature_issue_url:`"
)
end
def add_message_on_line(feature_flag:, needle:, note:, fallback_note: note, message_method: :message)
mr_line = feature_flag.find_line_index(needle)
# rubocop:disable GitlabSecurity/PublicSend -- we allow calling context.message, context.warning & context.failure
if mr_line
context.public_send(message_method, note, file: feature_flag.path, line: mr_line.succ)
else
context.public_send(message_method, fallback_note)
end
# rubocop:enable GitlabSecurity/PublicSend
end
def check_rollout_issue_url(feature_flag)
return unless ::Feature::Shared::TYPES.dig(feature_flag.name.to_sym, :rollout_issue)
return unless feature_flag.missing_rollout_issue_url?
missing_field_error(feature_flag: feature_flag, field: :rollout_issue_url)
end
def check_milestone(feature_flag)
return unless feature_flag.missing_milestone?
missing_field_error(feature_flag: feature_flag, field: :milestone)
end
def check_default_enabled(feature_flag)
return unless feature_flag.default_enabled?
if ::Feature::Shared.can_be_default_enabled?(feature_flag.type)
note = <<~SUGGEST_COMMENT
You're about to [release the feature with the feature flag](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/Feature%20Flag%20Roll%20Out.md#optional-release-the-feature-with-the-feature-flag).
This process can only be done **after** the [global rollout on production](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/Feature%20Flag%20Roll%20Out.md#global-rollout-on-production).
Please make sure in [the rollout issue](#{feature_flag.rollout_issue_url}) that the preliminary steps have already been done. Otherwise, changing the YAML definition might not have the desired effect.
SUGGEST_COMMENT
mr_line = feature_flag.find_line_index("default_enabled: true")
context.markdown(note, file: feature_flag.path, line: mr_line.succ) if mr_line
else
context.failure(
"[Feature flag with the `#{feature_flag.type}` type must not be enabled by default](https://docs.gitlab.com/ee/development/feature_flags/##{feature_flag.type}-type). " \
"Consider changing the feature flag type if it's ready to be enabled by default."
)
end
end
def missing_field_error(feature_flag:, field:)
note = <<~MISSING_FIELD_ERROR
[Feature flag with the `#{feature_flag.type}` type must have `:#{field}` set](https://docs.gitlab.com/ee/development/feature_flags/##{feature_flag.type}-type).
MISSING_FIELD_ERROR
mr_line = feature_flag.find_line_index("#{field}:")
if mr_line
context.message(note, file: feature_flag.path, line: mr_line.succ)
else
context.message(note)
end
end
end
end
def message_for_global_rollout(feature_flag)
return unless feature_flag.default_enabled == true
feature_flag_dangerfile = Tooling::FeatureFlagDangerfile.new(
context: self,
added_files: feature_flag.feature_flag_files(danger_helper: helper, change_type: :added),
modified_files: feature_flag.feature_flag_files(danger_helper: helper, change_type: :modified),
helper: helper
)
message = <<~SUGGEST_COMMENT
You're about to [release the feature with the feature flag](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/Feature%20Flag%20Roll%20Out.md#optional-release-the-feature-with-the-feature-flag).
This process can only be done **after** the [global rollout on production](https://gitlab.com/gitlab-org/gitlab/-/blob/master/.gitlab/issue_templates/Feature%20Flag%20Roll%20Out.md#global-rollout-on-production).
Please make sure in [the rollout issue](#{feature_flag.rollout_issue_url}) that the preliminary steps have already been done. Otherwise, changing the YAML definition might not have the desired effect.
SUGGEST_COMMENT
feature_flag_dangerfile.check_touched_feature_flag_files
mr_line = feature_flag.raw.lines.find_index { |l| l.include?('default_enabled:') }
markdown(message, file: feature_flag.path, line: mr_line.succ)
if helper.security_mr? && feature_flag_dangerfile.feature_flag_file_touched?
failure("Feature flags are discouraged from security merge requests. Read the [security documentation](https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/security/utilities/feature_flags.md) for details.")
end
def message_for_feature_flag_with_group!(feature_flag:, mr_group_label:)
return if feature_flag.group_match_mr_label?(mr_group_label)
if mr_group_label.nil?
helper.labels_to_add << feature_flag.group
else
fail %(`group` is set to ~"#{feature_flag.group}" in #{helper.html_link(feature_flag.path)}, which does not match ~"#{mr_group_label}" set on the MR!)
end
end
def added_feature_flag_files
feature_flag.feature_flag_files(change_type: :added)
end
def modified_feature_flag_files
feature_flag.feature_flag_files(change_type: :modified)
end
def feature_flag_file_added?
added_feature_flag_files.any?
end
def feature_flag_file_modified?
modified_feature_flag_files.any?
end
def feature_flag_file_added_or_modified?
feature_flag_file_added? || feature_flag_file_modified?
end
def mr_has_backend_or_frontend_changes?
changes = helper.changes_by_category
changes.has_key?(:backend) || changes.has_key?(:frontend)
end
def mr_missing_feature_flag_status_label?
([FEATURE_FLAG_EXISTS_LABEL, FEATURE_FLAG_SKIPPED_LABEL] & helper.mr_labels).none?
end
def stage_requires_feature_flag_review?
DEVOPS_LABELS_REQUIRING_FEATURE_FLAG_REVIEW.include?(helper.stage_label)
end
added_feature_flag_files.each do |feature_flag|
check_feature_flag_yaml(feature_flag)
end
modified_feature_flag_files.each do |feature_flag|
message_for_global_rollout(feature_flag)
end
if helper.security_mr? && feature_flag_file_added?
fail "Feature flags are discouraged from security merge requests. Read the [security documentation](https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/security/utilities/feature_flags.md) for details."
end
if !helper.security_mr? && mr_has_backend_or_frontend_changes? && stage_requires_feature_flag_review?
if feature_flag_file_added_or_modified? && !helper.mr_has_labels?(FEATURE_FLAG_EXISTS_LABEL)
if !helper.security_mr? && feature_flag_dangerfile.mr_has_backend_or_frontend_changes? && feature_flag_dangerfile.stage_requires_feature_flag_review?
if feature_flag_dangerfile.feature_flag_file_touched? && !helper.mr_has_labels?(Tooling::FeatureFlagDangerfile::FEATURE_FLAG_EXISTS_LABEL)
# Feature flag config file touched in this MR, so let's add the label to avoid the warning.
helper.labels_to_add << FEATURE_FLAG_EXISTS_LABEL
helper.labels_to_add << Tooling::FeatureFlagDangerfile::FEATURE_FLAG_EXISTS_LABEL
end
warn FEATURE_FLAG_ENFORCEMENT_WARNING if mr_missing_feature_flag_status_label?
if feature_flag_dangerfile.mr_missing_feature_flag_status_label?
warn(Tooling::FeatureFlagDangerfile::FEATURE_FLAG_ENFORCEMENT_WARNING)
end
end


@ -6,7 +6,7 @@ utilities = 'app/assets/stylesheets/utilities.scss'
def get_css_files(files, common_filepath, utilities_filepath)
files.select do |file|
file.include?(common_filepath) ||
file.include?(utilities_filepath)
file.include?(utilities_filepath)
end
end


@ -0,0 +1,10 @@
---
table_name: ai_self_hosted_models
classes:
- Ai::SelfHostedModel
feature_categories:
- custom_models
description: An AI Self Hosted Model definition
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/151793
milestone: '17.0'
gitlab_schema: gitlab_main_clusterwide


@ -0,0 +1,16 @@
# frozen_string_literal: true
class CreateAiSelfHostedModels < Gitlab::Database::Migration[2.2]
milestone '17.0'
def change
create_table :ai_self_hosted_models do |t|
t.timestamps_with_timezone null: false
t.integer :model, limit: 2, null: false
t.text :endpoint, limit: 2048, null: false
t.text :name, limit: 255, null: false, index: { unique: true }
t.binary :encrypted_api_token
t.binary :encrypted_api_token_iv
end
end
end


@ -0,0 +1,37 @@
# frozen_string_literal: true
class SwapColumnsForPCiBuildsTriggerRequestAndErasedBy < Gitlab::Database::Migration[2.2]
include ::Gitlab::Database::MigrationHelpers::Swapping
milestone '17.1'
disable_ddl_transaction!
TABLE = :p_ci_builds
COLUMNS = [
{ name: :trigger_request_id_convert_to_bigint, old_name: :trigger_request_id },
{ name: :erased_by_id_convert_to_bigint, old_name: :erased_by_id }
]
TRIGGER_FUNCTION = :trigger_10ee1357e825
def up
with_lock_retries(raise_on_exhaustion: true) do
swap # rubocop:disable Migration/WithLockRetriesDisallowedMethod -- custom implementation
end
end
def down
with_lock_retries(raise_on_exhaustion: true) do
swap # rubocop:disable Migration/WithLockRetriesDisallowedMethod -- custom implementation
end
end
private
def swap
lock_tables(TABLE)
COLUMNS.each do |column|
swap_columns(TABLE, column[:name], column[:old_name])
end
reset_trigger_function(TRIGGER_FUNCTION)
end
end


@ -0,0 +1,22 @@
# frozen_string_literal: true
class AddIndexMembersOnLowerInviteEmailWithToken < Gitlab::Database::Migration[2.2]
milestone '17.1'
OLD_INDEX_NAME = 'index_members_on_lower_invite_email'
INDEX_NAME = 'index_members_on_lower_invite_email_with_token'
disable_ddl_transaction!
def up
add_concurrent_index :members, '(lower(invite_email))', where: 'invite_token IS NOT NULL', name: INDEX_NAME
remove_concurrent_index_by_name :members, OLD_INDEX_NAME
end
def down
add_concurrent_index :members, '(lower(invite_email))', name: OLD_INDEX_NAME
remove_concurrent_index_by_name :members, INDEX_NAME
end
end


@ -0,0 +1,34 @@
# frozen_string_literal: true
class PrepareAsyncIndexForCiPipelinePartitionId < Gitlab::Database::Migration[2.2]
milestone '17.1'
disable_ddl_transaction!
TABLE = :ci_pipelines
INDEXES = [
{
name: :index_ci_pipelines_on_id_and_partition_id,
columns: [:id, :partition_id],
options: { unique: true }
},
{
name: :index_ci_pipelines_on_project_id_and_iid_and_partition_id,
columns: [:project_id, :iid, :partition_id],
options: { unique: true, where: 'iid IS NOT NULL' }
}
]
def up
INDEXES.each do |definition|
name, columns, options = definition.values_at(:name, :columns, :options)
prepare_async_index(TABLE, columns, name: name, **options)
end
end
def down
INDEXES.each do |definition|
name, columns, options = definition.values_at(:name, :columns, :options)
unprepare_async_index(TABLE, columns, name: name, **options)
end
end
end


@ -0,0 +1,58 @@
# frozen_string_literal: true
class SwapVulnerabilityOccurrencePipelinesPipelineIdConvertToBigint < Gitlab::Database::Migration[2.2]
include Gitlab::Database::MigrationHelpers::Swapping
milestone '17.1'
disable_ddl_transaction!
TABLE_NAME = 'vulnerability_occurrence_pipelines'
COLUMN_NAME = 'pipeline_id'
COLUMN_TO_BIGINT_NAME = 'pipeline_id_convert_to_bigint'
INDEX_TO_BIGINT_NAME = 'index_vulnerability_occurrence_pipelines_on_pipeline_id_bigint'
INDEX_NAME = 'index_vulnerability_occurrence_pipelines_on_pipeline_id'
UNIQUE_KEYS_INDEX_NAME_BIGINT = 'vulnerability_occurrence_pipelines_on_unique_keys_bigint'
UNIQUE_KEYS_INDEX_NAME = 'vulnerability_occurrence_pipelines_on_unique_keys'
UNIQUE_INDEX_COLUMNS = %w[occurrence_id pipeline_id_convert_to_bigint]
TRIGGER_NAME = :trigger_2ac3d66ed1d3
def up
add_concurrent_index TABLE_NAME, COLUMN_TO_BIGINT_NAME, name: INDEX_TO_BIGINT_NAME
add_concurrent_index TABLE_NAME, UNIQUE_INDEX_COLUMNS,
name: UNIQUE_KEYS_INDEX_NAME_BIGINT,
unique: true
swap
end
def down
add_concurrent_index TABLE_NAME, COLUMN_TO_BIGINT_NAME, name: INDEX_TO_BIGINT_NAME
add_concurrent_index TABLE_NAME, UNIQUE_INDEX_COLUMNS,
name: UNIQUE_KEYS_INDEX_NAME_BIGINT,
unique: true
swap
end
def swap
with_lock_retries(raise_on_exhaustion: true) do
# Not locking ci_pipelines as it's an LFK column
lock_tables(TABLE_NAME)
swap_columns(TABLE_NAME, COLUMN_NAME, COLUMN_TO_BIGINT_NAME)
reset_trigger_function(TRIGGER_NAME)
change_column_default TABLE_NAME, COLUMN_TO_BIGINT_NAME, 0
change_column_default TABLE_NAME, COLUMN_NAME, nil
execute "DROP INDEX #{INDEX_NAME}"
rename_index TABLE_NAME, INDEX_TO_BIGINT_NAME, INDEX_NAME
execute "DROP INDEX #{UNIQUE_KEYS_INDEX_NAME}"
rename_index TABLE_NAME, UNIQUE_KEYS_INDEX_NAME_BIGINT, UNIQUE_KEYS_INDEX_NAME
end
end
end


@ -0,0 +1 @@
6e174bfc24df22fdef6dd45151a18ba918160f8ccc10ae63bce4976bd4f8a12e


@ -0,0 +1 @@
71dace2cf277ea15641a14a1460e35faf43002fc18d64f022c481fc546cc6b57


@ -0,0 +1 @@
9e383a3d9750d101c6b9fc185f20c5e81e21806ab238e55de9315856f292091f


@ -0,0 +1 @@
ec882df025640d6c08006027aa08f8ec5d987259ed9dcd3de4eaaea8cd20d533


@ -0,0 +1 @@
4eed015cddbe337bda02a94b26398f14f885ab11c78bb08a29c467767788e309


@ -1122,7 +1122,7 @@ CREATE TABLE p_ci_builds (
options text,
allow_failure boolean DEFAULT false NOT NULL,
stage character varying,
trigger_request_id integer,
trigger_request_id_convert_to_bigint integer,
stage_idx integer,
tag boolean,
ref character varying,
@ -1131,7 +1131,7 @@ CREATE TABLE p_ci_builds (
target_url character varying,
description character varying,
project_id_convert_to_bigint integer,
erased_by_id integer,
erased_by_id_convert_to_bigint integer,
erased_at timestamp without time zone,
artifacts_expire_at timestamp without time zone,
environment character varying,
@ -1157,10 +1157,10 @@ CREATE TABLE p_ci_builds (
auto_canceled_by_partition_id bigint DEFAULT 100 NOT NULL,
auto_canceled_by_id bigint,
commit_id bigint,
erased_by_id_convert_to_bigint bigint,
erased_by_id bigint,
project_id bigint,
runner_id bigint,
trigger_request_id_convert_to_bigint bigint,
trigger_request_id bigint,
upstream_pipeline_id bigint,
user_id bigint,
execution_config_id bigint,
@ -3593,6 +3593,28 @@ CREATE SEQUENCE ai_agents_id_seq
ALTER SEQUENCE ai_agents_id_seq OWNED BY ai_agents.id;
CREATE TABLE ai_self_hosted_models (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
model smallint NOT NULL,
endpoint text NOT NULL,
name text NOT NULL,
encrypted_api_token bytea,
encrypted_api_token_iv bytea,
CONSTRAINT check_a28005edb2 CHECK ((char_length(endpoint) <= 2048)),
CONSTRAINT check_cccb37e0de CHECK ((char_length(name) <= 255))
);
CREATE SEQUENCE ai_self_hosted_models_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE ai_self_hosted_models_id_seq OWNED BY ai_self_hosted_models.id;
CREATE TABLE ai_vectorizable_files (
id bigint NOT NULL,
created_at timestamp with time zone NOT NULL,
@ -6267,7 +6289,7 @@ CREATE TABLE ci_builds (
options text,
allow_failure boolean DEFAULT false NOT NULL,
stage character varying,
trigger_request_id integer,
trigger_request_id_convert_to_bigint integer,
stage_idx integer,
tag boolean,
ref character varying,
@ -6276,7 +6298,7 @@ CREATE TABLE ci_builds (
target_url character varying,
description character varying,
project_id_convert_to_bigint integer,
erased_by_id integer,
erased_by_id_convert_to_bigint integer,
erased_at timestamp without time zone,
artifacts_expire_at timestamp without time zone,
environment character varying,
@ -6302,10 +6324,10 @@ CREATE TABLE ci_builds (
auto_canceled_by_partition_id bigint DEFAULT 100 NOT NULL,
auto_canceled_by_id bigint,
commit_id bigint,
erased_by_id_convert_to_bigint bigint,
erased_by_id bigint,
project_id bigint,
runner_id bigint,
trigger_request_id_convert_to_bigint bigint,
trigger_request_id bigint,
upstream_pipeline_id bigint,
user_id bigint,
execution_config_id bigint,
@ -17823,8 +17845,8 @@ CREATE TABLE vulnerability_occurrence_pipelines (
created_at timestamp with time zone NOT NULL,
updated_at timestamp with time zone NOT NULL,
occurrence_id bigint NOT NULL,
pipeline_id integer NOT NULL,
pipeline_id_convert_to_bigint bigint DEFAULT 0 NOT NULL
pipeline_id_convert_to_bigint integer DEFAULT 0 NOT NULL,
pipeline_id bigint NOT NULL
);
CREATE SEQUENCE vulnerability_occurrence_pipelines_id_seq
@ -18972,6 +18994,8 @@ ALTER TABLE ONLY ai_agent_versions ALTER COLUMN id SET DEFAULT nextval('ai_agent
ALTER TABLE ONLY ai_agents ALTER COLUMN id SET DEFAULT nextval('ai_agents_id_seq'::regclass);
ALTER TABLE ONLY ai_self_hosted_models ALTER COLUMN id SET DEFAULT nextval('ai_self_hosted_models_id_seq'::regclass);
ALTER TABLE ONLY ai_vectorizable_files ALTER COLUMN id SET DEFAULT nextval('ai_vectorizable_files_id_seq'::regclass);
ALTER TABLE ONLY alert_management_alert_assignees ALTER COLUMN id SET DEFAULT nextval('alert_management_alert_assignees_id_seq'::regclass);
@ -20710,6 +20734,9 @@ ALTER TABLE ONLY ai_agent_versions
ALTER TABLE ONLY ai_agents
ADD CONSTRAINT ai_agents_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ai_self_hosted_models
ADD CONSTRAINT ai_self_hosted_models_pkey PRIMARY KEY (id);
ALTER TABLE ONLY ai_vectorizable_files
ADD CONSTRAINT ai_vectorizable_files_pkey PRIMARY KEY (id);
@ -24440,6 +24467,8 @@ CREATE INDEX index_ai_agent_versions_on_project_id ON ai_agent_versions USING bt
CREATE UNIQUE INDEX index_ai_agents_on_project_id_and_name ON ai_agents USING btree (project_id, name);
CREATE UNIQUE INDEX index_ai_self_hosted_models_on_name ON ai_self_hosted_models USING btree (name);
CREATE INDEX index_ai_vectorizable_files_on_project_id ON ai_vectorizable_files USING btree (project_id);
CREATE INDEX index_alert_assignees_on_alert_id ON alert_management_alert_assignees USING btree (alert_id);
@ -26142,7 +26171,7 @@ CREATE INDEX index_members_on_invite_email ON members USING btree (invite_email)
CREATE UNIQUE INDEX index_members_on_invite_token ON members USING btree (invite_token);
CREATE INDEX index_members_on_lower_invite_email ON members USING btree (lower((invite_email)::text));
CREATE INDEX index_members_on_lower_invite_email_with_token ON members USING btree (lower((invite_email)::text)) WHERE (invite_token IS NOT NULL);
CREATE INDEX index_members_on_member_namespace_id_compound ON members USING btree (member_namespace_id, type, requested_at, id);


@ -1802,6 +1802,7 @@ Parameters:
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `hostname` | string | false | Custom hostname of the Telegram API ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/461313) in GitLab 17.1). The default value is `https://api.telegram.org`. |
| `token` | string | true | The Telegram bot token (for example, `123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11`). |
| `room` | string | true | Unique identifier for the target chat or the username of the target channel (in the format `@channelusername`). |
| `thread` | integer | false | Unique identifier for the target message thread (topic in a forum supergroup). [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/441097) in GitLab 16.11. |
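For illustration only (not part of this commit), setting the new field over the REST API could look like the following Ruby sketch. The project ID and tokens are placeholders, and the `PUT /projects/:id/integrations/telegram` path is assumed from the surrounding integrations API documentation:

```ruby
require 'net/http'
require 'uri'

# Hypothetical values; replace with a real project ID, personal access token, and bot token.
uri = URI('https://gitlab.example.com/api/v4/projects/42/integrations/telegram')

request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = 'glpat-REDACTED'
request.set_form_data(
  'hostname' => 'http://localhost:8081', # optional; defaults to https://api.telegram.org
  'token'    => '123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11',
  'room'     => '@channelusername'
)

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end
puts "#{response.code} #{response.message}"
```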


@ -137,6 +137,7 @@ export const dashboard = {
slug: 'my_dashboard', // Used to set the URL path for the dashboard.
title: 'My dashboard title', // The title to display.
description: 'This is a description of the dashboard', // A description of the dashboard
userDefined: true, // The dashboard editor is only available when true.
// Each dashboard consists of an array of panels to display.
panels: [
{
@ -173,12 +174,14 @@ Here is an example component that renders a customizable dashboard:
```vue
<script>
import CustomizableDashboard from 'ee/vue_shared/components/customizable_dashboard/customizable_dashboard.vue';
import PanelsBase from `ee/vue_shared/components/customizable_dashboard/panels_base.vue`;
import { dashboard } from './constants';
export default {
name: 'AnalyticsDashboard',
name: 'MyCustomDashboard',
components: {
CustomizableDashboard,
PanelsBase,
},
data() {
return {
@ -199,6 +202,14 @@ export default {
showDateRangeFilter: true,
// The maximum size of the date range allowed in days. 0 for unlimited.
dateRangeLimit: 0,
// Array of GlDisclosureDropdown items to show on each panel when editing
panelActions: [
{
text: __('Delete'),
action: () => this.$emit('delete'),
icon: 'remove',
},
],
};
},
};
@ -212,6 +223,20 @@ export default {
:show-date-range-filter="showDateRangeFilter"
:date-range-limit="dateRangeLimit"
/>
<template #panel="{ panel, filters, editing, deletePanel }">
<!-- Panels base provides a styled wrapper for your visualizations. -->
<panels-base
:title="panel.title"
:editing="editing"
:actions="panelActions"
@delete="deletePanel"
>
<template #body>
<!-- Render the panel's visualization here -->
</template>
</panels-base>
</template>
</customizable-dashboard>
</template>
```
@ -220,25 +245,26 @@ export default {
> - Introduced in GitLab 16.1 [with a flag](../../administration/feature_flags.md) named `combined_analytics_dashboards_editor`. Disabled by default.
> - [Generally available](https://gitlab.com/gitlab-org/gitlab/-/issues/411407) in GitLab 16.6. Feature flag `combined_analytics_dashboards_editor` removed.
The dashboard designer provides a graphical interface for users to modify the
panels and add new ones on user-defined dashboards. It is not available on
GitLab hardcoded dashboards.
The CustomizableDashboard component provides a graphical interface for users to
modify panels of existing dashboards and create new dashboards.
NOTE:
The dashboard designer is in the early experimental stage and subject to
change.
The dashboard editor is only available when `dashboard.userDefined` is `true`.
```vue
<script>
import CustomizableDashboard from 'ee/vue_shared/components/customizable_dashboard/customizable_dashboard.vue';
import { s__ } from '~/locale';
import { dashboard } from './constants';
export const I18N_MY_NEW_CATEGORY = s__('Namespace|My data source');
export default {
name: 'AnalyticsDashboard',
name: 'MyCustomDashboard',
data() {
return {
...,
// The initial saved dashboard. Used to track changes.
initialDashboard: dashboard,
// Set to true to render the dashboard saving state.
isSaving: false,
// A list of available visualizations categorized by feature.
@ -263,15 +289,11 @@ export default {
* @param {String} newDashboardObject The newly modified dashboard object.
*/
saveDashboard(dashboardId, newDashboardObject) {
// Save changes and modify `this.dashboard`.
},
/**
* Event handler for when a user adds a visualization in a new panel.
* @param {String} visualizationId The ID (usually filename) of the visualization.
* @param {String} visualizationSource The source to get the new visualization config.
*/
addNewPanel(visualizationId, visualizationSource) {
// Load the visualization and push a new panel onto `this.dashboard.panels`.
this.isSaving = true;
// Save changes somewhere.
// Then update the saved dashboard version
this.initialDashboard = newDashboardObject;
this.isSaving = false;
},
},
}
@ -279,11 +301,14 @@ export default {
<template>
<customizable-dashboard
...
:initial-dashboard="initialDashboard"
:available-visualizations="availableVisualizations"
:is-saving="isSaving"
@save="handleSave"
@add-panel="handleAddPanel"
@save="saveDashboard"
/>
<template #panel="{ panel, filters, editing, deletePanel }">
<my-dashboard-panel :panel="panel" />
</template>
</customizable-dashboard>
</template>
```


@ -41,6 +41,7 @@ To configure the bot in Telegram:
## Set up the Telegram integration in GitLab
> - **Message thread ID** [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/441097) in GitLab 16.11.
> - **Hostname** [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/461313) in GitLab 17.1.
After you invite the bot to a Telegram channel, you can configure GitLab to send notifications:
@ -53,6 +54,7 @@ After you invite the bot to a Telegram channel, you can configure GitLab to send
1. Select **Settings > Integrations**.
1. Select **Telegram**.
1. Under **Enable integration**, select the **Active** checkbox.
1. Optional. In **Hostname**, enter the hostname of your [local bot API server](https://core.telegram.org/bots/api#using-a-local-bot-api-server).
1. In **Token**, [paste the token value from the Telegram bot](#create-a-telegram-bot).
1. In the **Trigger** section, select the checkboxes for the GitLab events you want to receive in Telegram.
1. In the **Notification settings** section:


@ -136,6 +136,63 @@ To switch between the two settings, select either **Issues** or **Issue weight**
When sorting by weight, make sure all your issues
have weight assigned, because issues with no weight don't show on the chart.
## Roll up weights
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/381879) in GitLab 16.11 [with a flag](../../../administration/feature_flags.md) named `rollup_timebox_chart`. Disabled by default.
FLAG:
On self-managed GitLab, by default this feature is not available. To make it available, an administrator can [enable the feature flag](../../../administration/feature_flags.md) named `rollup_timebox_chart`.
On GitLab.com and GitLab Dedicated, this feature is not available.
This feature is not ready for production use.
With [tasks](../../tasks.md), more granular planning is possible.
If this feature is enabled, the weight of issues that have tasks is derived from the tasks in the
same milestone.
Issues with tasks are not counted separately in burndown or burnup charts.
How issue weight is counted in charts:
- If an issue's tasks do not have weights assigned, the issue's weight is used instead.
- If an issue has multiple tasks, and some tasks are completed in a prior iteration, only tasks in
this iteration are shown and counted.
- If a task is directly assigned to an iteration, without its parent, it's the top level item and
contributes its own weight. The parent issue is not shown.
### Weight rollup examples
**Example 1**
- Issue has weight 5 and is assigned to Milestone 2.
- Task 1 has weight 2 and is assigned to Milestone 1.
- Task 2 has weight 2 and is assigned to Milestone 2.
- Task 3 has weight 2 and is assigned to Milestone 2.
The charts for Milestone 1 would show Task 1 as having weight 2.
The charts for Milestone 2 would show Issue as having weight 4.
**Example 2**
- Issue has weight 5 and is assigned to Milestone 2.
- Task 1 is assigned to Milestone 1 without any weight.
- Task 2 is assigned to Milestone 2 without any weight.
- Task 3 is assigned to Milestone 2 without any weight.
The charts for Milestone 1 would show Task 1 as having weight 0.
The charts for Milestone 2 would show Issue as having weight 5.
**Example 3**
- Issue is assigned to Milestone 2 without any weight.
- Task 1 has weight 2 and is assigned to Milestone 1
- Task 2 has weight 2 and is assigned to Milestone 2
- Task 3 has weight 2 and is assigned to Milestone 2
The charts for Milestone 1 would show Task 1 as having weight 2.
The charts for Milestone 2 would show Issue as having weight 4.
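The rules illustrated by these examples can be condensed into a small sketch (illustrative only; it models the documented behavior, not GitLab's actual rollup service):

```ruby
# Illustrative model of the documented rollup rules.
Item = Struct.new(:title, :weight, :milestone, :tasks, keyword_init: true)

# Weight shown for an issue on a given milestone's chart: the summed weights of
# its tasks in that milestone, or the issue's own weight when none of those
# tasks have weights assigned.
def rolled_up_weight(issue, milestone)
  tasks_here = (issue.tasks || []).select { |task| task.milestone == milestone }
  weights = tasks_here.map(&:weight).compact
  weights.any? ? weights.sum : issue.weight.to_i
end

# Example 1 above:
issue = Item.new(title: 'Issue', weight: 5, milestone: 2, tasks: [
  Item.new(title: 'Task 1', weight: 2, milestone: 1),
  Item.new(title: 'Task 2', weight: 2, milestone: 2),
  Item.new(title: 'Task 3', weight: 2, milestone: 2)
])

rolled_up_weight(issue, 2) # => 4, as shown on the Milestone 2 chart
# Task 1 appears on the Milestone 1 chart as its own item with weight 2.
```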
## Troubleshooting
### Burndown and burnup charts do not show the correct issue status


@ -4,6 +4,18 @@ require 'gitlab-http'
require_relative 'helpers/groups'
Gitlab::HTTP_V2.configure do |config|
config.allowed_internal_uris = []
config.log_exception_proc = ->(exception, extra_info) do
p exception
p extra_info
end
config.silent_mode_log_info_proc = ->(message, http_method) do
p message
p http_method
end
end
module Keeps
# This is an implementation of a ::Gitlab::Housekeeper::Keep.
# This keep will fetch any `test` + `failure::flaky-test` + `flakiness::1` issues,
@ -91,7 +103,7 @@ module Keeps
end
def get(url)
http_response = Gitlab::HTTP.get(
http_response = Gitlab::HTTP_V2.get(
url,
headers: {
'User-Agent' => "GitLab-Housekeeper/#{self.class.name}",
@ -140,14 +152,23 @@ module Keeps
change.identifiers = [self.class.name.demodulize, filename, line_number.to_s]
change.changed_files = [filename]
change.description = <<~MARKDOWN
The #{description} test has the `flakiness::1` label set, which means it has more than 1000 flakiness reports.
The #{description}
test has the ~"flakiness::1" label set, which means it has
more than 1000 flakiness reports.
This MR quarantines the test. This is a discussion starting point to let the responsible group know about the flakiness
so that they can take action:
This MR quarantines the test. This is a discussion starting point to let the
responsible group know about the flakiness so that they can take action:
- accept the merge request and schedule to improve the test
- accept the merge request and schedule the associated issue to improve the test
- close the merge request in favor of another merge request to delete the test
Please follow the
[Flaky tests management process](https://handbook.gitlab.com/handbook/engineering/infrastructure/engineering-productivity/flaky-tests-management-and-processes/#flaky-tests-management-process)
to help us increase `master` stability.
Please let us know your feedback
[in the dedicated issue](https://gitlab.com/gitlab-org/quality/engineering-productivity/team/-/issues/447).
Related to #{flaky_issue['web_url']}.
MARKDOWN


@ -101,7 +101,7 @@ module API
optional :variable_type, type: String, values: ::Ci::PipelineVariable.variable_types.keys, default: 'env_var', desc: 'The type of variable, must be one of env_var or file. Defaults to env_var'
end
end
post ':id/pipeline', urgency: :low, feature_category: :continuous_integration do
post ':id/pipeline', urgency: :low, feature_category: :pipeline_composition do
Gitlab::QueryLimiting.disable!('https://gitlab.com/gitlab-org/gitlab/-/issues/20711')
authorize! :create_pipeline, user_project
View File
@ -7,7 +7,7 @@ module API
HTTP_GITLAB_EVENT_HEADER = "HTTP_#{::Gitlab::WebHooks::GITLAB_EVENT_HEADER}".underscore.upcase
feature_category :continuous_integration
feature_category :pipeline_composition
urgency :low
params do
View File
@ -576,6 +576,12 @@ module API
}
],
'telegram' => [
{
required: false,
name: :hostname,
type: String,
desc: 'Custom hostname of the Telegram API. The default value is `https://api.telegram.org`.'
},
{
required: true,
name: :token,
View File
@ -558,7 +558,7 @@ module API
]
tags %w[merge_requests]
end
get ':id/merge_requests/:merge_request_iid/pipelines', urgency: :low, feature_category: :continuous_integration do
get ':id/merge_requests/:merge_request_iid/pipelines', urgency: :low, feature_category: :pipeline_composition do
pipelines = merge_request_pipelines_with_access
present paginate(pipelines), with: Entities::Ci::PipelineBasic
end
@ -573,7 +573,7 @@ module API
]
tags %w[merge_requests]
end
post ':id/merge_requests/:merge_request_iid/pipelines', urgency: :low, feature_category: :continuous_integration do
post ':id/merge_requests/:merge_request_iid/pipelines', urgency: :low, feature_category: :pipeline_composition do
pipeline = ::MergeRequests::CreatePipelineService
.new(project: user_project, current_user: current_user, params: { allow_duplicate: true })
.execute(find_merge_request_with_access(params[:merge_request_iid]))
View File
@ -99,5 +99,9 @@ module Feature
group
default_enabled
].freeze
def self.can_be_default_enabled?(feature_flag_type)
TYPES.dig(feature_flag_type.to_sym, :can_be_default_enabled)
end
end
end
View File
@ -5,7 +5,6 @@ module Gitlab
COUNTERS = [
PackageEventCounter,
MergeRequestCounter,
DesignsCounter,
DiffsCounter,
KubernetesAgentCounter,
NoteCounter,
View File
@ -1,8 +0,0 @@
# frozen_string_literal: true
module Gitlab::UsageDataCounters
class DesignsCounter < BaseCounter
KNOWN_EVENTS = %w[create update delete].freeze
PREFIX = 'design_management_designs'
end
end
View File
@ -29,3 +29,6 @@
'{event_counters}_perform_search': ALL_SEARCHES_COUNT
'{event_counters}_perform_navbar_search': NAVBAR_SEARCHES_COUNT
'{event_counters}_source_code_pushed': USAGE_SOURCE_CODE_PUSHES
'{event_counters}_create_design_management_design': USAGE_DESIGN_MANAGEMENT_DESIGNS_CREATE
'{event_counters}_update_design_management_design': USAGE_DESIGN_MANAGEMENT_DESIGNS_UPDATE
'{event_counters}_delete_design_management_design': USAGE_DESIGN_MANAGEMENT_DESIGNS_DELETE
View File
@ -30,16 +30,16 @@ namespace :gitlab do
def mark_migration_complete(version, only_on: nil)
if version.to_i == 0
puts 'Must give a version argument that is a non-zero integer'.color(:red)
puts Rainbow('Must give a version argument that is a non-zero integer').red
exit 1
end
Gitlab::Database::EachDatabase.each_connection(only: only_on) do |connection, name|
connection.execute("INSERT INTO schema_migrations (version) VALUES (#{connection.quote(version)})")
puts "Successfully marked '#{version}' as complete on database #{name}".color(:green)
puts Rainbow("Successfully marked '#{version}' as complete on database #{name}").green
rescue ActiveRecord::RecordNotUnique
puts "Migration version '#{version}' is already marked complete on database #{name}".color(:yellow)
puts Rainbow("Migration version '#{version}' is already marked complete on database #{name}").yellow
end
end
@ -250,7 +250,7 @@ namespace :gitlab do
desc "Reindex database without downtime to eliminate bloat"
task reindex: :environment do
unless Gitlab::Database::Reindexing.enabled?
puts "This feature (database_reindexing) is currently disabled.".color(:yellow)
puts Rainbow("This feature (database_reindexing) is currently disabled.").yellow
exit
end
@ -262,7 +262,7 @@ namespace :gitlab do
desc "Reindex #{database_name} database without downtime to eliminate bloat"
task database_name => :environment do
unless Gitlab::Database::Reindexing.enabled?
puts "This feature (database_reindexing) is currently disabled.".color(:yellow)
puts Rainbow("This feature (database_reindexing) is currently disabled.").yellow
exit
end
@ -274,7 +274,7 @@ namespace :gitlab do
def disabled_db_flags_note
return unless Feature.enabled?(:disallow_database_ddl_feature_flags, type: :ops)
puts <<~NOTE.color(:yellow)
puts Rainbow(<<~NOTE).yellow
Note: disallow_database_ddl_feature_flags feature is currently enabled. Disable it to proceed.
Disable with: Feature.disable(:disallow_database_ddl_feature_flags)
@ -297,7 +297,7 @@ namespace :gitlab do
disabled_db_flags_note
if Feature.disabled?(:database_reindexing, type: :ops)
puts <<~NOTE.color(:yellow)
puts Rainbow(<<~NOTE).yellow
Note: database_reindexing feature is currently disabled.
Enable with: Feature.enable(:database_reindexing)
@ -313,7 +313,7 @@ namespace :gitlab do
disabled_db_flags_note { exit }
if Feature.disabled?(:database_async_index_operations, type: :ops)
puts <<~NOTE.color(:yellow)
puts Rainbow(<<~NOTE).yellow
Note: database async index operations feature is currently disabled.
Enable with: Feature.enable(:database_async_index_operations)
@ -345,7 +345,7 @@ namespace :gitlab do
disabled_db_flags_note { exit }
if Feature.disabled?(:database_async_foreign_key_validation, type: :ops)
puts <<~NOTE.color(:yellow)
puts Rainbow(<<~NOTE).yellow
Note: database async foreign key validation feature is currently disabled.
Enable with: Feature.enable(:database_async_foreign_key_validation)
View File
@ -17,9 +17,9 @@ namespace :gitlab do
end
if failures.empty?
puts "Done".color(:green)
puts Rainbow("Done").green
else
puts "The following repositories reported errors:".color(:red)
puts Rainbow("The following repositories reported errors:").red
failures.each { |f| puts "- #{f}" }
end
end
View File
@ -9017,6 +9017,9 @@ msgstr ""
msgid "BranchRules|Cancel"
msgstr ""
msgid "BranchRules|Changes require a merge request. The following users can push and merge directly."
msgstr ""
msgid "BranchRules|Check for a status response in merge requests. Failures do not block merges. %{linkStart}Learn more.%{linkEnd}"
msgstr ""
@ -9077,6 +9080,15 @@ msgstr ""
msgid "BranchRules|No matching results"
msgstr ""
msgid "BranchRules|No one is allowed to merge changes."
msgstr ""
msgid "BranchRules|No one is allowed to push and merge changes."
msgstr ""
msgid "BranchRules|No status checks have been added."
msgstr ""
msgid "BranchRules|Protect branch"
msgstr ""
@ -59050,12 +59062,18 @@ msgstr ""
msgid "Workspaces|A workspace is a virtual sandbox environment for your code in GitLab."
msgstr ""
msgid "Workspaces|Agents connect workspaces to your Kubernetes cluster. To create a workspace with an allowed agent, group members must have at least the Developer role."
msgstr ""
msgid "Workspaces|Cancel"
msgstr ""
msgid "Workspaces|Cluster agent"
msgstr ""
msgid "Workspaces|Could not load available agents. Refresh the page to try again."
msgstr ""
msgid "Workspaces|Could not load workspaces"
msgstr ""
@ -59095,6 +59113,9 @@ msgstr ""
msgid "Workspaces|GitLab Workspaces is a powerful collaborative platform that provides a comprehensive set of tools for software development teams to manage their entire development lifecycle."
msgstr ""
msgid "Workspaces|Group agents"
msgstr ""
msgid "Workspaces|If your devfile is not in the root directory of your project, specify a relative path."
msgstr ""
@ -59146,6 +59167,9 @@ msgstr ""
msgid "Workspaces|The branch, tag, or commit hash GitLab uses to create your workspace."
msgstr ""
msgid "Workspaces|This group has no available agents. Select the All agents tab and allow at least one agent."
msgstr ""
msgid "Workspaces|To create a workspace, add a devfile to this project. A devfile is a configuration file for your workspace."
msgstr ""
View File
@ -14,7 +14,8 @@
"tailwindcss:build": "node scripts/frontend/tailwind_all_the_way.mjs --only-used",
"jest": "jest --config jest.config.js",
"jest-debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
"jest:ci": "jest --config jest.config.js --ci --coverage --testSequencer ./scripts/frontend/parallel_ci_sequencer.js",
"jest:ci:build-cache": "./scripts/frontend/warm_jest_cache.mjs",
"jest:ci": "jest --config jest.config.js --ci --coverage --testSequencer ./scripts/frontend/parallel_ci_sequencer.js --logHeapUsage",
"jest:ci:predictive": "jest --config jest.config.js --ci --coverage --findRelatedTests $(cat $RSPEC_CHANGED_FILES_PATH) $(cat $RSPEC_MATCHING_JS_FILES_PATH) --passWithNoTests --testSequencer ./scripts/frontend/parallel_ci_sequencer.js",
"jest:contract": "PACT_DO_NOT_TRACK=true jest --config jest.config.contract.js --runInBand",
"jest:integration": "jest --config jest.config.integration.js",
View File
@ -0,0 +1,149 @@
#!/usr/bin/env node
import { join, relative } from 'node:path';
import { spawnSync } from 'node:child_process';
import { writeFile, rename, rm, mkdir } from 'node:fs/promises';
import chalk from 'chalk';
import pkg from 'glob';
const { glob } = pkg;
const rootPath = join(import.meta.dirname, '..', '..');
const testPath = join(rootPath, 'spec/frontend/_warm_cache');
const maxTestPerFile = 50;
let currentTestFile = 0;
function findFiles() {
return glob
.sync('{jh,ee/,}app/assets/javascripts/**/*.{js,vue,graphql}', {
cwd: rootPath,
ignore: [
// Try to avoid side effects and unparseable files (e.g., node modules
// we don't transpile for Jest, like mermaid) by excluding files we
// normally wouldn't write specs for.
//
// The most correct way to do this would be to parse all our spec files
// and import all files *they* import, but that's a lot more effort.
'**/app/assets/javascripts/main{_ee,_jh,}.js',
'**/app/assets/javascripts/{behaviors,pages,entrypoints}/**/*',
// A dev-only file
'**/app/assets/javascripts/webpack_non_compiled_placeholder.js',
// Generated translation files
'**/app/assets/javascripts/locale/*/app.js',
// Storybook stories
'**/*.stories.js',
// This file imports the `mermaid` node module, which is written in ES
// module format, and Jest isn't configured to transpile it. It's
// surprising that we don't have any specs that even transitively
// import mermaid.
'**/app/assets/javascripts/lib/mermaid.js',
// These *should* be in /pages/ 🤷
'**/app/assets/javascripts/snippet/snippet_show.js',
'**/app/assets/javascripts/admin/application_settings/setup_metrics_and_profiling.js',
// # These ones have a problem with jQuery.ajaxPrefilter not being defined
'app/assets/javascripts/lib/utils/rails_ujs.js',
'app/assets/javascripts/profile/profile.js',
'app/assets/javascripts/namespaces/leave_by_url.js',
// # These ones aren't working for some reason or another,
'app/assets/javascripts/blob/stl_viewer.js',
'app/assets/javascripts/blob/3d_viewer/index.js',
'app/assets/javascripts/filtered_search/dropdown_ajax_filter.js',
],
})
.sort();
}
async function writeTestFile(arr) {
currentTestFile += 1;
const data = `${arr.join('\n')}
it('nothing', () => { expect(1).toBe(1); })
`;
const baseName = `${currentTestFile}`.padStart(3, '0');
return writeFile(join(testPath, `${baseName}_spec.js`), data);
}
function setExitCode(statusOrError) {
// No error, do nothing.
if (statusOrError === 0) return;
if (process.env.CI) {
if (process.env.CI_MERGE_REQUEST_IID) {
// In merge requests, fail the pipeline by setting the exit code to
// something other than the allowed failure value.
process.exitCode = 2;
} else {
// In master and other pipelines, set it to the allowed exit code.
process.exitCode = 1;
}
} else {
// Not in CI, pass through status as-is
process.exitCode = typeof statusOrError === 'number' ? statusOrError : 1;
}
}
async function main() {
let curr = [];
await mkdir(testPath, { recursive: true });
const files = findFiles();
for (const item of files) {
const transformedPath = item
.replace(/^app\/assets\/javascripts\//, '~/')
.replace(/^(ee|jh)\/app\/assets\/javascripts\//, '$1/')
.replace(/\.js$/, '');
if (curr.length >= maxTestPerFile) {
// eslint-disable-next-line no-await-in-loop
await writeTestFile(curr);
curr = [];
}
curr.push(`import '${transformedPath}';`);
}
await writeTestFile(curr);
console.log(`[WARMING JEST]: Start execution`);
const result = spawnSync('yarn', ['run', 'jest', testPath], {
cwd: rootPath,
detached: true,
stdio: 'inherit',
});
console.log(`[WARMING JEST]: End execution: jest exited with ${result.status}`);
if (process.env.CI) {
console.log(`Moving spec/frontend/_warm_cache to tmp/`);
await rename(testPath, join(rootPath, 'tmp/cache/jest/_warm_cache'));
} else {
console.log(`Removing spec/frontend/_warm_cache`);
await rm(testPath, { recursive: true, force: true });
}
if (result.status !== 0) {
const scriptPath = relative(rootPath, import.meta.filename);
console.log(chalk.red('Jest cache warming failed!'));
console.log(
chalk.red(
`If the failure is due to an import error, add the problematic file(s) to the ignore list in ${scriptPath}.`,
),
);
console.log(chalk.red('For help, contact the Manage:Foundations team.'));
}
return result.status;
}
try {
setExitCode(await main());
} catch (error) {
setExitCode(error);
console.error(error);
}
View File
@ -0,0 +1,83 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Members::PendingInvitationsFinder, feature_category: :groups_and_projects do
describe '#execute' do
let_it_be(:user, reload: true) { create(:user, email: 'user@email.com') }
let(:invite_emails) { [user.email] }
subject(:execute) { described_class.new(invite_emails).execute }
context 'when the invite_email is the same case as the user email' do
let_it_be(:invited_member) do
create(:project_member, :invited, invite_email: user.email)
end
it 'finds the invite' do
expect(execute).to match_array([invited_member])
end
end
context 'when there is a non-lowercased private commit email' do
let_it_be(:invite_emails) do
["#{user.id}-BOBBY_TABLES@#{Gitlab::CurrentSettings.current_application_settings.commit_email_hostname}"]
end
let_it_be(:invited_member) do
create(:project_member, :invited, invite_email: invite_emails.first)
end
before do
user.update!(username: 'BOBBY_TABLES')
end
it 'finds the invite' do
expect(execute).to match_array([invited_member])
end
end
context 'when the invite has already been accepted' do
let_it_be(:invited_member) do
create(:project_member, :invited, invite_email: user.email)
end
it 'finds only the valid pending invite' do
create(:project_member, :invited, invite_email: user.email).accept_invite!(user)
expect(execute).to match_array([invited_member])
end
end
context 'when the invite_email is a different case than the user email' do
let_it_be(:upper_case_existing_invite) do
create(:project_member, :invited, invite_email: user.email.upcase)
end
it 'finds the invite' do
expect(execute).to match_array([upper_case_existing_invite])
end
end
context 'with an uppercase version of the email matches another member' do
let_it_be(:project_member_invite) { create(:project_member, :invited, invite_email: user.email) }
let_it_be(:upper_case_existing_invite) do
create(:project_member, :invited, source: project_member_invite.project, invite_email: user.email.upcase)
end
it 'contains only the latest updated case insensitive email invite' do
travel_to 10.minutes.ago do
project_member_invite.touch # in past, so shouldn't get accepted over the one created
end
upper_case_existing_invite.touch # ensure updated_at is being verified. This one should be first now.
travel_to 10.minutes.from_now do
project_member_invite.touch # now we'll make the original first so we are verifying updated_at
expect(execute).to match_array([project_member_invite])
end
end
end
end
end
View File
@ -72,6 +72,12 @@ export const protectionPropsMock = {
statusChecks: statusChecksRulesMock,
};
export const protectionEmptyStatePropsMock = {
header: '',
headerLinkTitle: 'Status checks',
emptyStateCopy: 'No status checks',
};
export const protectionRowPropsMock = {
title: 'Test title',
users: usersMock,
View File
@ -2,16 +2,18 @@ import { GlCard, GlLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import Protection, { i18n } from '~/projects/settings/branch_rules/components/view/protection.vue';
import ProtectionRow from '~/projects/settings/branch_rules/components/view/protection_row.vue';
import { protectionPropsMock } from './mock_data';
import { protectionPropsMock, protectionEmptyStatePropsMock } from './mock_data';
describe('Branch rule protection', () => {
let wrapper;
const createComponent = (glFeatures = { editBranchRules: true }, props = {}) => {
const createComponent = (glFeatures = { editBranchRules: true }, props = protectionPropsMock) => {
wrapper = shallowMountExtended(Protection, {
propsData: {
header: 'Allowed to merge',
headerLinkHref: '/foo/bar',
headerLinkTitle: 'Manage here',
...props,
...protectionPropsMock,
},
stubs: { GlCard },
provide: { glFeatures },
@ -25,6 +27,7 @@ describe('Branch rule protection', () => {
const findLink = () => wrapper.findComponent(GlLink);
const findProtectionRows = () => wrapper.findAllComponents(ProtectionRow);
const findEditButton = () => wrapper.findByTestId('edit-button');
const findEmptyState = () => wrapper.findByTestId('protection-empty-state');
it('renders a card component', () => {
expect(findCard().exists()).toBe(true);
@ -34,29 +37,16 @@ describe('Branch rule protection', () => {
expect(findHeader().exists()).toBe(true);
});
it('renders link when `edit_branch_rules` FF is enabled and `isEditAvailable` prop is false', () => {
expect(findLink().text()).toBe(protectionPropsMock.headerLinkTitle);
expect(findLink().attributes('href')).toBe(protectionPropsMock.headerLinkHref);
it('renders empty state for Status Checks when there is none', () => {
createComponent({ editBranchRules: true }, { ...protectionEmptyStatePropsMock });
expect(findEmptyState().text()).toBe('No status checks');
});
describe('When `isEditAvailable` prop is set to true and `edit_branch_rules` FF is enabled', () => {
beforeEach(() => createComponent({ editBranchRules: true }, { isEditAvailable: true }));
it('renders `Edit` button', () => {
expect(findEditButton().exists()).toBe(true);
});
});
it('renders a help text when provided', () => {
createComponent({ editBranchRules: true }, { helpText: 'Help text' });
describe('When `edit_branch_rules` FF is disabled', () => {
beforeEach(() => createComponent({ editBranchRules: false }));
it('does not render `Edit` button', () => {
expect(findEditButton().exists()).toBe(false);
});
it('renders link to manage branch protections', () => {
expect(findLink().text()).toBe(protectionPropsMock.headerLinkTitle);
expect(findLink().attributes('href')).toBe(protectionPropsMock.headerLinkHref);
});
expect(findCard().text()).toContain('Help text');
});
it('renders a protection row for roles', () => {
@ -102,4 +92,25 @@ describe('Branch rule protection', () => {
expect(findProtectionRows().at(5).props('showDivider')).toBe(true);
});
describe('When `isEditAvailable` prop is set to true', () => {
beforeEach(() => createComponent({ editBranchRules: true }, { isEditAvailable: true }));
it('renders `Edit` button', () => {
expect(findEditButton().exists()).toBe(true);
});
});
describe('When `edit_branch_rules` FF is disabled', () => {
beforeEach(() => createComponent({ editBranchRules: false }));
it('does not render `Edit` button', () => {
expect(findEditButton().exists()).toBe(false);
});
it('renders link to manage branch protections', () => {
expect(findLink().text()).toBe(protectionPropsMock.headerLinkTitle);
expect(findLink().attributes('href')).toBe(protectionPropsMock.headerLinkHref);
});
});
});
View File
@ -88,6 +88,7 @@ RSpec.describe Mutations::DesignManagement::Delete do
it 'runs no more than 34 queries' do
allow(Gitlab::Tracking).to receive(:event) # rubocop:disable RSpec/ExpectGitlabTracking
allow(Gitlab::InternalEvents).to receive(:track_event)
filenames.each(&:present?) # ignore setup
# Queries: as of 2022-12-01
View File
@ -0,0 +1,17 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Feature::Shared, feature_category: :tooling do
describe '.can_be_default_enabled?' do
subject { described_class.can_be_default_enabled?(type) }
described_class::TYPES.each do |type, data|
context "when type is #{type}" do
let(:type) { type }
it { is_expected.to eq(data[:can_be_default_enabled]) }
end
end
end
end
View File
@ -89,7 +89,7 @@ RSpec.describe Gitlab::Composer::VersionIndex, feature_category: :package_regist
subject(:sha) { described_class.new(packages).sha }
it 'returns the json SHA' do
expect(sha).to match /^[A-Fa-f0-9]{64}$/
expect(sha).to match(/^[A-Fa-f0-9]{64}$/)
end
end
end
View File
@ -50,7 +50,7 @@ RSpec.describe Gitlab::Config::Entry::Validator do
validator_instance.validate
expect(validator_instance.messages)
.to include /test attribute can't be blank/
.to include(/test attribute can't be blank/)
end
end
end
View File
@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
describe '#restrict_gitlab_migration' do
it 'invalid schema raises exception' do
expect { schema_class.restrict_gitlab_migration gitlab_schema: :gitlab_non_existing }
.to raise_error /Unknown 'gitlab_schema:/
.to raise_error(/Unknown 'gitlab_schema:/)
end
it 'does configure allowed_gitlab_schema' do
View File
@ -2839,7 +2839,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
describe '#add_primary_key_using_index' do
it "executes the statement to add the primary key" do
expect(model).to receive(:execute).with /ALTER TABLE "_test_table" ADD CONSTRAINT "old_name" PRIMARY KEY USING INDEX "new_name"/
expect(model).to receive(:execute).with(/ALTER TABLE "_test_table" ADD CONSTRAINT "old_name" PRIMARY KEY USING INDEX "new_name"/)
model.add_primary_key_using_index(:_test_table, :old_name, :new_name)
end
@ -2890,7 +2890,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers, feature_category: :database d
describe '#drop_sequence' do
it "executes the statement to drop the sequence" do
expect(model).to receive(:execute).with /ALTER TABLE "_test_table" ALTER COLUMN "test_column" DROP DEFAULT;\nDROP SEQUENCE IF EXISTS "_test_table_id_seq"/
expect(model).to receive(:execute).with(/ALTER TABLE "_test_table" ALTER COLUMN "test_column" DROP DEFAULT;\nDROP SEQUENCE IF EXISTS "_test_table_id_seq"/)
model.drop_sequence(:_test_table, :test_column, :_test_table_id_seq)
end
View File
@ -214,7 +214,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
it 'does raise an exception' do
expect do
model.queue_background_migration_jobs_by_range_at_intervals(ProjectAuthorization, 'FooJob', 10.seconds)
end.to raise_error /use `restrict_gitlab_migration:` " with `:gitlab_shared`/
end.to raise_error(/use `restrict_gitlab_migration:` " with `:gitlab_shared`/)
end
end
end
@ -227,7 +227,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
it 'does raise an exception' do
expect do
model.queue_background_migration_jobs_by_range_at_intervals(ProjectAuthorization, 'FooJob', 10.seconds)
end.to raise_error /The `#queue_background_migration_jobs_by_range_at_intervals` can not be run inside a transaction./
end.to raise_error(/The `#queue_background_migration_jobs_by_range_at_intervals` can not be run inside a transaction./)
end
end
end
@ -273,7 +273,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
it 'does raise an exception' do
expect { subject }
.to raise_error /The `#requeue_background_migration_jobs_by_range_at_intervals` cannot use `restrict_gitlab_migration:`./
.to raise_error(/The `#requeue_background_migration_jobs_by_range_at_intervals` cannot use `restrict_gitlab_migration:`./)
end
end
end
@ -285,7 +285,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
it 'does raise an exception' do
expect { subject }
.to raise_error /The `#requeue_background_migration_jobs_by_range_at_intervals` can not be run inside a transaction./
.to raise_error(/The `#requeue_background_migration_jobs_by_range_at_intervals` can not be run inside a transaction./)
end
end
@ -405,7 +405,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
it 'does raise an exception' do
expect { model.finalize_background_migration(job_class_name, delete_tracking_jobs: %w[pending succeeded]) }
.to raise_error /The `#finalize_background_migration` can not be run inside a transaction./
.to raise_error(/The `#finalize_background_migration` can not be run inside a transaction./)
end
end
@ -425,7 +425,7 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
it 'does raise an exception' do
expect { model.finalize_background_migration(job_class_name, delete_tracking_jobs: %w[pending succeeded]) }
.to raise_error /The `#finalize_background_migration` cannot use `restrict_gitlab_migration:`./
.to raise_error(/The `#finalize_background_migration` cannot use `restrict_gitlab_migration:`./)
end
end
end
View File
@ -303,7 +303,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
it 'does raise an exception' do
expect { migration.finalize_batched_background_migration(job_class_name: 'MyJobClass', table_name: :projects, column_name: :id, job_arguments: []) }
.to raise_error /`finalize_batched_background_migration` cannot be run inside a transaction./
.to raise_error(/`finalize_batched_background_migration` cannot be run inside a transaction./)
end
end
@ -342,7 +342,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
expect do
migration.finalize_batched_background_migration(
job_class_name: 'Ci::MyClass', table_name: :ci_builds, column_name: :id, job_arguments: [])
end.to raise_error /Could not find batched background migration/
end.to raise_error(/Could not find batched background migration/)
end
end
@ -351,7 +351,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
expect do
migration.finalize_batched_background_migration(
job_class_name: 'Ci::MyClass', table_name: :ci_builds, column_name: :id, job_arguments: [])
end.to raise_error /Could not find batched background migration/
end.to raise_error(/Could not find batched background migration/)
end
end
end
@ -364,7 +364,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
it 'does raise an exception' do
expect { migration.finalize_batched_background_migration(job_class_name: 'MyJobClass', table_name: :projects, column_name: :id, job_arguments: []) }
.to raise_error /`finalize_batched_background_migration` cannot be run inside a transaction./
.to raise_error(/`finalize_batched_background_migration` cannot be run inside a transaction./)
end
end
end
@ -559,7 +559,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers,
it 'does raise an exception' do
expect { ensure_batched_background_migration_is_finished }
.to raise_error /`ensure_batched_background_migration_is_finished` cannot be run inside a transaction./
.to raise_error(/`ensure_batched_background_migration_is_finished` cannot be run inside a transaction./)
end
end

View File
context 'ensure that the registered models have partitioning strategy' do
it 'fails when partitioning_strategy is not specified for the model' do
model = Class.new(ApplicationRecord)
expect { described_class.register_models([model]) }.to raise_error /should have partitioning strategy defined/
expect { described_class.register_models([model]) }.to raise_error(/should have partitioning strategy defined/)
end
end
end
View File
@ -235,7 +235,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
end
rescue StandardError
# Ensures that standard rescue does not silence errors
end.to raise_error /Cross-database data modification/
end.to raise_error(/Cross-database data modification/)
end
end
@ -267,7 +267,7 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModificatio
# the ensure of `.transaction` executes `ROLLBACK TO SAVEPOINT`
end
end.to raise_error /force rollback/
end.to raise_error(/force rollback/)
end
end
end
View File
@ -192,7 +192,7 @@ RSpec.describe Gitlab::GitalyClient::WithFeatureFlagActors do
it 'calls error tracking track_and_raise_for_dev_exception' do
expect do
service.gitaly_client_call(call_arg_1, call_arg_2, karg: call_arg_3)
end.to raise_error /gitaly_client_call called without setting repository_actor/
end.to raise_error(/gitaly_client_call called without setting repository_actor/)
expect(Gitlab::ErrorTracking).to have_received(:track_and_raise_for_dev_exception).with(
be_a(Feature::InvalidFeatureFlagError)
View File
@ -14,7 +14,7 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::ArrayScopeCol
context 'when no columns are given' do
let(:columns) { [] }
it { expect { array_scope_columns }.to raise_error /No array columns were given/ }
it { expect { array_scope_columns }.to raise_error(/No array columns were given/) }
end
context 'when Arel AS node is given as input' do
View File
@ -385,7 +385,7 @@ RSpec.describe Gitlab::Pagination::Keyset::InOperatorOptimization::QueryBuilder
expect do
iterator.each_batch(of: batch_size) { |records| records.to_a }
end.to raise_error /The "RecordLoaderStrategy" does not support/
end.to raise_error(/The "RecordLoaderStrategy" does not support/)
end
end
end
View File
@ -12,6 +12,7 @@ RSpec.describe Gitlab::PrivateCommitEmail, feature_category: :shared do
subject { described_class.regex }
it { is_expected.to match("1-foo@#{hostname}") }
it { is_expected.to match("1-BLAH@#{hostname}") }
it { is_expected.not_to match("1-foo@#{hostname}.foo") }
it { is_expected.not_to match('1-foo@users.noreply.gitlab.com') }
it { is_expected.not_to match('foo-1@users.noreply.gitlab.com') }
@ -53,5 +54,11 @@ RSpec.describe Gitlab::PrivateCommitEmail, feature_category: :shared do
expect(described_class.for_user(user)).to eq("#{user.id}-#{user.username}@#{hostname}")
end
it 'stores the private commit email with uppercase' do
user = create(:user, username: 'BOBBY_TABLES')
expect(described_class.for_user(user)).to eq("#{user.id}-BOBBY_TABLES@#{hostname}")
end
end
end
View File
@ -385,7 +385,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob,
describe '#duplicate?' do
it "raises an error if the check wasn't performed" do
expect { duplicate_job.duplicate? }.to raise_error /Call `#check!` first/
expect { duplicate_job.duplicate? }.to raise_error(/Call `#check!` first/)
end
it 'returns false if the existing jid equals the job jid' do
View File
@ -146,7 +146,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
end
it 'raises error for development environment' do
expect { save_aggregated_metrics }.to raise_error /Unsupported data type/
expect { save_aggregated_metrics }.to raise_error(/Unsupported data type/)
end
end
end
View File
@ -1,14 +0,0 @@
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::DesignsCounter do
it_behaves_like 'a redis usage counter', 'Designs', :create
it_behaves_like 'a redis usage counter', 'Designs', :update
it_behaves_like 'a redis usage counter', 'Designs', :delete
it_behaves_like 'a redis usage counter with totals', :design_management_designs,
create: 5,
update: 3,
delete: 2
end
View File
@ -502,18 +502,18 @@ RSpec.describe Gitlab::Utils::UsageData do
context 'with counter given' do
context 'when gets an error' do
subject { described_class.redis_usage_data(::Gitlab::UsageDataCounters::DesignsCounter) }
subject { described_class.redis_usage_data(::Gitlab::UsageDataCounters::MergeRequestCounter) }
let(:fallback) { ::Gitlab::UsageDataCounters::DesignsCounter.fallback_totals }
let(:failing_class) { ::Gitlab::UsageDataCounters::DesignsCounter }
let(:fallback) { ::Gitlab::UsageDataCounters::MergeRequestCounter.fallback_totals }
let(:failing_class) { ::Gitlab::UsageDataCounters::MergeRequestCounter }
let(:failing_method) { :totals }
it_behaves_like 'failing hardening method', ::Redis::CommandError
end
it 'returns the totals when counter is given' do
allow(::Gitlab::UsageDataCounters::DesignsCounter).to receive(:totals).and_return({ design_management_designs_create: 2 })
expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::DesignsCounter)).to eql({ design_management_designs_create: 2 })
allow(::Gitlab::UsageDataCounters::MergeRequestCounter).to receive(:totals).and_return({ merge_request_create: 2 })
expect(described_class.redis_usage_data(::Gitlab::UsageDataCounters::MergeRequestCounter)).to eql({ merge_request_create: 2 })
end
end
end
View File
@ -125,7 +125,7 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Global do
describe '#errors' do
it 'returns error about invalid type' do
expect(global.errors.first).to match /should be a hash/
expect(global.errors.first).to match(/should be a hash/)
end
end
end
View File
@ -57,7 +57,7 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Terminal do
it 'is not valid' do
expect(entry).not_to be_valid
expect(entry.errors.first)
.to match /port config contains unknown keys: invalid_key/
.to match(/port config contains unknown keys: invalid_key/)
end
end
end
View File
@ -15,7 +15,7 @@ RSpec.describe Integrations::Telegram, feature_category: :integrations do
describe 'validations' do
context 'when integration is active' do
before do
subject.active = true
subject.activate!
end
it { is_expected.to validate_presence_of(:token) }
@ -25,7 +25,7 @@ RSpec.describe Integrations::Telegram, feature_category: :integrations do
context 'when integration is inactive' do
before do
subject.active = false
subject.deactivate!
end
it { is_expected.not_to validate_presence_of(:token) }
@ -45,12 +45,23 @@ RSpec.describe Integrations::Telegram, feature_category: :integrations do
end
context 'when token is present' do
let(:integration) { create(:telegram_integration) }
let(:integration) { build_stubbed(:telegram_integration) }
it 'sets webhook value' do
expect(integration).to be_valid
expect(integration.webhook).to eq("https://api.telegram.org/bot123456:ABC-DEF1234/sendMessage")
end
context 'with custom hostname' do
before do
integration.hostname = 'https://gitlab.example.com'
end
it 'sets webhook value with custom hostname' do
expect(integration).to be_valid
expect(integration.webhook).to eq("https://gitlab.example.com/bot123456:ABC-DEF1234/sendMessage")
end
end
end
end
View File
@ -273,6 +273,40 @@ RSpec.describe Member, feature_category: :groups_and_projects do
end
end
describe '.with_case_insensitive_invite_emails' do
let_it_be(:email) { 'bob@example.com' }
context 'when the invite_email is the same case' do
let_it_be(:invited_member) do
create(:project_member, :invited, invite_email: email)
end
it 'finds the members' do
expect(described_class.with_case_insensitive_invite_emails([email])).to match_array([invited_member])
end
end
context 'when the invite_email is lowercased and we have an uppercase email for searching' do
let_it_be(:invited_member) do
create(:project_member, :invited, invite_email: email)
end
it 'finds the members' do
expect(described_class.with_case_insensitive_invite_emails([email.upcase])).to match_array([invited_member])
end
end
context 'when the invite_email is non lower cased' do
let_it_be(:invited_member) do
create(:project_member, :invited, invite_email: email.upcase)
end
it 'finds the members' do
expect(described_class.with_case_insensitive_invite_emails([email])).to match_array([invited_member])
end
end
end
describe '.invite' do
it { expect(described_class.invite).not_to include @maintainer }
it { expect(described_class.invite).to include @invited_member }
@ -848,6 +882,46 @@ RSpec.describe Member, feature_category: :groups_and_projects do
end
end
describe '.distinct_on_source_and_case_insensitive_invite_email' do
it 'finds distinct members on email' do
email = 'bob@example.com'
project = create(:project)
project_owner_member = project.members.first
member = create(:project_member, :invited, source: project, invite_email: email)
# The one below is the duplicate and will not be returned.
create(:project_member, :invited, source: project, invite_email: email.upcase)
another_project = create(:project)
another_project_owner_member = another_project.members.first
another_project_member = create(:project_member, :invited, source: another_project, invite_email: email)
# The one below is the duplicate and will not be returned.
create(:project_member, :invited, source: another_project, invite_email: email.upcase)
expect(described_class.distinct_on_source_and_case_insensitive_invite_email)
.to match_array([project_owner_member, member, another_project_owner_member, another_project_member])
end
end
describe '.order_updated_desc' do
it 'contains only the latest updated case insensitive email invite' do
project = create(:project)
member = project.members.first
another_member = create(:project_member, source: member.project)
travel_to 10.minutes.ago do
another_member.touch # in past, so shouldn't get accepted over the one created
end
member.touch # ensure updated_at is being verified. This one should be first now.
travel_to 10.minutes.from_now do
another_member.touch # now we'll make the original first so we are verifying updated_at
expect(described_class.order_updated_desc).to eq([another_member, member])
end
end
end
describe '.with_group_group_sharing_access' do
let_it_be(:shared_group) { create(:group) }
let_it_be(:invited_group) { create(:group) }
View File
@ -3795,6 +3795,47 @@ RSpec.describe User, feature_category: :user_profile do
expect(project_member_invite_via_unconfirmed_secondary_email.reload).to be_invite
expect(group_member_invite_via_unconfirmed_secondary_email.reload).to be_invite
end
context 'with an uppercase version of the email matches another member' do
let!(:uppercase_existing_invite) do
create(:project_member, :invited, source: project_member_invite.project, invite_email: user.email.upcase)
end
it 'accepts only one of the invites' do
travel_to 10.minutes.ago do
project_member_invite.touch # in past, so shouldn't get accepted over the one created
end
uppercase_existing_invite.touch # ensure updated_at is being verified. This one should be first now.
travel_to 10.minutes.from_now do
project_member_invite.touch # now we'll make the original first so we are verifying updated_at
result = [
project_member_invite,
group_member_invite,
project_member_invite_via_confirmed_secondary_email,
group_member_invite_via_confirmed_secondary_email
]
accepted_members = user.accept_pending_invitations!
expect(accepted_members).to match_array(result)
expect(uppercase_existing_invite.reset.user).to be_nil
end
end
end
end
describe '#pending_invitations' do
let_it_be(:user, reload: true) { create(:user, email: 'user@email.com') }
let_it_be(:invited_member) do
create(:project_member, :invited, invite_email: user.email)
end
it 'finds the invite' do
expect(user.pending_invitations).to match_array([invited_member])
end
end
describe '#can_create_project?' do
View File
@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe API::Ci::Triggers, feature_category: :continuous_integration do
RSpec.describe API::Ci::Triggers, feature_category: :pipeline_composition do
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
View File
@ -79,15 +79,14 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
it_behaves_like "a top-level error"
it 'does not log any events' do
counter = ::Gitlab::UsageDataCounters::DesignsCounter
it_behaves_like 'internal event not tracked' do
let(:event) { 'delete_design_management_design' }
expect do
subject(:service_action) do
run_service
rescue StandardError
nil
end
.not_to change { [counter.totals, Event.count] }
end
it 'does not log any UsageData metrics' do
@ -120,9 +119,11 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
expect { run_service }.to change { issue.designs.current.count }.from(3).to(2)
end
it 'logs a deletion event' do
counter = ::Gitlab::UsageDataCounters::DesignsCounter
expect { run_service }.to change { counter.read(:delete) }.by(1)
it_behaves_like 'internal event tracking' do
let(:event) { 'delete_design_management_design' }
let(:namespace) { project.namespace }
let(:category) { described_class }
subject(:service_action) { run_service }
end
it 'updates UsageData for removed designs' do
@ -189,15 +190,19 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
let!(:designs) { create_designs(2) }
it 'makes the correct changes' do
counter = ::Gitlab::UsageDataCounters::DesignsCounter
expect { run_service }
.to change { issue.designs.current.count }.from(3).to(1)
.and change { counter.read(:delete) }.by(2)
.and change { Event.count }.by(2)
.and change { Event.destroyed_action.for_design.count }.by(2)
end
it_behaves_like 'internal event tracking' do
let(:event) { 'delete_design_management_design' }
let(:namespace) { project.namespace }
let(:category) { described_class }
subject(:service_action) { run_service }
end
it 'schedules deleting todos for that design' do
expect(TodosDestroyer::DestroyedDesignsWorker).to receive(:perform_async).with(designs.map(&:id))
@ -238,6 +243,9 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
describe 'scalability' do
before do
run_service(create_designs(1)) # ensure project, issue, etc are created
# Exclude internal event tracking from the DB request count. The events are tracked independently of each
# other and each make a query for the project's namespace. There's no way to avoid these requests for now.
allow(Gitlab::InternalEvents).to receive(:track_event)
end
it 'makes the same number of DB requests for one design as for several' do
View File
@ -104,15 +104,12 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
end
it 'creates a commit, an event in the activity stream and updates the creation count', :aggregate_failures do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_designs_added_action)
.with(author: user, project: project)
expect { run_service }
.to change { Event.count }.by(1)
.and change { Event.for_design.created_action.count }.by(1)
.and change { counter.read(:create) }.by(1)
expect(design_repository.commit).to have_attributes(
author: user,
@ -126,6 +123,13 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
subject(:service_action) { run_service }
end
it_behaves_like 'internal event tracking' do
let(:event) { 'create_design_management_design' }
let(:namespace) { project.namespace }
let(:category) { described_class }
subject(:service_action) { run_service }
end
it 'can run the same command in parallel',
quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/450483' do
parellism = 4
@ -227,13 +231,18 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
end
it 'records the correct events' do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect { run_service }
.to change { counter.read(:update) }.by(1)
.and change { Event.count }.by(1)
.to change { Event.count }.by(1)
.and change { Event.for_design.updated_action.count }.by(1)
end
it_behaves_like 'internal event tracking' do
let(:event) { 'update_design_management_design' }
let(:namespace) { project.namespace }
let(:category) { described_class }
subject(:service_action) { run_service }
end
context 'when uploading a new design' do
it 'does not link the new version to the existing design' do
existing_design = issue.designs.first
@ -293,8 +302,6 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
end
it 'has the correct side-effects' do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect(DesignManagement::NewVersionWorker)
.to receive(:perform_async).once.with(Integer, false).and_return(nil)
@ -303,10 +310,22 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
.and change { Event.for_design.count }.by(2)
.and change { Event.created_action.count }.by(1)
.and change { Event.updated_action.count }.by(1)
.and change { counter.read(:create) }.by(1)
.and change { counter.read(:update) }.by(1)
.and change { commit_count }.by(1)
end
it_behaves_like 'internal event tracking' do
let(:event) { 'create_design_management_design' }
let(:namespace) { project.namespace }
let(:category) { described_class }
subject(:service_action) { run_service }
end
it_behaves_like 'internal event tracking' do
let(:event) { 'update_design_management_design' }
let(:namespace) { project.namespace }
let(:category) { described_class }
subject(:service_action) { run_service }
end
end
context 'when uploading multiple files' do
@ -317,7 +336,6 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
end
it 'has the correct side-effects', :request_store do
counter = Gitlab::UsageDataCounters::DesignsCounter
service = described_class.new(project, user, issue: issue, files: files)
# Some unrelated calls that are usually cached or happen only once
@ -335,11 +353,17 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
expect { service.execute }
.to change { issue.designs.count }.from(0).to(2)
.and change { DesignManagement::Version.count }.by(1)
.and change { counter.read(:create) }.by(2)
.and change { Gitlab::GitalyClient.get_request_count }.by(3)
.and change { commit_count }.by(1)
end
it_behaves_like 'internal event tracking' do
let(:event) { 'create_design_management_design' }
let(:namespace) { project.namespace }
let(:category) { described_class }
subject(:service_action) { run_service }
end
context 'when uploading too many files' do
let(:files) { Array.new(DesignManagement::SaveDesignsService::MAX_FILES + 1) { dk_png } }
View File
@ -6601,7 +6601,6 @@
- './spec/lib/gitlab/usage_data_counters/base_counter_spec.rb'
- './spec/lib/gitlab/usage_data_counters/ci_template_unique_counter_spec.rb'
- './spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb'
- './spec/lib/gitlab/usage_data_counters/designs_counter_spec.rb'
- './spec/lib/gitlab/usage_data_counters/gitlab_cli_activity_unique_counter_spec.rb'
- './spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb'
- './spec/lib/gitlab/usage_data_counters/ipynb_diff_activity_counter_spec.rb'
Some files were not shown because too many files have changed in this diff.