Add latest changes from gitlab-org/gitlab@master

parent: 6c20a1150a
commit: 20d3e87b4f
@@ -143,15 +143,23 @@ pajamas_adoption:
  extends:
    - .default-retry
    - .reports:rules:pajamas_adoption
  allow_failure: true
  allow_failure:
    # The scanner exits with status:
    # - 0 when no errors occur, and no blocking findings are found;
    # - 1 when unexpected errors occur;
    # - 2 when blocking findings are found.
    # We only want to block the merge request in this last case.
    exit_codes: 1
  image: ${REGISTRY_HOST}/${REGISTRY_GROUP}/frontend/pajamas-adoption-scanner:latest
  stage: lint
  needs: []
  variables:
    FF_USE_NEW_BASH_EVAL_STRATEGY: 'true'
  script:
    - EE_APP=$(test -d ee/app/ && echo 'ee/app/' || true)
    - scan_gitlab_code_quality app/ $EE_APP
    - scan_gitlab_code_quality {,ee/}app/
  artifacts:
    name: pajamas-adoption
    when: always
    paths:
      - gl-code-quality-report.json
      - pas-findings.json

@@ -2727,7 +2727,8 @@
  - <<: *if-merge-request
    changes:
      - '{,ee/}app/**/*.{js,vue,rb,haml}'
  - <<: *if-default-refs
  - <<: *if-default-branch-refs
    allow_failure: true

################
# Review rules #

@@ -126,7 +126,6 @@ Layout/EmptyLineAfterMagicComment:
    - 'config/routes/merge_requests.rb'
    - 'danger/ce_ee_vue_templates/Dangerfile'
    - 'danger/feature_flag/Dangerfile'
    - 'danger/pajamas/Dangerfile'
    - 'ee/app/controllers/concerns/epics_actions.rb'
    - 'ee/app/controllers/concerns/saml_authorization.rb'
    - 'ee/app/controllers/ee/projects/analytics/cycle_analytics/stages_controller.rb'

@@ -668,7 +668,6 @@ Layout/LineLength:
    - 'danger/ci_config/Dangerfile'
    - 'danger/documentation/Dangerfile'
    - 'danger/feature_flag/Dangerfile'
    - 'danger/pajamas/Dangerfile'
    - 'danger/roulette/Dangerfile'
    - 'danger/vue_shared_documentation/Dangerfile'
    - 'ee/app/controllers/admin/elasticsearch_controller.rb'

@@ -59,7 +59,6 @@ Lint/MissingCopEnableDirective:
    - 'app/services/projects/container_repository/third_party/delete_tags_service.rb'
    - 'app/services/search/global_service.rb'
    - 'danger/feature_flag/Dangerfile'
    - 'danger/pajamas/Dangerfile'
    - 'ee/app/controllers/ee/admin/dashboard_controller.rb'
    - 'ee/app/controllers/ee/admin/groups_controller.rb'
    - 'ee/app/controllers/ee/admin/users_controller.rb'

@ -145,7 +145,7 @@ export default {
|
|||
<gl-button
|
||||
v-gl-tooltip
|
||||
:title="$options.i18n.addReaction"
|
||||
:class="toggleClass"
|
||||
:class="[toggleClass, { 'is-active': isVisible }]"
|
||||
class="gl-relative gl-h-full"
|
||||
data-testid="add-reaction-button"
|
||||
>
|
||||
|
|
@@ -154,10 +154,16 @@ export default {
        <gl-icon class="award-control-icon-neutral gl-button-icon" name="slight-smile" />
      </span>
      <span class="reaction-control-icon reaction-control-icon-positive">
        <gl-icon class="award-control-icon-positive gl-button-icon" name="smiley" />
        <gl-icon
          class="award-control-icon-positive gl-button-icon gl-left-3!"
          name="smiley"
        />
      </span>
      <span class="reaction-control-icon reaction-control-icon-super-positive">
        <gl-icon class="award-control-icon-super-positive gl-button-icon" name="smile" />
        <gl-icon
          class="award-control-icon-super-positive gl-button-icon gl-left-3!"
          name="smile"
        />
      </span>
    </slot>
  </gl-button>

@@ -135,6 +135,8 @@ export default {
      :fields="$options.fields"
      show-empty
      stacked="md"
      class="mb-3"
      :aria-label="$options.i18n.settingBlockTitle"
      :busy="isLoadingPackageProtectionRules"
    >
      <template #table-busy>

@@ -112,21 +112,21 @@ export default {
    </div>
    <div
      class="committer gl-flex-basis-full"
      :class="truncateAuthorName ? 'gl-display-inline-flex' : ''"
      :class="{ 'gl-display-inline-flex': truncateAuthorName }"
      data-testid="committer"
    >
      <gl-link
        v-if="commit.author"
        :href="commit.author.webPath"
        class="commit-author-link js-user-link"
        :class="truncateAuthorName ? 'gl-display-inline-block gl-text-truncate' : ''"
        :class="{ 'gl-display-inline-block gl-text-truncate': truncateAuthorName }"
      >
        {{ commit.author.name }}</gl-link
      >
      <template v-else>
        {{ commit.authorName }}
      </template>
      {{ $options.i18n.authored }}
      {{ $options.i18n.authored }}
      <timeago-tooltip :time="commit.authoredDate" tooltip-placement="bottom" />
    </div>
    <pre

@@ -413,6 +413,7 @@ span.idiff {
}

.version-link {
  display: inline-block;
  align-self: center;
  margin-top: $gl-spacing-scale-2;
  width: $gl-spacing-scale-5;

@@ -137,8 +137,12 @@
      color: $gl-text-color;
    }

    &.gl-display-inline-flex {
      gap: 0.5ch;
    }

    .commit-author-link.gl-text-truncate {
      max-width: 20ch;
      max-width: 19ch;
    }
  }
}

@@ -26,5 +26,45 @@ module Namespaces

      connection.execute(sanitize_sql_array([sql, namespace_ids, namespace_ids, Time.current]))
    end

    def self.load_outdated_batch(batch_size)
      where
        .not(outdated_at: nil)
        .limit(batch_size)
        .lock('FOR UPDATE SKIP LOCKED')
        .pluck_primary_key
    end

    def self.upsert_with_consistent_data(namespace:, self_and_descendant_group_ids:, all_project_ids:)
      query = <<~SQL
        INSERT INTO namespace_descendants
        (namespace_id, traversal_ids, self_and_descendant_group_ids, all_project_ids, outdated_at, calculated_at)
        VALUES
        (
          ?,
          ARRAY[?]::bigint[],
          ARRAY_REMOVE(ARRAY[?]::bigint[], NULL),
          ARRAY_REMOVE(ARRAY[?]::bigint[], NULL),
          NULL,
          ?
        )
        ON CONFLICT(namespace_id)
        DO UPDATE SET
        traversal_ids = EXCLUDED.traversal_ids,
        self_and_descendant_group_ids = EXCLUDED.self_and_descendant_group_ids,
        all_project_ids = EXCLUDED.all_project_ids,
        outdated_at = EXCLUDED.outdated_at,
        calculated_at = EXCLUDED.calculated_at
      SQL

      connection.execute(sanitize_sql_array([
        query,
        namespace.id,
        namespace.traversal_ids,
        self_and_descendant_group_ids,
        all_project_ids,
        Time.current
      ]))
    end
  end
end

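A minimal sketch of how these two new class methods are meant to be combined, mirroring the cron worker introduced later in this commit; the batch size, the `find_by_id` lookup, and the literal id arrays are illustrative assumptions:

```ruby
# Hypothetical caller: claim a batch of outdated cache rows (FOR UPDATE SKIP LOCKED
# keeps concurrent workers from claiming the same rows), then rebuild each entry.
Namespaces::Descendants.load_outdated_batch(50).each do |namespace_id|
  namespace = Namespace.find_by_id(namespace_id)
  next unless namespace

  # In the real flow the id arrays are recomputed from the namespace hierarchy
  # (see UpdateDenormalizedDescendantsService below); these values are placeholders.
  Namespaces::Descendants.upsert_with_consistent_data(
    namespace: namespace,
    self_and_descendant_group_ids: [namespace.id],
    all_project_ids: []
  )
end
```
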
@@ -1942,8 +1942,17 @@ class Project < ApplicationRecord
  end

  def track_project_repository
    repository = project_repository || build_project_repository
    repository.update!(shard_name: repository_storage, disk_path: disk_path)
    (project_repository || build_project_repository).tap do |proj_repo|
      attributes = { shard_name: repository_storage, disk_path: disk_path }

      if Feature.enabled?(:store_object_format, namespace, type: :gitlab_com_derisk)
        object_format = repository.object_format

        attributes[:object_format] = object_format if object_format.present?
      end

      proj_repo.update!(**attributes)
    end

    cleanup
  end

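The stored value can then be read back from the tracking record, as the model and service specs later in this commit do; a small usage sketch (the `project` variable is an assumed existing record):

```ruby
project.track_project_repository

# With the store_object_format flag enabled, the tracking row now carries the
# repository's object format alongside the shard name and disk path.
project.project_repository.object_format # => "sha1" or "sha256"
```
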
@@ -1281,8 +1281,6 @@ class Repository
  end

  def object_format
    return unless exists?

    cache_key = "object_format:#{full_path}"

    request_store_cache.fetch(cache_key) do

@@ -1293,6 +1291,8 @@ class Repository
        FORMAT_SHA256
      end
    end
  rescue Gitlab::Git::Repository::NoRepository
    nil
  end

  def blank_ref

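A short sketch of the resulting behaviour of `Repository#object_format` after these two hunks (the variable names are assumptions for illustration):

```ruby
# Existing repository: the value is cached per request via request_store_cache.
project.repository.object_format # => "sha1" or "sha256"

# Missing repository: Gitlab::Git::Repository::NoRepository is now rescued and nil
# is returned, which is why track_project_repository only stores a format when present.
missing_repo_project.repository.object_format # => nil (missing_repo_project is hypothetical)
```
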
@@ -2,6 +2,8 @@

module WorkItems
  class DatesSource < ApplicationRecord
    include FromUnion

    self.table_name = 'work_item_dates_sources'

    # namespace is required as the sharding key

@@ -0,0 +1,65 @@
# frozen_string_literal: true

module Namespaces
  class UpdateDenormalizedDescendantsService
    include Gitlab::Utils::StrongMemoize

    NAMESPACE_TYPE_MAPPING = {
      'Project' => :all_project_ids,
      'Group' => :self_and_descendant_group_ids
    }.freeze

    def initialize(namespace_id:)
      @namespace_id = namespace_id
    end

    def execute
      Namespaces::Descendants.transaction do
        namespace = Namespace.primary_key_in(namespace_id).lock.first # rubocop: disable CodeReuse/ActiveRecord -- this is a special service for updating records
        # If there is another process updating the hierarchy, this query will return nil and we just
        # stop the processing.
        descendants = Namespaces::Descendants.primary_key_in(namespace_id).lock('FOR UPDATE SKIP LOCKED').first # rubocop: disable CodeReuse/ActiveRecord -- this is a special service for updating records
        next unless descendants

        if namespace
          update_namespace_descendants(namespace)
        else
          descendants.destroy
        end
      end
    end

    private

    attr_reader :namespace_id

    def update_namespace_descendants(namespace)
      ids = collect_namespace_ids

      Namespaces::Descendants.upsert_with_consistent_data(
        namespace: namespace,
        self_and_descendant_group_ids: ids[:self_and_descendant_group_ids].sort,
        all_project_ids: Project.where(project_namespace_id: ids[:all_project_ids]).order(:id).pluck_primary_key # rubocop: disable CodeReuse/ActiveRecord -- Service specific record lookup
      )
    end

    def collect_namespace_ids
      denormalized_ids = { self_and_descendant_group_ids: [], all_project_ids: [] }

      iterator.each_batch do |ids|
        namespaces = Namespace.primary_key_in(ids).select(:id, :type)
        namespaces.each do |namespace|
          denormalized_attribute = NAMESPACE_TYPE_MAPPING[namespace.type]
          denormalized_ids[denormalized_attribute] << namespace.id if denormalized_attribute
        end
      end

      denormalized_ids
    end

    def iterator
      Gitlab::Database::NamespaceEachBatch
        .new(namespace_class: Namespace, cursor: { current_id: namespace_id, depth: [namespace_id] })
    end
  end
end

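The service is invoked for a single namespace id; a minimal sketch that matches the specs added later in this commit (the `group` record is an assumed example):

```ruby
# Rebuild the denormalized descendant cache for one namespace. If another process
# already holds the FOR UPDATE SKIP LOCKED row the call is a no-op, and if the
# namespace no longer exists the stale cache row is destroyed instead.
Namespaces::UpdateDenormalizedDescendantsService
  .new(namespace_id: group.id)
  .execute
```
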
@@ -642,6 +642,15 @@
  :weight: 1
  :idempotent: true
  :tags: []
- :name: cronjob:namespaces_process_outdated_namespace_descendants_cron
  :worker_name: Namespaces::ProcessOutdatedNamespaceDescendantsCronWorker
  :feature_category: :groups_and_projects
  :has_external_dependencies: false
  :urgency: :low
  :resource_boundary: :unknown
  :weight: 1
  :idempotent: true
  :tags: []
- :name: cronjob:namespaces_prune_aggregation_schedules
  :worker_name: Namespaces::PruneAggregationSchedulesWorker
  :feature_category: :source_code_management

@@ -0,0 +1,42 @@
# frozen_string_literal: true

module Namespaces
  class ProcessOutdatedNamespaceDescendantsCronWorker
    BATCH_SIZE = 50

    include ApplicationWorker

    data_consistency :always # rubocop: disable SidekiqLoadBalancing/WorkerDataConsistency -- The worker updates data

    # rubocop:disable Scalability/CronWorkerContext -- This worker does not perform work scoped to a context
    include CronjobQueue
    # rubocop:enable Scalability/CronWorkerContext

    feature_category :groups_and_projects
    idempotent!

    def perform
      runtime_limiter = Analytics::CycleAnalytics::RuntimeLimiter.new(45.seconds)

      processed_namespaces = 0
      loop do
        namespace_ids = Namespaces::Descendants.load_outdated_batch(BATCH_SIZE)

        break if namespace_ids.empty?

        namespace_ids.each do |namespace_id|
          Namespaces::UpdateDenormalizedDescendantsService
            .new(namespace_id: namespace_id)
            .execute

          processed_namespaces += 1
          break if runtime_limiter.over_time?
        end

        break if runtime_limiter.over_time?
      end

      log_extra_metadata_on_done(:result, { processed_namespaces: processed_namespaces })
    end
  end
end

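Because the class includes `ApplicationWorker`, it can also be exercised outside the cron schedule; a hedged sketch for a Rails console or a spec:

```ruby
# Run the batch processing inline (as the worker spec later in this commit does),
# or enqueue it through Sidekiq instead of waiting for the cron trigger.
Namespaces::ProcessOutdatedNamespaceDescendantsCronWorker.new.perform
Namespaces::ProcessOutdatedNamespaceDescendantsCronWorker.perform_async
```
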
@@ -0,0 +1,9 @@
---
name: store_object_format
feature_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/419887
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/139108
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/434364
milestone: '16.9'
type: gitlab_com_derisk
group: group::source code
default_enabled: false

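The `store_object_format` flag defaults to disabled; a sketch of toggling it with the standard GitLab feature-flag helpers (scoping to a namespace actor is an assumption based on the `Feature.enabled?(:store_object_format, namespace, ...)` check above):

```ruby
# Rails console: enable globally, or for a single namespace actor.
Feature.enable(:store_object_format)
Feature.enable(:store_object_format, Namespace.find_by_full_path('some-group'))

# In specs the flag can be turned off, as the Project model specs below do.
stub_feature_flags(store_object_format: false)
```
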
@@ -706,6 +706,9 @@ Settings.cron_jobs['ci_schedule_unlock_pipelines_in_queue_worker']['job_class']
Settings.cron_jobs['ci_catalog_resources_process_sync_events_worker'] ||= {}
Settings.cron_jobs['ci_catalog_resources_process_sync_events_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['ci_catalog_resources_process_sync_events_worker']['job_class'] = 'Ci::Catalog::Resources::ProcessSyncEventsWorker'
Settings.cron_jobs['namespaces_process_outdated_namespace_descendants_cron_worker'] ||= {}
Settings.cron_jobs['namespaces_process_outdated_namespace_descendants_cron_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['namespaces_process_outdated_namespace_descendants_cron_worker']['job_class'] = 'Namespaces::ProcessOutdatedNamespaceDescendantsCronWorker'

Gitlab.ee do
  Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= {}

@@ -555,14 +555,6 @@ module.exports = {
        minChunks: 2,
        reuseExistingChunk: true,
      },
      security_reports: {
        priority: 13,
        name: 'security_reports',
        chunks: 'initial',
        test: /[\\/](vue_shared[\\/](security_reports|license_compliance)|security_dashboard)[\\/]/,
        minChunks: 2,
        reuseExistingChunk: true,
      },
      vendors: {
        priority: 10,
        chunks: 'async',

@@ -1,77 +0,0 @@
# frozen_string_literal: true

PATTERNS = %w[
  %a.btn.btn-
  %button.btn.btn-
  .alert
  .alert-danger
  .alert-dismissible
  .alert-info
  .alert-link
  .alert-primary
  .alert-success
  .alert-warning
  .nav-tabs
  .toolbar-button-icon
  .tooltip
  .tooltip-inner
  <button
  <tabs
  bs-callout
  deprecated-modal
  initDeprecatedJQueryDropdown
  loading-button
  v-popover
  v-tooltip
  with_tooltip
].freeze

BLOCKING_PATTERNS = %w[
  pagination-button
  graphql_pagination
].freeze

def get_added_lines(files)
  lines = []
  files.each do |file|
    lines += helper.changed_lines(file).select { |line| %r{^[+]}.match?(line) }
  end
  lines
end

changed_vue_haml_files = helper.changed_files(/.vue$|.haml$/)

return if changed_vue_haml_files.empty?

changed_lines_in_mr = get_added_lines(changed_vue_haml_files)
deprecated_components_in_mr = PATTERNS.select { |pattern| changed_lines_in_mr.any? { |line| line[pattern] } }
blocking_components_in_mr = BLOCKING_PATTERNS.select { |pattern| changed_lines_in_mr.any? { |line| line[pattern] } }

return if (deprecated_components_in_mr + blocking_components_in_mr).empty?

markdown(<<~MARKDOWN)
  ## Deprecated components

MARKDOWN

if blocking_components_in_mr.any?
  markdown(<<~MARKDOWN)
    These deprecated components have already been migrated and can no longer be used. Please use [Pajamas components](https://design.gitlab.com/components/overview) instead.

    * #{blocking_components_in_mr.join("\n* ")}

  MARKDOWN

  fail "This merge request contains deprecated components that have been migrated and can no longer be used. Please use Pajamas components instead."
end

if deprecated_components_in_mr.any?
  markdown(<<~MARKDOWN)
    These deprecated components are in the process of being migrated. Please consider using [Pajamas components](https://design.gitlab.com/components/overview) instead.

    * #{deprecated_components_in_mr.join("\n* ")}

  MARKDOWN

  warn "This merge request contains deprecated components. Please consider using Pajamas components instead."
end

@@ -7,4 +7,12 @@ feature_categories:
description: Dependency proxy blob files
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/10676
milestone: '11.11'
gitlab_schema: gitlab_main
gitlab_schema: gitlab_main_cell
allow_cross_joins:
  - gitlab_main_clusterwide
allow_cross_transactions:
  - gitlab_main_clusterwide
allow_cross_foreign_keys:
  - gitlab_main_clusterwide
sharding_key:
  group_id: namespaces

@@ -8,3 +8,5 @@ description: Group-level settings for the dependency proxy
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/10676
milestone: '11.11'
gitlab_schema: gitlab_main_cell
sharding_key:
  group_id: namespaces

@@ -7,4 +7,12 @@ feature_categories:
description: Group-level settings for dependency proxy cleanup policies
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68809
milestone: '14.3'
gitlab_schema: gitlab_main
gitlab_schema: gitlab_main_cell
allow_cross_joins:
  - gitlab_main_clusterwide
allow_cross_transactions:
  - gitlab_main_clusterwide
allow_cross_foreign_keys:
  - gitlab_main_clusterwide
sharding_key:
  group_id: namespaces

@@ -7,4 +7,12 @@ feature_categories:
description: Dependency proxy manifest files
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/48535
milestone: '13.7'
gitlab_schema: gitlab_main
gitlab_schema: gitlab_main_cell
allow_cross_joins:
  - gitlab_main_clusterwide
allow_cross_transactions:
  - gitlab_main_clusterwide
allow_cross_foreign_keys:
  - gitlab_main_clusterwide
sharding_key:
  group_id: namespaces

@@ -9,3 +9,5 @@ description: Project settings related to security features.
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/32577
milestone: '13.1'
gitlab_schema: gitlab_main_cell
sharding_key:
  project_id: projects

@@ -0,0 +1,12 @@
# frozen_string_literal: true

class AddWorkItemsDatesSourcesFixedDatesFields < Gitlab::Database::Migration[2.2]
  milestone '16.9'

  def change
    change_table :work_item_dates_sources do |t|
      t.date :start_date_fixed
      t.date :due_date_fixed
    end
  end
end

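After this migration, `WorkItems::DatesSource` gains two plain date columns; a small sketch of how they could be set (the `work_item` and `namespace` records and the attribute names used for the associations are assumptions for illustration):

```ruby
# start_date_fixed / due_date_fixed hold user-fixed dates, while the existing
# *_sourcing_* columns keep tracking where rolled-up values come from.
WorkItems::DatesSource.new(
  work_item: work_item,
  namespace: namespace,
  start_date_fixed: Date.new(2024, 1, 1),
  due_date_fixed: Date.new(2024, 3, 31)
)
```
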
@@ -0,0 +1 @@
f72f09fa71f990cebfc2381f6c2b804826d3dee3c6756fdc6547b32f7d67c488

@@ -25966,7 +25966,9 @@ CREATE TABLE work_item_dates_sources (
    start_date_sourcing_work_item_id bigint,
    start_date_sourcing_milestone_id bigint,
    due_date_sourcing_work_item_id bigint,
    due_date_sourcing_milestone_id bigint
    due_date_sourcing_milestone_id bigint,
    start_date_fixed date,
    due_date_fixed date
);

CREATE TABLE work_item_hierarchy_restrictions (

@@ -8564,6 +8564,7 @@ Input type: `WorkItemCreateInput`
| <a id="mutationworkitemcreatemilestonewidget"></a>`milestoneWidget` | [`WorkItemWidgetMilestoneInput`](#workitemwidgetmilestoneinput) | Input for milestone widget. |
| <a id="mutationworkitemcreatenamespacepath"></a>`namespacePath` | [`ID`](#id) | Full path of the namespace(project or group) the work item is created in. |
| <a id="mutationworkitemcreateprojectpath"></a>`projectPath` **{warning-solid}** | [`ID`](#id) | **Deprecated:** Please use namespace_path instead. That will cover for both projects and groups. Deprecated in 15.10. |
| <a id="mutationworkitemcreaterolledupdateswidget"></a>`rolledupDatesWidget` **{warning-solid}** | [`WorkItemWidgetRolledupDatesInput`](#workitemwidgetrolledupdatesinput) | **Deprecated:** This feature is an Experiment. It can be changed or removed at any time. Introduced in 16.9. |
| <a id="mutationworkitemcreatetitle"></a>`title` | [`String!`](#string) | Title of the work item. |
| <a id="mutationworkitemcreateworkitemtypeid"></a>`workItemTypeId` | [`WorkItemsTypeID!`](#workitemstypeid) | Global ID of a work item type. |

@@ -8738,6 +8739,7 @@ Input type: `WorkItemUpdateInput`
| <a id="mutationworkitemupdatenoteswidget"></a>`notesWidget` | [`WorkItemWidgetNotesInput`](#workitemwidgetnotesinput) | Input for notes widget. |
| <a id="mutationworkitemupdatenotificationswidget"></a>`notificationsWidget` | [`WorkItemWidgetNotificationsUpdateInput`](#workitemwidgetnotificationsupdateinput) | Input for notifications widget. |
| <a id="mutationworkitemupdateprogresswidget"></a>`progressWidget` | [`WorkItemWidgetProgressInput`](#workitemwidgetprogressinput) | Input for progress widget. |
| <a id="mutationworkitemupdaterolledupdateswidget"></a>`rolledupDatesWidget` **{warning-solid}** | [`WorkItemWidgetRolledupDatesInput`](#workitemwidgetrolledupdatesinput) | **Deprecated:** This feature is an Experiment. It can be changed or removed at any time. Introduced in 16.9. |
| <a id="mutationworkitemupdatestartandduedatewidget"></a>`startAndDueDateWidget` | [`WorkItemWidgetStartAndDueDateUpdateInput`](#workitemwidgetstartandduedateupdateinput) | Input for start and due date widget. |
| <a id="mutationworkitemupdatestateevent"></a>`stateEvent` | [`WorkItemStateEvent`](#workitemstateevent) | Close or reopen a work item. |
| <a id="mutationworkitemupdatestatuswidget"></a>`statusWidget` | [`StatusInput`](#statusinput) | Input for status widget. |

@@ -29366,10 +29368,12 @@ Represents the rolledup dates widget.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="workitemwidgetrolledupdatesduedate"></a>`dueDate` | [`Date`](#date) | Due date for the work item. |
| <a id="workitemwidgetrolledupdatesduedatefixed"></a>`dueDateFixed` | [`Date`](#date) | Fixed due date for the work item. |
| <a id="workitemwidgetrolledupdatesduedateisfixed"></a>`dueDateIsFixed` | [`Boolean`](#boolean) | Indicates if the due date for the work item is fixed. |
| <a id="workitemwidgetrolledupdatesduedatesourcingmilestone"></a>`dueDateSourcingMilestone` | [`Milestone`](#milestone) | Indicates which milestone sources the rolledup due date. |
| <a id="workitemwidgetrolledupdatesduedatesourcingworkitem"></a>`dueDateSourcingWorkItem` | [`WorkItem`](#workitem) | Indicates which work_item sources the rolledup due date. |
| <a id="workitemwidgetrolledupdatesstartdate"></a>`startDate` | [`Date`](#date) | Start date for the work item. |
| <a id="workitemwidgetrolledupdatesstartdatefixed"></a>`startDateFixed` | [`Date`](#date) | Fixed start date for the work item. |
| <a id="workitemwidgetrolledupdatesstartdateisfixed"></a>`startDateIsFixed` | [`Boolean`](#boolean) | Indicates if the start date for the work item is fixed. |
| <a id="workitemwidgetrolledupdatesstartdatesourcingmilestone"></a>`startDateSourcingMilestone` | [`Milestone`](#milestone) | Indicates which milestone sources the rolledup start date. |
| <a id="workitemwidgetrolledupdatesstartdatesourcingworkitem"></a>`startDateSourcingWorkItem` | [`WorkItem`](#workitem) | Indicates which work_item sources the rolledup start date. |

@@ -34933,6 +34937,17 @@ Attributes for value stream stage.
| <a id="workitemwidgetprogressinputendvalue"></a>`endValue` | [`Int`](#int) | End value of the work item. |
| <a id="workitemwidgetprogressinputstartvalue"></a>`startValue` | [`Int`](#int) | Start value of the work item. |

### `WorkItemWidgetRolledupDatesInput`

#### Arguments

| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="workitemwidgetrolledupdatesinputduedatefixed"></a>`dueDateFixed` | [`Date`](#date) | Fixed due date for the work item. |
| <a id="workitemwidgetrolledupdatesinputduedateisfixed"></a>`dueDateIsFixed` | [`Boolean`](#boolean) | When due_date_fixed is not provided it defaults to `false`. |
| <a id="workitemwidgetrolledupdatesinputstartdatefixed"></a>`startDateFixed` | [`Date`](#date) | Fixed start date for the work item. |
| <a id="workitemwidgetrolledupdatesinputstartdateisfixed"></a>`startDateIsFixed` | [`Boolean`](#boolean) | When start_date_fixed is not provided it defaults to `false`. |

### `WorkItemWidgetStartAndDueDateUpdateInput`

#### Arguments

@@ -161,6 +161,8 @@ class PreparePrimaryKeyForPartitioning < Gitlab::Database::Migration[2.1]
    add_concurrent_index(TABLE_NAME, [:id, :partition_id], unique: true, name: NEW_INDEX_NAME)

    unswap_primary_key(TABLE_NAME, PRIMARY_KEY, OLD_INDEX_NAME)

    # We need to add back referenced FKs if any, eg: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/113725/diffs
  end
end
```

@@ -574,6 +574,14 @@ test causing the failure:

If these commands return `undercover: ✅ No coverage is missing in latest changes` then you can apply `pipeline:skip-undercoverage` to bypass pipeline failures.

### `pajamas_adoption` job

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141368) in GitLab 16.8.

The `pajamas_adoption` job runs the [Pajamas Adoption Scanner](https://gitlab-org.gitlab.io/frontend/pajamas-adoption-scanner/) in merge requests to prevent regressions in the adoption of the [Pajamas Design System](https://design.gitlab.com/).

The job fails if the scanner detects regressions caused by a merge request. If the regressions cannot be fixed in the merge request, add the `pipeline:skip-pajamas-adoption` label to the merge request, then retry the job.

## Test suite parallelization

Our current RSpec tests parallelization setup is as follows:

@@ -184,6 +184,14 @@ sudo /etc/init.d/gitlab start

## Bulk assign users to GitLab Duo Pro

DETAILS:
**Tier:** Premium, Ultimate
**Offering:** Self-managed

> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/142189) in GitLab 16.9.

The Rake task for bulk user assignment is available in GitLab 16.9 and later. For GitLab 16.8, use the script [`bulk_user_assignment.rb`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/ee/lib/duo_pro/bulk_user_assignment.rb) instead.

To perform bulk user assignment for GitLab Duo Pro, you can use the following Rake task:

```shell

@@ -5584,9 +5584,6 @@ msgstr ""
msgid "Analytics|Browser Family"
msgstr ""

msgid "Analytics|By GitLab"
msgstr ""

msgid "Analytics|Cancel"
msgstr ""

@@ -5617,6 +5614,9 @@ msgstr ""
msgid "Analytics|Create your visualization"
msgstr ""

msgid "Analytics|Created by GitLab"
msgstr ""

msgid "Analytics|Custom dashboards"
msgstr ""

@@ -14,5 +14,9 @@ FactoryBot.define do
        record.reload.update!(outdated_at: nil)
      end
    end

    trait :outdated do
      outdated_at { Time.current }
    end
  end
end

@@ -1,10 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe WorkItems::WorkItemsFinder, feature_category: :team_planning do
  using RSpec::Parameterized::TableSyntax
  include_context 'WorkItemsFinder context'

  it_behaves_like 'issues or work items finder', :work_item, 'WorkItemsFinder#execute context'
end

@@ -1,7 +1,8 @@
import { GlButton, GlTable, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PackagesProtectionRules from '~/packages_and_registries/settings/project/components/packages_protection_rules.vue';

@@ -21,17 +22,16 @@ describe('Packages protection rules project settings', () => {
    projectPath: 'path',
  };
  const findSettingsBlock = () => wrapper.findComponent(SettingsBlock);
  const findTable = () => wrapper.findComponent(GlTable);
  const findTable = () => extendedWrapper(wrapper.findByRole('table', /protected packages/i));
  const findTableBody = () => extendedWrapper(findTable().findAllByRole('rowgroup').at(1));
  const findTableRow = (i) => extendedWrapper(findTableBody().findAllByRole('row').at(i));
  const findTableLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
  const findProtectionRuleForm = () => wrapper.findComponent(PackagesProtectionRuleForm);
  const findAddProtectionRuleButton = () => wrapper.findComponent(GlButton);
  const findTableRows = () => findTable().find('tbody').findAll('tr');
  const findAddProtectionRuleButton = () =>
    wrapper.findByRole('button', { name: /add package protection rule/i });

  const mountComponent = (mountFn = shallowMount, provide = defaultProvidedValues, config) => {
    wrapper = mountFn(PackagesProtectionRules, {
      stubs: {
        SettingsBlock,
      },
      provide,
      ...config,
    });

@@ -52,7 +52,7 @@ describe('Packages protection rules project settings', () => {
  };

  it('renders the setting block with table', async () => {
    createComponent();
    createComponent({ mountFn: mountExtended });

    await waitForPromises();

@@ -62,21 +62,21 @@ describe('Packages protection rules project settings', () => {

  describe('table package protection rules', () => {
    it('renders table with packages protection rules', async () => {
      createComponent({ mountFn: mount });
      createComponent({ mountFn: mountExtended });

      await waitForPromises();

      expect(findTable().exists()).toBe(true);

      packagesProtectionRulesData.forEach((protectionRule, i) => {
        expect(findTableRows().at(i).text()).toContain(protectionRule.packageNamePattern);
        expect(findTableRows().at(i).text()).toContain(protectionRule.packageType);
        expect(findTableRows().at(i).text()).toContain(protectionRule.pushProtectedUpToAccessLevel);
        expect(findTableRow(i).text()).toContain(protectionRule.packageNamePattern);
        expect(findTableRow(i).text()).toContain(protectionRule.packageType);
        expect(findTableRow(i).text()).toContain(protectionRule.pushProtectedUpToAccessLevel);
      });
    });

    it('displays table in busy state and shows loading icon inside table', async () => {
      createComponent({ mountFn: mount });
      createComponent({ mountFn: mountExtended });

      expect(findTableLoadingIcon().exists()).toBe(true);
      expect(findTableLoadingIcon().attributes('aria-label')).toBe('Loading');

@@ -88,37 +88,29 @@ describe('Packages protection rules project settings', () => {
      expect(findTableLoadingIcon().exists()).toBe(false);
      expect(findTable().attributes('aria-busy')).toBe('false');
    });

    it('renders table', async () => {
      createComponent();

      await waitForPromises();

      expect(findTable().exists()).toBe(true);
    });
  });

  it('does not initially render package protection form', async () => {
    createComponent();
    createComponent({ mountFn: mountExtended });

    await waitForPromises();

    expect(findAddProtectionRuleButton().exists()).toBe(true);
    expect(findAddProtectionRuleButton().isVisible()).toBe(true);
    expect(findProtectionRuleForm().exists()).toBe(false);
  });

  describe('button "add protection rule"', () => {
  describe('button "Add protection rule"', () => {
    it('button exists', async () => {
      createComponent();
      createComponent({ mountFn: mountExtended });

      await waitForPromises();

      expect(findAddProtectionRuleButton().exists()).toBe(true);
      expect(findAddProtectionRuleButton().isVisible()).toBe(true);
    });

    describe('when button is clicked', () => {
      beforeEach(async () => {
        createComponent({ mountFn: mount });
        createComponent({ mountFn: mountExtended });

        await waitForPromises();

@@ -126,7 +118,7 @@ describe('Packages protection rules project settings', () => {
      });

      it('renders package protection form', () => {
        expect(findProtectionRuleForm().exists()).toBe(true);
        expect(findProtectionRuleForm().isVisible()).toBe(true);
      });

      it('disables the button "add protection rule"', () => {

@@ -141,14 +133,14 @@ describe('Packages protection rules project settings', () => {
    beforeEach(async () => {
      resolver = jest.fn().mockResolvedValue(packagesProtectionRuleQueryPayload());

      createComponent({ resolver, mountFn: mount });
      createComponent({ resolver, mountFn: mountExtended });

      await waitForPromises();

      await findAddProtectionRuleButton().trigger('click');
    });

    it("handles event 'submit'", async () => {
    it('handles event "submit"', async () => {
      await findProtectionRuleForm().vm.$emit('submit');

      expect(resolver).toHaveBeenCalledTimes(2);

@@ -157,7 +149,7 @@ describe('Packages protection rules project settings', () => {
      expect(findAddProtectionRuleButton().attributes('disabled')).not.toBeDefined();
    });

    it("handles event 'cancel'", async () => {
    it('handles event "cancel"', async () => {
      await findProtectionRuleForm().vm.$emit('cancel');

      expect(resolver).toHaveBeenCalledTimes(1);

@@ -65,4 +65,44 @@ RSpec.describe Namespaces::Descendants, feature_category: :database do
      end
    end
  end

  describe '.load_outdated_batch' do
    let_it_be(:cache1) { create(:namespace_descendants, :outdated) }
    let_it_be(:cache2) { create(:namespace_descendants, :up_to_date) }
    let_it_be(:cache3) { create(:namespace_descendants, :outdated) }
    let_it_be(:cache4) { create(:namespace_descendants, :outdated) }
    let_it_be(:cache5) { create(:namespace_descendants, :up_to_date) }

    it 'returns outdated namespace_descendants ids' do
      ids = described_class.load_outdated_batch(2)

      expect(ids.size).to eq(2)
      expect([cache1.namespace_id, cache3.namespace_id, cache4.namespace_id]).to include(*ids)

      expect(described_class.load_outdated_batch(10)).to match_array([cache1.namespace_id, cache3.namespace_id,
        cache4.namespace_id])
    end
  end

  describe '.upsert_with_consistent_data' do
    let_it_be(:cache) { create(:namespace_descendants, :outdated, calculated_at: nil, traversal_ids: [100, 200]) }

    it 'updates the namespace descendant record', :freeze_time do
      described_class.upsert_with_consistent_data(
        namespace: cache.namespace,
        self_and_descendant_group_ids: [1, 2, 3],
        all_project_ids: [5, 6, 7]
      )

      cache.reload

      expect(cache).to have_attributes(
        traversal_ids: cache.namespace.traversal_ids,
        self_and_descendant_group_ids: [1, 2, 3],
        all_project_ids: [5, 6, 7],
        outdated_at: nil,
        calculated_at: Time.current
      )
    end
  end
end

@@ -3032,9 +3032,54 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr

        expect(project.project_repository).to have_attributes(
          disk_path: project.disk_path,
          shard_name: project.repository_storage
          shard_name: project.repository_storage,
          object_format: 'sha1'
        )
      end

      context 'when repository is missing' do
        let(:project) { create(:project) }

        it 'sets a default sha1 object format' do
          project.track_project_repository

          expect(project.project_repository).to have_attributes(
            disk_path: project.disk_path,
            shard_name: project.repository_storage,
            object_format: 'sha1'
          )
        end
      end

      context 'when repository has sha256 object format' do
        let(:project) { create(:project, :empty_repo, object_format: 'sha256') }

        it 'tracks a correct object format' do
          project.track_project_repository

          expect(project.project_repository).to have_attributes(
            disk_path: project.disk_path,
            shard_name: project.repository_storage,
            object_format: 'sha256'
          )
        end

        context 'when feature flag "store_object_format" is disabled' do
          before do
            stub_feature_flags(store_object_format: false)
          end

          it 'tracks a SHA1 object format' do
            project.track_project_repository

            expect(project.project_repository).to have_attributes(
              disk_path: project.disk_path,
              shard_name: project.repository_storage,
              object_format: 'sha1'
            )
          end
        end
      end
    end

    context 'when a tracking entry exists' do

@@ -3048,12 +3093,14 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
      it 'updates the project storage location' do
        allow(project).to receive(:disk_path).and_return('fancy/new/path')
        allow(project).to receive(:repository_storage).and_return('foo')
        allow(project.repository).to receive(:object_format).and_return('sha1')

        project.track_project_repository

        expect(project.project_repository).to have_attributes(
          disk_path: 'fancy/new/path',
          shard_name: 'foo'
          shard_name: 'foo',
          object_format: 'sha1'
        )
      end

@@ -3062,6 +3109,7 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr

        allow(project).to receive(:disk_path).and_return('fancy/new/path')
        allow(project).to receive(:repository_storage).and_return('foo')
        allow(project.repository).to receive(:object_format).and_return('sha1')

        project.track_project_repository

@@ -3071,13 +3119,13 @@ RSpec.describe Project, factory_default: :keep, feature_category: :groups_and_pr
    end

    context 'with projects on legacy storage' do
      let(:project) { create(:project, :legacy_storage) }
      let_it_be_with_reload(:project) { create(:project, :empty_repo, :legacy_storage) }

      it_behaves_like 'tracks storage location'
    end

    context 'with projects on hashed storage' do
      let(:project) { create(:project) }
      let_it_be_with_reload(:project) { create(:project, :empty_repo) }

      it_behaves_like 'tracks storage location'
    end

@@ -4024,9 +4024,7 @@ RSpec.describe Repository, feature_category: :source_code_management do
    end

    context 'for missing repository' do
      before do
        allow(repository).to receive(:exists?).and_return(false)
      end
      let(:project) { build_stubbed(:project) }

      it { is_expected.to be_nil }
    end

@@ -0,0 +1,64 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Namespaces::UpdateDenormalizedDescendantsService, feature_category: :database do
  let_it_be(:group) { create(:group) }
  let_it_be(:subgroup) { create(:group, parent: group) }
  let_it_be(:subsub_group) { create(:group, parent: subgroup) }

  let_it_be(:project1) { create(:project, group: subgroup) }
  let_it_be(:project2) { create(:project, group: subsub_group) }

  let_it_be_with_reload(:cache) do
    create(:namespace_descendants,
      :outdated,
      calculated_at: nil,
      namespace: subgroup,
      # outdated values:
      traversal_ids: [group.id + 100, subgroup.id],
      self_and_descendant_group_ids: [],
      all_project_ids: [project1.id]
    )
  end

  def run_service(id)
    described_class.new(namespace_id: id).execute
  end

  it 'updates an outdated cache' do
    run_service(subgroup.id)

    cache.reload

    expect(cache).to have_attributes(
      traversal_ids: [group.id, subgroup.id],
      self_and_descendant_group_ids: [subgroup.id, subsub_group.id],
      all_project_ids: [project1.id, project2.id]
    )
  end

  context 'when the namespace was removed in the meantime' do
    it 'removes the cache record' do
      namespace_id = non_existing_record_id
      create(:namespace_descendants, namespace_id: namespace_id)

      run_service(namespace_id)

      record = Namespaces::Descendants.find_by(namespace_id: namespace_id)
      expect(record).to eq(nil)
    end
  end

  context 'when passing in a non existing namespace' do
    it 'does nothing' do
      expect { run_service(non_existing_record_id) }.not_to change { Namespaces::Descendants.all.sort }
    end
  end

  context 'when passing in a namespace without cache' do
    it 'does nothing' do
      expect { run_service(group) }.not_to change { Namespaces::Descendants.all.sort }
    end
  end
end

@@ -822,6 +822,7 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an

    it 'creates a repository with SHA256 commit hashes', :aggregate_failures do
      expect(project.repository.commit_count).to be(1)
      expect(project.project_repository.object_format).to eq 'sha256'
      expect(project.commit.id.size).to eq 64
    end

@@ -832,6 +833,7 @@ RSpec.describe Projects::CreateService, '#execute', feature_category: :groups_an

    it 'creates a repository with default SHA1 commit hash' do
      expect(project.repository.commit_count).to be(1)
      expect(project.project_repository.object_format).to eq 'sha1'
      expect(project.commit.id.size).to eq 40
    end
  end

@@ -0,0 +1,46 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Namespaces::ProcessOutdatedNamespaceDescendantsCronWorker, feature_category: :database do
  let(:worker) { described_class.new }

  subject(:run_job) { worker.perform }

  include_examples 'an idempotent worker' do
    it 'executes successfully' do
      expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { processed_namespaces: 0 })

      run_job
    end
  end

  context 'when there are records to be processed' do
    let_it_be_with_reload(:outdated1) { create(:namespace_descendants, :outdated) }
    let_it_be_with_reload(:outdated2) { create(:namespace_descendants, :outdated) }

    it 'invokes the service and increments the processed_namespaces' do
      expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { processed_namespaces: 2 })

      run_job

      outdated1.reload
      outdated2.reload

      expect(outdated1.outdated_at).to eq(nil)
      expect(outdated2.outdated_at).to eq(nil)
    end

    context 'when time limit is reached' do
      it 'stops the processing' do
        allow_next_instance_of(Analytics::CycleAnalytics::RuntimeLimiter) do |runtime_limiter|
          allow(runtime_limiter).to receive(:over_time?).and_return(true) # stop after the 1st record
        end

        expect(worker).to receive(:log_extra_metadata_on_done).with(:result, { processed_namespaces: 1 })

        run_job
      end
    end
  end
end