Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent 3fc19e1442
commit 320d8adff1
@@ -48,6 +48,11 @@ GITALY_SERVER_VERSION @project_278964_bot6 @gitlab-org/maintainers/rails-backend
/spec/frontend_integration/
/ee/spec/frontend_integration/

[Clickhouse] @gitlab-org/maintainers/clickhouse
/db/clickhouse/
/ee/db/clickhouse/
/**/click(_|-)?house/

[Database] @gitlab-org/maintainers/database
/db/
/ee/db/

@@ -1001,7 +1001,6 @@ RSpec/VerifiedDoubles:
- 'spec/workers/create_commit_signature_worker_spec.rb'
- 'spec/workers/environments/auto_stop_worker_spec.rb'
- 'spec/workers/error_tracking_issue_link_worker_spec.rb'
- 'spec/workers/gitlab/github_import/advance_stage_worker_spec.rb'
- 'spec/workers/gitlab/github_import/import_diff_note_worker_spec.rb'
- 'spec/workers/gitlab/github_import/import_issue_worker_spec.rb'
- 'spec/workers/gitlab/github_import/import_note_worker_spec.rb'

@@ -2,64 +2,6 @@
# Cop supports --autocorrect.
Style/PercentLiteralDelimiters:
Exclude:
- 'app/models/lfs_download_object.rb'
- 'app/models/namespace.rb'
- 'app/models/namespace/root_storage_statistics.rb'
- 'app/models/note.rb'
- 'app/models/notification_setting.rb'
- 'app/models/project.rb'
- 'app/models/project_feature.rb'
- 'app/models/project_setting.rb'
- 'app/models/releases/link.rb'
- 'app/models/repository.rb'
- 'app/models/resource_label_event.rb'
- 'app/models/resource_state_event.rb'
- 'app/models/resource_timebox_event.rb'
- 'app/models/user.rb'
- 'app/models/user_interacted_project.rb'
- 'app/presenters/dev_ops_report/metric_presenter.rb'
- 'app/presenters/search_service_presenter.rb'
- 'app/serializers/pipeline_serializer.rb'
- 'app/services/application_settings/update_service.rb'
- 'app/services/auth/container_registry_authentication_service.rb'
- 'app/services/boards/update_service.rb'
- 'app/services/bulk_imports/file_download_service.rb'
- 'app/services/ci/update_instance_variables_service.rb'
- 'app/services/clusters/kubernetes/create_or_update_service_account_service.rb'
- 'app/services/feature_flags/base_service.rb'
- 'app/services/files/multi_service.rb'
- 'app/services/import/bitbucket_server_service.rb'
- 'app/services/import/fogbugz_service.rb'
- 'app/services/import/github_service.rb'
- 'app/services/import/gitlab_projects/file_acquisition_strategies/remote_file.rb'
- 'app/services/import/gitlab_projects/file_acquisition_strategies/remote_file_s3.rb'
- 'app/services/import_export_clean_up_service.rb'
- 'app/services/incident_management/pager_duty/process_webhook_service.rb'
- 'app/services/issuable/bulk_update_service.rb'
- 'app/services/merge_requests/update_service.rb'
- 'app/services/packages/debian/generate_distribution_service.rb'
- 'app/services/preview_markdown_service.rb'
- 'app/services/projects/apple_target_platform_detector_service.rb'
- 'app/services/projects/download_service.rb'
- 'app/services/projects/hashed_storage/migrate_attachments_service.rb'
- 'app/services/projects/lfs_pointers/lfs_object_download_list_service.rb'
- 'app/services/projects/update_service.rb'
- 'app/services/repositories/base_service.rb'
- 'app/services/repository_archive_clean_up_service.rb'
- 'app/services/resource_access_tokens/create_service.rb'
- 'app/services/resource_access_tokens/revoke_service.rb'
- 'app/services/search/global_service.rb'
- 'app/services/search/project_service.rb'
- 'app/services/snippets/update_service.rb'
- 'app/services/todos/destroy/destroyed_issuable_service.rb'
- 'app/services/todos/destroy/entity_leave_service.rb'
- 'app/uploaders/design_management/design_v432x230_uploader.rb'
- 'app/uploaders/gitlab_uploader.rb'
- 'app/validators/addressable_url_validator.rb'
- 'app/validators/gitlab/zoom_url_validator.rb'
- 'app/validators/json_schema_validator.rb'
- 'app/workers/members_destroyer/unassign_issuables_worker.rb'
- 'app/workers/projects/record_target_platforms_worker.rb'
- 'config/application.rb'
- 'config/boot.rb'
- 'config/environments/production.rb'

@@ -970,7 +912,6 @@ Style/PercentLiteralDelimiters:
- 'spec/views/projects/commit/branches.html.haml_spec.rb'
- 'spec/workers/concerns/worker_context_spec.rb'
- 'spec/workers/container_registry/migration/enqueuer_worker_spec.rb'
- 'spec/workers/gitlab/github_import/advance_stage_worker_spec.rb'
- 'spec/workers/groups/update_statistics_worker_spec.rb'
- 'spec/workers/jira_connect/sync_branch_worker_spec.rb'
- 'spec/workers/post_receive_spec.rb'

@@ -426,6 +426,7 @@ export default {
<gl-empty-state
v-else-if="stateToRender === $options.stateMap.emptyTab"
:svg-path="noPipelinesSvgPath"
:svg-height="150"
:title="emptyTabMessage"
/>

@@ -63,6 +63,7 @@ export default {
:primary-button-link="createUrl"
:primary-button-text="$options.i18n.createServiceAccount"
:svg-path="emptyIllustrationUrl"
:svg-height="150"
/>

<div v-else>

@@ -729,3 +729,11 @@ export const isCurrentUser = (userId) => {

return Number(userId) === currentUserId;
};

/**
* Clones an object via JSON stringifying and re-parsing.
* This ensures object references are not persisted (e.g. unlike lodash cloneDeep)
*/
export const cloneWithoutReferences = (obj) => {
return JSON.parse(JSON.stringify(obj));
};

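The cloneWithoutReferences helper added above is easiest to understand with a small usage sketch. The snippet below is illustrative only and not part of the commit; the import path is assumed from the common_utils module that the spec later in this commit exercises.

// Spread copies the reference to nested objects; cloneWithoutReferences does not.
// Note: because it is a JSON round-trip, only JSON-serializable values survive
// (functions, undefined values and class instances are dropped or flattened).
import { cloneWithoutReferences } from '~/lib/utils/common_utils';

const original = { nested: { peanut: 'butter' } };
const shallowCopy = { ...original };
const clone = cloneWithoutReferences(original);

console.log(shallowCopy.nested === original.nested); // true - the reference is shared
console.log(clone.nested === original.nested); // false - no reference is persisted
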
@@ -3,6 +3,7 @@ import { GlAlert, GlButton, GlLink, GlLoadingIcon } from '@gitlab/ui';
import { sprintf } from '~/locale';
import { updateRepositorySize } from '~/api/projects_api';
import { numberToHumanSize } from '~/lib/utils/number_utils';
import SectionedPercentageBar from '~/usage_quotas/components/sectioned_percentage_bar.vue';
import {
ERROR_MESSAGE,
LEARN_MORE_LABEL,

@@ -19,7 +20,6 @@ import {
} from '../constants';
import getProjectStorageStatistics from '../queries/project_storage.query.graphql';
import { getStorageTypesFromProjectStatistics, descendingStorageUsageSort } from '../utils';
import UsageGraph from './usage_graph.vue';
import ProjectStorageDetail from './project_storage_detail.vue';

export default {

@@ -29,8 +29,8 @@ export default {
GlButton,
GlLink,
GlLoadingIcon,
UsageGraph,
ProjectStorageDetail,
SectionedPercentageBar,
},
inject: ['projectPath'],
apollo: {

@@ -88,6 +88,67 @@ export default {
storageTypeHelpPaths,
);
},

sections() {
if (!this.project?.statistics) {
return null;
}

const {
buildArtifactsSize,
lfsObjectsSize,
packagesSize,
repositorySize,
storageSize,
wikiSize,
snippetsSize,
} = this.project.statistics;

if (storageSize === 0) {
return null;
}

return [
{
id: 'repository',
value: repositorySize,
},
{
id: 'lfsObjects',
value: lfsObjectsSize,
},
{
id: 'packages',
value: packagesSize,
},
{
id: 'buildArtifacts',
value: buildArtifactsSize,
},
{
id: 'wiki',
value: wikiSize,
},
{
id: 'snippets',
value: snippetsSize,
},
]
.filter((data) => data.value !== 0)
.sort(descendingStorageUsageSort('value'))
.map((storageType) => {
const storageTypeExtraData = PROJECT_STORAGE_TYPES.find(
(type) => storageType.id === type.id,
);
const label = storageTypeExtraData?.name;

return {
label,
formattedValue: numberToHumanSize(storageType.value),
...storageType,
};
});
},
},
methods: {
clearError() {

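For orientation, here is a hypothetical sketch (not part of the commit) of the array shape the new sections() computed property hands to sectioned-percentage-bar. The label strings and formatted sizes are assumptions for illustration; in the component they come from PROJECT_STORAGE_TYPES and numberToHumanSize.

// Given project statistics like these...
const statistics = {
  storageSize: 5120,
  repositorySize: 4096,
  packagesSize: 1024,
  lfsObjectsSize: 0,
  buildArtifactsSize: 0,
  wikiSize: 0,
  snippetsSize: 0,
};

// ...sections() drops the zero-byte types, sorts the rest by descending size,
// and decorates each entry, producing roughly:
// [
//   { id: 'repository', value: 4096, label: 'Repository', formattedValue: '4.00 KiB' },
//   { id: 'packages', value: 1024, label: 'Packages', formattedValue: '1.00 KiB' },
// ]
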
@@ -123,11 +184,11 @@ export default {
{{ error }}
</gl-alert>
<div v-else>
<div class="gl-pt-5 gl-px-3">
<div class="gl-display-flex gl-justify-content-space-between gl-align-items-center">
<div class="gl-pt-5">
<div class="gl-display-flex gl-justify-content-space-between">
<div>
<h2 class="gl-m-0 gl-font-lg gl-font-weight-bold">{{ $options.TOTAL_USAGE_TITLE }}</h2>
<p class="gl-m-0 gl-text-gray-400">
<h4 class="gl-font-lg gl-mb-3 gl-mt-0">{{ $options.TOTAL_USAGE_TITLE }}</h4>
<p>
{{ $options.TOTAL_USAGE_SUBTITLE }}
<gl-link
:href="$options.usageQuotasHelpPaths.usageQuotas"

@@ -137,13 +198,16 @@ export default {
>
</p>
</div>
<p class="gl-m-0 gl-font-size-h-display gl-font-weight-bold" data-testid="total-usage">
<p
class="gl-m-0 gl-font-size-h-display gl-font-weight-bold gl-white-space-nowrap"
data-testid="total-usage"
>
{{ totalUsage }}
</p>
</div>
</div>
<div v-if="!isStatisticsEmpty" class="gl-w-full">
<usage-graph :root-storage-statistics="project.statistics" :limit="0" />
<sectioned-percentage-bar class="gl-mt-5" :sections="sections" />
</div>
<div class="gl-w-full gl-my-5">
<gl-button

@@ -1,135 +0,0 @@
<script>
import { numberToHumanSize } from '~/lib/utils/number_utils';
import { PROJECT_STORAGE_TYPES } from '../constants';
import { descendingStorageUsageSort } from '../utils';

export default {
name: 'UsageGraph',
props: {
rootStorageStatistics: {
required: true,
type: Object,
},
limit: {
required: true,
type: Number,
},
},
computed: {
storageTypes() {
const {
buildArtifactsSize,
lfsObjectsSize,
packagesSize,
repositorySize,
storageSize,
wikiSize,
snippetsSize,
} = this.rootStorageStatistics;

if (storageSize === 0) {
return null;
}

return [
{
id: 'repository',
style: this.usageStyle(this.barRatio(repositorySize)),
class: 'gl-bg-data-viz-blue-500',
size: repositorySize,
},
{
id: 'lfsObjects',
style: this.usageStyle(this.barRatio(lfsObjectsSize)),
class: 'gl-bg-data-viz-orange-600',
size: lfsObjectsSize,
},
{
id: 'packages',
style: this.usageStyle(this.barRatio(packagesSize)),
class: 'gl-bg-data-viz-aqua-500',
size: packagesSize,
},
{
id: 'buildArtifacts',
style: this.usageStyle(this.barRatio(buildArtifactsSize)),
class: 'gl-bg-data-viz-green-500',
size: buildArtifactsSize,
},
{
id: 'wiki',
style: this.usageStyle(this.barRatio(wikiSize)),
class: 'gl-bg-data-viz-magenta-500',
size: wikiSize,
},
{
id: 'snippets',
style: this.usageStyle(this.barRatio(snippetsSize)),
class: 'gl-bg-data-viz-orange-800',
size: snippetsSize,
},
]
.filter((data) => data.size !== 0)
.sort(descendingStorageUsageSort('size'))
.map((storageType) => {
const storageTypeExtraData = PROJECT_STORAGE_TYPES.find(
(type) => storageType.id === type.id,
);
const name = storageTypeExtraData?.name;

return {
name,
...storageType,
};
});
},
},
methods: {
formatSize(size) {
return numberToHumanSize(size);
},
usageStyle(ratio) {
return { flex: ratio };
},
barRatio(size) {
let max = this.rootStorageStatistics.storageSize;

if (this.limit !== 0 && max <= this.limit) {
max = this.limit;
}

return size / max;
},
},
};
</script>
<template>
<div v-if="storageTypes" class="gl-display-flex gl-flex-direction-column w-100">
<div class="gl-h-6 gl-my-5 gl-bg-gray-50 gl-rounded-base gl-display-flex">
<div
v-for="storageType in storageTypes"
:key="storageType.name"
class="storage-type-usage gl-h-full gl-display-inline-block"
:class="storageType.class"
:style="storageType.style"
data-testid="storage-type-usage"
></div>
</div>
<div class="row gl-mb-4">
<div
v-for="storageType in storageTypes"
:key="storageType.name"
class="col-md-auto gl-display-flex gl-align-items-center"
data-testid="storage-type-legend"
>
<div class="gl-h-2 gl-w-5 gl-mr-2 gl-display-inline-block" :class="storageType.class"></div>
<span class="gl-mr-2 gl-font-weight-bold gl-font-sm">
{{ storageType.name }}
</span>
<span class="gl-text-gray-500 gl-font-sm">
{{ formatSize(storageType.size) }}
</span>
</div>
</div>
</div>
</template>

@@ -547,6 +547,7 @@ $tabs-holder-z-index: 250;
}

.mr-widget-section:not(:first-child) > div,
.mr-widget-section:not(:first-child) > section,
.mr-widget-section .mr-widget-section > div {
border-top: solid 1px var(--border-color, $border-color);
}

@@ -1,19 +0,0 @@
@import 'mixins_and_variables_and_functions';

.storage-type-usage {
&:first-child {
@include gl-rounded-top-left-base;
@include gl-rounded-bottom-left-base;
}

&:last-child {
@include gl-rounded-top-right-base;
@include gl-rounded-bottom-right-base;
}

&:not(:last-child) {
@include gl-border-r-2;
@include gl-border-r-solid;
border-right-color: var(--white, $white);
}
}

@@ -283,11 +283,12 @@
$theme-color,
$theme-color-darkest,
) {
--sidebar-background: #{mix(white, $theme-color-lightest, 50%)};
--transparent-white-16: rgba(255, 255, 255, 0.16);
--transparent-white-24: rgba(255, 255, 255, 0.24);

.super-sidebar {
background-color: $theme-color-lightest;
background-color: var(--sidebar-background);
}

.super-sidebar .user-bar {

@@ -9,7 +9,7 @@ class LfsDownloadObject

validates :oid, format: { with: /\A\h{64}\z/ }
validates :size, numericality: { greater_than_or_equal_to: 0 }
validates :link, public_url: { protocols: %w(http https) }
validates :link, public_url: { protocols: %w[http https] }
validate :headers_must_be_hash

def initialize(oid:, size:, link:, headers: {})

@@ -753,7 +753,7 @@ class Namespace < ApplicationRecord
end

def reload_namespace_details
return unless !project_namespace? && (previous_changes.keys & %w(description description_html cached_markdown_version)).any? && namespace_details.present?
return unless !project_namespace? && (previous_changes.keys & %w[description description_html cached_markdown_version]).any? && namespace_details.present?

namespace_details.reset
end

@@ -2,7 +2,7 @@

class Namespace::RootStorageStatistics < ApplicationRecord
SNIPPETS_SIZE_STAT_NAME = 'snippets_size'
STATISTICS_ATTRIBUTES = %W(
STATISTICS_ATTRIBUTES = %W[
storage_size
repository_size
wiki_size

@@ -12,7 +12,7 @@ class Namespace::RootStorageStatistics < ApplicationRecord
#{SNIPPETS_SIZE_STAT_NAME}
pipeline_artifacts_size
uploads_size
).freeze
].freeze

self.primary_key = :namespace_id

@@ -36,7 +36,7 @@ class Namespace::RootStorageStatistics < ApplicationRecord
end

def self.namespace_statistics_attributes
%w(storage_size dependency_proxy_size)
%w[storage_size dependency_proxy_size]
end

private

@@ -169,7 +169,7 @@ class Note < ApplicationRecord
end
end

scope :diff_notes, -> { where(type: %w(LegacyDiffNote DiffNote)) }
scope :diff_notes, -> { where(type: %w[LegacyDiffNote DiffNote]) }
scope :new_diff_notes, -> { where(type: 'DiffNote') }
scope :non_diff_notes, -> { where(type: NON_DIFF_NOTE_TYPES) }

@@ -60,7 +60,7 @@ class NotificationSetting < ApplicationRecord
end

def self.allowed_fields(source = nil)
NotificationSetting.email_events(source).dup + %i(level notification_email)
NotificationSetting.email_events(source).dup + %i[level notification_email]
end

def email_events

@@ -1,36 +0,0 @@
# frozen_string_literal: true

module PerformanceMonitoring
class PrometheusPanelGroup
include ActiveModel::Model

attr_accessor :group, :priority, :panels

validates :group, presence: true
validates :panels, array_members: { member_class: PerformanceMonitoring::PrometheusPanel }

class << self
def from_json(json_content)
build_from_hash(json_content).tap(&:validate!)
end

private

def build_from_hash(attributes)
return new unless attributes.is_a?(Hash)

new(
group: attributes['group'],
priority: attributes['priority'],
panels: initialize_children_collection(attributes['panels'])
)
end

def initialize_children_collection(children)
return unless children.is_a?(Array)

children.map { |panels| PerformanceMonitoring::PrometheusPanel.from_json(panels) }
end
end
end
end

@@ -68,10 +68,10 @@ class Project < ApplicationRecord
}.freeze

VALID_IMPORT_PORTS = [80, 443].freeze
VALID_IMPORT_PROTOCOLS = %w(http https git).freeze
VALID_IMPORT_PROTOCOLS = %w[http https git].freeze

VALID_MIRROR_PORTS = [22, 80, 443].freeze
VALID_MIRROR_PROTOCOLS = %w(http https ssh git).freeze
VALID_MIRROR_PROTOCOLS = %w[http https ssh git].freeze

SORTING_PREFERENCE_FIELD = :projects_sort
MAX_BUILD_TIMEOUT = 1.month

@@ -1667,7 +1667,7 @@ class Project < ApplicationRecord
return unless Gitlab::Email::IncomingEmail.supports_issue_creation? && author

# check since this can come from a request parameter
return unless %w(issue merge_request).include?(address_type)
return unless %w[issue merge_request].include?(address_type)

author.ensure_incoming_email_token!

@@ -3495,11 +3495,11 @@ class Project < ApplicationRecord
end

def sync_project_namespace?
(changes.keys & %w(name path namespace_id namespace visibility_level shared_runners_enabled)).any? && project_namespace.present?
(changes.keys & %w[name path namespace_id namespace visibility_level shared_runners_enabled]).any? && project_namespace.present?
end

def reload_project_namespace_details
return unless (previous_changes.keys & %w(description description_html cached_markdown_version)).any? && project_namespace.namespace_details.present?
return unless (previous_changes.keys & %w[description description_html cached_markdown_version]).any? && project_namespace.namespace_details.present?

project_namespace.namespace_details.reset
end

@@ -201,11 +201,11 @@ class ProjectFeature < ApplicationRecord
self.errors.add(field, "cannot have higher visibility level than repository access level") if not_allowed
end

%i(merge_requests_access_level builds_access_level).each(&validator)
%i[merge_requests_access_level builds_access_level].each(&validator)
end

def feature_validation_exclusion
%i(pages package_registry)
%i[pages package_registry]
end

override :resource_member?

@@ -5,7 +5,7 @@ class ProjectSetting < ApplicationRecord
include EachBatch
include IgnorableColumns

ALLOWED_TARGET_PLATFORMS = %w(ios osx tvos watchos android).freeze
ALLOWED_TARGET_PLATFORMS = %w[ios osx tvos watchos android].freeze

belongs_to :project, inverse_of: :project_setting

@@ -11,7 +11,7 @@ module Releases
FILEPATH_REGEX = %r{\A\/[^\/](?!.*\/\/.*)[\-\.\w\/]+[\da-zA-Z]+\z}.freeze
FILEPATH_MAX_LENGTH = 128

validates :url, presence: true, addressable_url: { schemes: %w(http https ftp) }, uniqueness: { scope: :release }
validates :url, presence: true, addressable_url: { schemes: %w[http https ftp] }, uniqueness: { scope: :release }
validates :name, presence: true, uniqueness: { scope: :release }
validates :filepath, uniqueness: { scope: :release }, allow_blank: true
validate :filepath_format_valid?

@@ -47,20 +47,20 @@ class Repository
#
# For example, for entry `:commit_count` there's a method called `commit_count` which
# stores its data in the `commit_count` cache key.
CACHED_METHODS = %i(size recent_objects_size commit_count readme_path contribution_guide
CACHED_METHODS = %i[size recent_objects_size commit_count readme_path contribution_guide
changelog license_blob license_gitaly gitignore
gitlab_ci_yml branch_names tag_names branch_count
tag_count avatar exists? root_ref merged_branch_names
has_visible_content? issue_template_names_hash merge_request_template_names_hash
xcode_project? has_ambiguous_refs?).freeze
xcode_project? has_ambiguous_refs?].freeze

# Certain method caches should be refreshed when certain types of files are
# changed. This Hash maps file types (as returned by Gitlab::FileDetector) to
# the corresponding methods to call for refreshing caches.
METHOD_CACHES_FOR_FILE_TYPES = {
readme: %i(readme_path),
readme: %i[readme_path],
changelog: :changelog,
license: %i(license_blob license_gitaly),
license: %i[license_blob license_gitaly],
contributing: :contribution_guide,
gitignore: :gitignore,
gitlab_ci: :gitlab_ci_yml,

@@ -343,13 +343,13 @@ class Repository
end

def expire_tags_cache
expire_method_caches(%i(tag_names tag_count has_ambiguous_refs?))
expire_method_caches(%i[tag_names tag_count has_ambiguous_refs?])
@tags = nil
@tag_names_include = nil
end

def expire_branches_cache
expire_method_caches(%i(branch_names merged_branch_names branch_count has_visible_content? has_ambiguous_refs?))
expire_method_caches(%i[branch_names merged_branch_names branch_count has_visible_content? has_ambiguous_refs?])
expire_protected_branches_cache

@local_branches = nil

@@ -362,7 +362,7 @@ class Repository
end

def expire_statistics_caches
expire_method_caches(%i(size recent_objects_size commit_count))
expire_method_caches(%i[size recent_objects_size commit_count])
end

def expire_all_method_caches

@@ -370,7 +370,7 @@ class Repository
end

def expire_avatar_cache
expire_method_caches(%i(avatar))
expire_method_caches(%i[avatar])
end

# Refreshes the method caches of this repository.

@@ -411,19 +411,19 @@ class Repository
end

def expire_root_ref_cache
expire_method_caches(%i(root_ref))
expire_method_caches(%i[root_ref])
end

# Expires the cache(s) used to determine if a repository is empty or not.
def expire_emptiness_caches
return unless empty?

expire_method_caches(%i(has_visible_content?))
expire_method_caches(%i[has_visible_content?])
raw_repository.expire_has_local_branches_cache
end

def expire_exists_cache
expire_method_caches(%i(exists?))
expire_method_caches(%i[exists?])
end

# expire cache that doesn't depend on repository data (when expiring)

@@ -21,7 +21,7 @@ class ResourceLabelEvent < ResourceEvent
}

def self.issuable_attrs
%i(issue merge_request).freeze
%i[issue merge_request].freeze
end

def self.preload_label_subjects(events)

@@ -14,7 +14,7 @@ class ResourceStateEvent < ResourceEvent
after_create :issue_usage_metrics

def self.issuable_attrs
%i(issue merge_request).freeze
%i[issue merge_request].freeze
end

def issuable

@@ -16,7 +16,7 @@ class ResourceTimeboxEvent < ResourceEvent
after_create :issue_usage_metrics

def self.issuable_attrs
%i(issue merge_request).freeze
%i[issue merge_request].freeze
end

def issuable

@@ -61,7 +61,7 @@ class User < MainClusterwide::ApplicationRecord
:public_email
].freeze

FORBIDDEN_SEARCH_STATES = %w(blocked banned ldap_blocked).freeze
FORBIDDEN_SEARCH_STATES = %w[blocked banned ldap_blocked].freeze

INCOMING_MAIL_TOKEN_PREFIX = 'glimt-'
FEED_TOKEN_PREFIX = 'glft-'

@@ -1925,7 +1925,7 @@ class User < MainClusterwide::ApplicationRecord

def access_level=(new_level)
new_level = new_level.to_s
return unless %w(admin regular).include?(new_level)
return unless %w[admin regular].include?(new_level)

self.admin = (new_level == 'admin')
end

@@ -2425,7 +2425,7 @@ class User < MainClusterwide::ApplicationRecord
def update_highest_role?
return false unless persisted?

(previous_changes.keys & %w(state user_type)).any?
(previous_changes.keys & %w[state user_type]).any?
end

def update_highest_role_attribute

@@ -24,7 +24,7 @@ class UserInteractedProject < ApplicationRecord
}

cached_exists?(**attributes) do
where(attributes).exists? || UserInteractedProject.insert_all([attributes], unique_by: %w(project_id user_id))
where(attributes).exists? || UserInteractedProject.insert_all([attributes], unique_by: %w[project_id user_id])
true
end
end

@@ -93,52 +93,52 @@ module DevOpsReport
IdeaToProductionStep.new(
metric: metric,
title: 'Idea',
features: %w(issues)
features: %w[issues]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Issue',
features: %w(issues notes)
features: %w[issues notes]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Plan',
features: %w(milestones boards)
features: %w[milestones boards]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Code',
features: %w(merge_requests)
features: %w[merge_requests]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Commit',
features: %w(merge_requests)
features: %w[merge_requests]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Test',
features: %w(ci_pipelines)
features: %w[ci_pipelines]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Review',
features: %w(ci_pipelines environments)
features: %w[ci_pipelines environments]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Staging',
features: %w(environments deployments)
features: %w[environments deployments]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Production',
features: %w(deployments)
features: %w[deployments]
),
IdeaToProductionStep.new(
metric: metric,
title: 'Feedback',
features: %w(projects_prometheus_active service_desk_issues)
features: %w[projects_prometheus_active service_desk_issues]
)
]
end

@@ -17,7 +17,7 @@ class SearchServicePresenter < Gitlab::View::Presenter::Delegated
blobs: :with_web_entity_associations
}.freeze

SORT_ENABLED_SCOPES = %w(issues merge_requests epics).freeze
SORT_ENABLED_SCOPES = %w[issues merge_requests epics].freeze

delegator_override :search_objects
def search_objects

@@ -50,7 +50,7 @@ class PipelineSerializer < BaseSerializer
{
manual_actions: :metadata,
scheduled_actions: :metadata,
failed_builds: %i(project metadata),
failed_builds: %i[project metadata],
merge_request: {
source_project: [:route, { namespace: :route }],
target_project: [:route, { namespace: :route }]

@@ -6,7 +6,7 @@ module ApplicationSettings

attr_reader :params, :application_setting

MARKDOWN_CACHE_INVALIDATING_PARAMS = %w(asset_proxy_enabled asset_proxy_url asset_proxy_secret_key asset_proxy_whitelist).freeze
MARKDOWN_CACHE_INVALIDATING_PARAMS = %w[asset_proxy_enabled asset_proxy_url asset_proxy_secret_key asset_proxy_whitelist].freeze

def execute
result = update_settings

@@ -39,11 +39,11 @@ module Auth
end

def self.full_access_token(*names)
access_token(%w(*), names)
access_token(%w[*], names)
end

def self.import_access_token
access_token(%w(*), ['import'], 'registry')
access_token(%w[*], ['import'], 'registry')
end

def self.pull_access_token(*names)

@@ -2,7 +2,7 @@

module Boards
class UpdateService < Boards::BaseService
PERMITTED_PARAMS = %i(name hide_backlog_list hide_closed_list).freeze
PERMITTED_PARAMS = %i[name hide_backlog_list hide_closed_list].freeze

def execute(board)
filter_params

@@ -15,7 +15,7 @@ module BulkImports

ServiceError = Class.new(StandardError)

DEFAULT_ALLOWED_CONTENT_TYPES = %w(application/gzip application/octet-stream).freeze
DEFAULT_ALLOWED_CONTENT_TYPES = %w[application/gzip application/octet-stream].freeze

def initialize(
configuration:,

@@ -120,7 +120,7 @@ module BulkImports
http_client.resource_url(relative_url),
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w(http https)
schemes: %w[http https]
)
end

@@ -5,7 +5,7 @@

module Ci
class UpdateInstanceVariablesService
UNASSIGNABLE_KEYS = %w(id _destroy).freeze
UNASSIGNABLE_KEYS = %w[id _destroy].freeze

def initialize(params)
@params = params[:variables_attributes]

@@ -137,9 +137,9 @@ module Clusters
name: Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME,
namespace: service_account_namespace,
rules: [{
apiGroups: %w(serving.knative.dev),
resources: %w(configurations configurationgenerations routes revisions revisionuids autoscalers services),
verbs: %w(get list create update delete patch watch)
apiGroups: %w[serving.knative.dev],
resources: %w[configurations configurationgenerations routes revisions revisionuids autoscalers services],
verbs: %w[get list create update delete patch watch]
}]
).generate
end

@@ -159,9 +159,9 @@ module Clusters
name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME,
namespace: service_account_namespace,
rules: [{
apiGroups: %w(database.crossplane.io),
resources: %w(postgresqlinstances),
verbs: %w(get list create watch)
apiGroups: %w[database.crossplane.io],
resources: %w[postgresqlinstances],
verbs: %w[get list create watch]
}]
).generate
end

@@ -4,7 +4,7 @@ module FeatureFlags
class BaseService < ::BaseService
include Gitlab::Utils::StrongMemoize

AUDITABLE_ATTRIBUTES = %w(name description active).freeze
AUDITABLE_ATTRIBUTES = %w[name description active].freeze

def success(**args)
sync_to_jira(args[:feature_flag])

@@ -2,7 +2,7 @@

module Files
class MultiService < Files::BaseService
UPDATE_FILE_ACTIONS = %w(update move delete chmod).freeze
UPDATE_FILE_ACTIONS = %w[update move delete chmod].freeze

def create_commit!
transformer = Lfs::FileTransformer.new(project, repository, @branch_name)

@@ -83,7 +83,7 @@ module Import
url,
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w(http https)
schemes: %w[http https]
)
end

@@ -88,7 +88,7 @@ module Import
url,
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w(http https)
schemes: %w[http https]
)
end

@@ -91,7 +91,7 @@ module Import
url,
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w(http https)
schemes: %w[http https]
)
end

@@ -11,7 +11,7 @@ module Import
end

validates :file_url, addressable_url: {
schemes: %w(https),
schemes: %w[https],
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
dns_rebind_protection: true

@@ -13,7 +13,7 @@ module Import

validates_presence_of :region, :bucket_name, :file_key, :access_key_id, :secret_access_key
validates :file_url, addressable_url: {
schemes: %w(https),
schemes: %w[https],
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
dns_rebind_protection: true

@@ -60,7 +60,7 @@ class ImportExportCleanUpService
end

def directories_cmd
%W(find #{path} -mindepth #{DIR_DEPTH} -maxdepth #{DIR_DEPTH} -type d -not -path #{path} -mmin +#{mmin})
%W[find #{path} -mindepth #{DIR_DEPTH} -maxdepth #{DIR_DEPTH} -type d -not -path #{path} -mmin +#{mmin}]
end

def logger

@@ -10,7 +10,7 @@ module IncidentManagement
PAGER_DUTY_PAYLOAD_SIZE_LIMIT = 55.kilobytes

# https://developer.pagerduty.com/docs/db0fa8c8984fc-overview#event-types
PAGER_DUTY_PROCESSABLE_EVENT_TYPES = %w(incident.triggered).freeze
PAGER_DUTY_PROCESSABLE_EVENT_TYPES = %w[incident.triggered].freeze

def initialize(project, payload)
super(project: project)

@@ -43,7 +43,7 @@ module Issuable
end

def permitted_attrs(type)
attrs = %i(state_event milestone_id add_label_ids remove_label_ids subscription_event)
attrs = %i[state_event milestone_id add_label_ids remove_label_ids subscription_event]

if type == 'issue'
attrs.push(:assignee_ids, :confidential)

@@ -94,7 +94,7 @@ module MergeRequests
end

def track_title_and_desc_edits(changed_fields)
tracked_fields = %w(title description)
tracked_fields = %w[title description]

return unless changed_fields.any? { |field| tracked_fields.include?(field) }

@@ -12,7 +12,7 @@ module Packages
DEFAULT_LEASE_TIMEOUT = 1.hour.to_i.freeze

# From https://salsa.debian.org/ftp-team/dak/-/blob/991aaa27a7f7aa773bb9c0cf2d516e383d9cffa0/setup/core-init.d/080_metadatakeys#L9
METADATA_KEYS = %w(
METADATA_KEYS = %w[
Package
Source
Binary

@@ -60,7 +60,7 @@ module Packages
Tag
Package-Type
Installer-Menu-Item
).freeze
].freeze

def initialize(distribution)
@distribution = distribution

@@ -17,7 +17,7 @@ class PreviewMarkdownService < BaseService
private

def quick_action_types
%w(Issue MergeRequest Commit WorkItem)
%w[Issue MergeRequest Commit WorkItem]
end

def explain_quick_actions(text)

@@ -20,7 +20,7 @@ module Projects
# > AppleTargetPlatformDetectorService.new(multiplatform_project).execute
# => [:ios, :osx, :tvos, :watchos]
class AppleTargetPlatformDetectorService < BaseService
BUILD_CONFIG_FILENAMES = %w(project.pbxproj *.xcconfig).freeze
BUILD_CONFIG_FILENAMES = %w[project.pbxproj *.xcconfig].freeze

# For the current iteration, we only want to detect when the project targets
# iOS. In the future, we can use the same logic to detect projects that

@@ -28,7 +28,7 @@ module Projects
end

def http?(url)
url =~ /\A#{URI::DEFAULT_PARSER.make_regexp(%w(http https))}\z/
url =~ /\A#{URI::DEFAULT_PARSER.make_regexp(%w[http https])}\z/
end

def valid_domain?(url)

@@ -6,7 +6,7 @@ module Projects
extend ::Gitlab::Utils::Override

# List of paths that can be excluded while evaluation if a target can be discarded
DISCARDABLE_PATHS = %w(tmp tmp/cache tmp/work).freeze
DISCARDABLE_PATHS = %w[tmp tmp/cache tmp/work].freeze

def initialize(project:, old_disk_path:, logger: nil)
super

@@ -101,7 +101,7 @@ module Projects
# The import url must end with '.git' here we ensure it is
def default_endpoint_uri
@default_endpoint_uri ||= import_uri.dup.tap do |uri|
path = uri.path.gsub(%r(/$), '')
path = uri.path.gsub(%r{/$}, '')
path += '.git' unless path.ends_with?('.git')
uri.path = path + LFS_BATCH_API_ENDPOINT
end

@@ -120,11 +120,11 @@ module Projects
end

def after_update
todos_features_changes = %w(
todos_features_changes = %w[
issues_access_level
merge_requests_access_level
repository_access_level
)
]
project_changed_feature_keys = project.project_feature.previous_changes.keys

if project.visibility_level_previous_changes && project.private?

@@ -29,7 +29,7 @@ class Repositories::BaseService < BaseService
end

def move_error(path)
error = %{Repository "#{path}" could not be moved}
error = %(Repository "#{path}" could not be moved)

log_error(error)
error(error)

@@ -37,7 +37,7 @@ class RepositoryArchiveCleanUpService
private

def clean_up_old_archives
run(%W(find #{path} -mindepth 1 -maxdepth #{MAX_ARCHIVE_DEPTH} -type f \( -name \*.tar -o -name \*.bz2 -o -name \*.tar.gz -o -name \*.zip \) -mmin +#{mmin} -delete))
run(%W[find #{path} -mindepth 1 -maxdepth #{MAX_ARCHIVE_DEPTH} -type f \( -name \*.tar -o -name \*.bz2 -o -name \*.tar.gz -o -name \*.zip \) -mmin +#{mmin} -delete])
end

def clean_up_empty_directories

@@ -45,7 +45,7 @@ class RepositoryArchiveCleanUpService
end

def clean_up_empty_directories_with_depth(depth)
run(%W(find #{path} -mindepth #{depth} -maxdepth #{depth} -type d -empty -delete))
run(%W[find #{path} -mindepth #{depth} -maxdepth #{depth} -type d -empty -delete])
end

def run(cmd)

@@ -60,7 +60,7 @@ module ResourceAccessTokens
strong_memoize_attr :username_and_email_generator

def has_permission_to_create?
%w(project group).include?(resource_type) && can?(current_user, :create_resource_access_tokens, resource)
%w[project group].include?(resource_type) && can?(current_user, :create_resource_access_tokens, resource)
end

def create_user

@@ -38,7 +38,7 @@ module ResourceAccessTokens
end

def can_destroy_token?
%w(project group).include?(resource.class.name.downcase) && can?(current_user, :destroy_resource_access_tokens, resource)
%w[project group].include?(resource.class.name.downcase) && can?(current_user, :destroy_resource_access_tokens, resource)
end

def find_member

@@ -6,7 +6,7 @@ module Search
include Gitlab::Utils::StrongMemoize

DEFAULT_SCOPE = 'projects'
ALLOWED_SCOPES = %w(projects issues merge_requests milestones users).freeze
ALLOWED_SCOPES = %w[projects issues merge_requests milestones users].freeze

attr_accessor :current_user, :params

@@ -5,7 +5,7 @@ module Search
include Search::Filter
include Gitlab::Utils::StrongMemoize

ALLOWED_SCOPES = %w(blobs issues merge_requests wiki_blobs commits notes milestones users).freeze
ALLOWED_SCOPES = %w[blobs issues merge_requests wiki_blobs commits notes milestones users].freeze

attr_accessor :project, :current_user, :params

@@ -2,7 +2,7 @@

module Snippets
class UpdateService < Snippets::BaseService
COMMITTABLE_ATTRIBUTES = %w(file_name content).freeze
COMMITTABLE_ATTRIBUTES = %w[file_name content].freeze

UpdateError = Class.new(StandardError)

@@ -8,7 +8,7 @@ module Todos
# Since we are moving towards work items, in some instances we create todos with
# `target_type: WorkItem` in other instances we still create todos with `target_type: Issue`
# So when an issue/work item is deleted, we just make sure to delete todos for both target types
BOUND_TARGET_TYPES = %w(Issue WorkItem).freeze
BOUND_TARGET_TYPES = %w[Issue WorkItem].freeze

def initialize(target_id, target_type)
@target_id = target_id

@@ -8,7 +8,7 @@ module Todos
attr_reader :user, :entity

def initialize(user_id, entity_id, entity_type)
unless %w(Group Project).include?(entity_type)
unless %w[Group Project].include?(entity_type)
raise ArgumentError, "#{entity_type} is not an entity user can leave"
end

@@ -20,7 +20,7 @@ module DesignManagement
#
# We currently choose not to resize `image/svg+xml` for security reasons.
# See https://gitlab.com/gitlab-org/gitlab/issues/207740#note_302766171
MIME_TYPE_ALLOWLIST = %w(image/png image/jpeg image/bmp image/gif).freeze
MIME_TYPE_ALLOWLIST = %w[image/png image/jpeg image/bmp image/gif].freeze

process resize_to_fit: [432, 230]

@@ -5,7 +5,7 @@ class GitlabUploader < CarrierWave::Uploader::Base

class_attribute :storage_location_identifier

PROTECTED_METHODS = %i(filename cache_dir work_dir store_dir).freeze
PROTECTED_METHODS = %i[filename cache_dir work_dir store_dir].freeze

ObjectNotReadyError = Class.new(StandardError)

@@ -51,7 +51,7 @@ class AddressableUrlValidator < ActiveModel::EachValidator
# tasks that uses that url won't work.
# See https://gitlab.com/gitlab-org/gitlab-foss/issues/66723
BLOCKER_VALIDATE_OPTIONS = {
schemes: %w(http https),
schemes: %w[http https],
ports: [],
allow_localhost: true,
allow_local_network: true,

@@ -8,7 +8,7 @@ module Gitlab
# @example usage
# validates :url, 'gitlab/zoom_url': true
class ZoomUrlValidator < ActiveModel::EachValidator
ALLOWED_SCHEMES = %w(https).freeze
ALLOWED_SCHEMES = %w[https].freeze

def validate_each(record, attribute, value)
links_count = Gitlab::ZoomLinkExtractor.new(value).links.size

@@ -12,7 +12,7 @@
class JsonSchemaValidator < ActiveModel::EachValidator
FILENAME_ALLOWED = /\A[a-z0-9_-]*\Z/.freeze
FilenameError = Class.new(StandardError)
BASE_DIRECTORY = %w(app validators json_schemas).freeze
BASE_DIRECTORY = %w[app validators json_schemas].freeze

def initialize(options)
raise ArgumentError, "Expected 'filename' as an argument" unless options[:filename]

@@ -1,5 +1,6 @@
- page_title _("Blame"), @blob.path, @ref
- add_page_specific_style 'page_bundles/tree'
- add_page_specific_style 'page_bundles/projects'
- blame_streaming_url = blame_pages_streaming_url(@id, @project)

- if @blame_mode.streaming? && @blame_pagination.total_extra_pages > 0

@@ -2,6 +2,7 @@
- project = @project.present(current_user: current_user)
- ref = local_assigns[:ref] || @ref
- expanded = params[:expanded].present?
- add_page_specific_style 'page_bundles/projects'
-# If the blob has a RichViewer we preload the content except for GeoJSON since it is handled by Vue
- if blob.rich_viewer && blob.extension != 'geojson'
- add_page_startup_api_call local_assigns.fetch(:viewer_url) { url_for(safe_params.merge(viewer: blob.rich_viewer.type, format: :json)) }

@@ -1,6 +1,7 @@
- breadcrumb_title _("Commits")
- add_page_specific_style 'page_bundles/tree'
- add_page_specific_style 'page_bundles/merge_request'
- add_page_specific_style 'page_bundles/projects'
- page_title _("Commits"), @ref

= content_for :meta_tags do

@@ -1,5 +1,6 @@
- page_title _("Find File"), @ref
- add_page_specific_style 'page_bundles/tree'
- add_page_specific_style 'page_bundles/projects'

.file-finder-holder.tree-holder.clearfix.js-file-finder.gl-pt-4{ 'data-file-find-url': "#{escape_javascript(project_files_path(@project, @ref, format: :json))}", 'data-find-tree-url': escape_javascript(project_tree_path(@project, @ref)), 'data-blob-url-template': escape_javascript(project_blob_path(@project, @ref)) }
.nav-block.gl-xs-mr-0

@@ -1,5 +1,4 @@
- page_title s_("UsageQuota|Usage")
- add_page_specific_style 'page_bundles/projects_usage_quotas'
- @force_desktop_expanded_sidebar = true

= render Pajamas::AlertComponent.new(title: _('Repository usage recalculation started'),

@@ -8,7 +8,7 @@ module MembersDestroyer

sidekiq_options retry: 3

ENTITY_TYPES = %w(Group Project).freeze
ENTITY_TYPES = %w[Group Project].freeze

queue_namespace :unassign_issuables
feature_category :user_management

@@ -6,7 +6,7 @@ module Projects
include ExclusiveLeaseGuard

LEASE_TIMEOUT = 1.hour.to_i
APPLE_PLATFORM_LANGUAGES = %w(swift objective-c).freeze
APPLE_PLATFORM_LANGUAGES = %w[swift objective-c].freeze

feature_category :experimentation_activation
data_consistency :always

@@ -341,7 +341,6 @@ module Gitlab
config.assets.precompile << "page_bundles/project.css"
config.assets.precompile << "page_bundles/projects.css"
config.assets.precompile << "page_bundles/projects_edit.css"
config.assets.precompile << "page_bundles/projects_usage_quotas.css"
config.assets.precompile << "page_bundles/promotions.css"
config.assets.precompile << "page_bundles/releases.css"
config.assets.precompile << "page_bundles/remote_development.css"

@@ -11,12 +11,17 @@ MSG
CH_UNREVIEWED_LABEL = 'clickhouse::review pending'
CH_APPROVED_LABEL = 'clickhouse::approved'

CH_URL =
'https://gitlab.com/groups/gitlab-org/maintainers/clickhouse/-/group_members?with_inherited_permissions=exclude'

return if stable_branch.valid_stable_branch?
return if helper.mr_labels.include?(CH_UNREVIEWED_LABEL)

helper.labels_to_add << 'clickhouse' if clickhouse.changes.any?

if helper.mr_labels.include?('clickhouse') || clickhouse.changes.any?
message 'This merge request adds or changes files that require a ' \
'review from the [GitLab ClickHouse team](https://gitlab.com/groups/gl-clickhouse/-/group_members).'
"review from the [GitLab ClickHouse team](#{CH_URL})."

markdown(CH_MESSAGE)

@@ -0,0 +1,10 @@
# frozen_string_literal: true

require_relative '../../tooling/danger/clickhouse'

module Danger
class Clickhouse < ::Danger::Plugin
# Put the helper code somewhere it can be tested
include Tooling::Danger::Clickhouse
end
end

@@ -381,21 +381,7 @@ An alternative approach to building Organizations is to convert top-level Groups

## Frequently Asked Questions

### Do we expect large SaaS customers to be licensed at the Organization level, for example to have the ability to include multiple top-level Groups under one license?

Yes, this has been discussed with Fulfillment and is part of the post MVC roadmap for Organizations.
See also [Alignment between Organization and Fulfillment](#alignment-between-organization-and-fulfillment).

### Do we expect to be able to configure alternate GitLab domain names for Organizations (such as `customer.gitlab.com`)?

There is no plan at this point to allow configuration of alternate GitLab domain names.
We have previously heard that sub-domains bring administrative challenges.
GitLab Dedicated will be a much better fit for that at this moment.

### Do we expect Organizations to have visibility settings (public/private) of their own? Will visibility remain a property of top-level Groups?

Organizations are public for now but will have their own independent visibility settings.
See also [When can Users see an Organization?](#when-can-users-see-an-organization).
See [Organization: Frequently Asked Questions](organization-faq.md).

## Decision Log

@@ -0,0 +1,44 @@
---
stage: enablement
group: Tenant Scale
description: 'Organization: FAQ'
---

# Organization: Frequently Asked Questions

## Do we expect large SaaS customers to be licensed at the Organization level, for example to have the ability to include multiple top-level Groups under one license?

Yes, this has been discussed with Fulfillment and is part of the post MVC roadmap for Organizations.
See also [Alignment between Organization and Fulfillment](index.md#alignment-between-organization-and-fulfillment).

## Do we expect to be able to configure alternate GitLab domain names for Organizations (such as `customer.gitlab.com`)?

There is no plan at this point to allow configuration of alternate GitLab domain names.
We have previously heard that sub-domains bring administrative challenges.
GitLab Dedicated will be a much better fit for that at this moment.

## Do we expect Organizations to have visibility settings (public/private) of their own? Will visibility remain a property of top-level Groups?

Organizations are public for now but will have their own independent visibility settings.
See also [When can Users see an Organization?](index.md#when-can-users-see-an-organization).

## What would the migration of a feature from the top-level Group to the Organization look like?

One of our requirements is that everything needs to be mapped to an Organization.
Only that way will we achieve the isolation we are striving for.
For SaaS, all existing Groups and Projects are already mapped to `Org_ID = 1` in the backend.
`Org_ID = 1` corresponds to the `Default Organization`, meaning that upon Organization rollout, all existing Groups and Projects will be part of the default Organization and will be seen in that context.
Because we want to achieve as much parity as possible between SaaS and self-managed, self-managed customers would also get everything mapped to the default Organization.
The difference between SaaS and self-managed is that for SaaS we expect users to create many Organizations, and for self-managed we do not.
We will control this via a `can_create_organization` application setting that will be enabled by default on SaaS and disabled by default for self-managed users.

Consider whether your feature can support cascading, or in other words, whether the functionality is capable of existing on multiple nested levels without causing conflicts.
If your feature can support cascading:

- Today, you should add your feature to the top-level Group for both SaaS and self-managed, and to the instance for self-managed.
- Once the Organization is ready, you would migrate your instance-level feature over to the Organization object, at which point it would be available at both the Organization and top-level Group for all customers.

If your feature cannot support cascading:

- Today, you should add your feature to the top-level Group for SaaS only, and to the instance for self-managed. The top-level Group functionality would be hidden for self-managed users.
- Once the Organization is ready, you would migrate instance functionality to the Organization for self-managed customers, but hide it at the Organization level for SaaS. On SaaS, users would continue to manage their functionality at the top-level Group, and not at the Organization level. At some point in the future when 99% of paying customers have moved to their own Organization, you could clean things up by introducing a breaking change and unhiding it from the Organization level for all customers (SaaS and self-managed) and removing the functionality from the top-level Group.

@@ -66,7 +66,7 @@ Prerequisite:
This service account is associated with the entire instance, not a specific group
or project in the instance.

1. [Create a personal access token](../../api/groups.md#create-personal-access-token-for-service-account-user)
1. [Create a personal access token](../../api/users.md#create-a-personal-access-token)
for the service account user.

You define the scopes for the service account by [setting the scopes for the personal access token](personal_access_tokens.md#personal-access-token-scopes).

@@ -148,7 +148,7 @@
"gettext-parser": "^6.0.0",
"graphql": "^15.7.2",
"graphql-tag": "^2.11.0",
"gridstack": "^9.1.1",
"gridstack": "^9.2.0",
"highlight.js": "^11.8.0",
"immer": "^9.0.15",
"ipaddr.js": "^1.9.1",

@@ -75,7 +75,6 @@ module QA

def created_access_token
within_element(:access_token_section) do
click_element(:toggle_visibility_button, wait: 30)
find_element(:created_access_token_field).value
end
end

@@ -23,24 +23,6 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures, feature_cate
visit user_confirmation_path(confirmation_token: new_user_token)
end

def fill_in_sign_up_form(new_user, submit_button_text = 'Register')
fill_in 'new_user_first_name', with: new_user.first_name
fill_in 'new_user_last_name', with: new_user.last_name
fill_in 'new_user_username', with: new_user.username
fill_in 'new_user_email', with: new_user.email
fill_in 'new_user_password', with: new_user.password

wait_for_all_requests

expect_username_to_be_validated

click_button submit_button_text
end

def expect_username_to_be_validated
expect(page).to have_selector('[data-testid="new_user_username_field"].gl-field-success-outline')
end

def fill_in_welcome_form
select 'Software Developer', from: 'user_role'
click_button 'Get started!'

@@ -48,7 +48,8 @@ RSpec.describe 'Batch diffs', :js, feature_category: :code_review_workflow do

  context 'when user visits a URL with a link directly to to a discussion' do
    context 'which is in the first batched page of diffs' do
      it 'scrolls to the correct discussion' do
      it 'scrolls to the correct discussion',
        quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/410029' } do
        page.within get_first_diff do
          click_link('just now')
        end
@@ -1174,4 +1174,43 @@ describe('common_utils', () => {
      });
    });
  });

  describe('cloneWithoutReferences', () => {
    it('clones the provided object', () => {
      const obj = {
        foo: 'bar',
        cool: 1337,
        nested: {
          peanut: 'butter',
        },
        arrays: [0, 1, 2],
      };

      const cloned = commonUtils.cloneWithoutReferences(obj);

      expect(cloned).toMatchObject({
        foo: 'bar',
        cool: 1337,
        nested: {
          peanut: 'butter',
        },
        arrays: [0, 1, 2],
      });
    });

    it('does not persist object references after cloning', () => {
      const ref = {
        foo: 'bar',
      };

      const obj = {
        ref,
      };

      const cloned = commonUtils.cloneWithoutReferences(obj);

      expect(cloned.ref).toMatchObject({ foo: 'bar' });
      expect(cloned.ref === ref).toBe(false);
    });
  });
});
@@ -5,7 +5,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ProjectStorageApp from '~/usage_quotas/storage/components/project_storage_app.vue';
import UsageGraph from '~/usage_quotas/storage/components/usage_graph.vue';
import SectionedPercentageBar from '~/usage_quotas/components/sectioned_percentage_bar.vue';
import {
  descendingStorageUsageSort,
  getStorageTypesFromProjectStatistics,
@@ -56,7 +56,7 @@ describe('ProjectStorageApp', () => {
  const findAlert = () => wrapper.findComponent(GlAlert);
  const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
  const findUsagePercentage = () => wrapper.findByTestId('total-usage');
  const findUsageGraph = () => wrapper.findComponent(UsageGraph);
  const findSectionedPercentageBar = () => wrapper.findComponent(SectionedPercentageBar);
  const findProjectDetailsTable = () => wrapper.findByTestId('usage-quotas-project-usage-details');
  const findNamespaceDetailsTable = () =>
    wrapper.findByTestId('usage-quotas-namespace-usage-details');
@@ -157,7 +157,7 @@ describe('ProjectStorageApp', () => {
    });
  });

  describe('rendering <usage-graph />', () => {
  describe('rendering <sectioned-percentage-bar />', () => {
    let mockApollo;

    beforeEach(async () => {
@@ -168,16 +168,23 @@ describe('ProjectStorageApp', () => {
      await waitForPromises();
    });

    it('renders usage-graph component if project.statistics exists', () => {
      expect(findUsageGraph().exists()).toBe(true);
    it('renders sectioned-percentage-bar component if project.statistics exists', () => {
      expect(findSectionedPercentageBar().exists()).toBe(true);
    });

    it('passes project.statistics to usage-graph component', () => {
      const {
        __typename,
        ...statistics
      } = mockGetProjectStorageStatisticsGraphQLResponse.data.project.statistics;
      expect(findUsageGraph().props('rootStorageStatistics')).toMatchObject(statistics);
    it('passes processed project statistics to sectioned-percentage-bar component', () => {
      expect(findSectionedPercentageBar().props('sections')).toMatchObject([
        { formattedValue: '4.58 MiB', id: 'lfsObjects', label: 'LFS', value: 4800000 },
        { formattedValue: '3.72 MiB', id: 'repository', label: 'Repository', value: 3900000 },
        { formattedValue: '3.62 MiB', id: 'packages', label: 'Packages', value: 3800000 },
        {
          formattedValue: '390.63 KiB',
          id: 'buildArtifacts',
          label: 'Job artifacts',
          value: 400000,
        },
        { formattedValue: '292.97 KiB', id: 'wiki', label: 'Wiki', value: 300000 },
      ]);
    });
  });
});
@@ -1,125 +0,0 @@
import { shallowMount } from '@vue/test-utils';
import { numberToHumanSize } from '~/lib/utils/number_utils';
import UsageGraph from '~/usage_quotas/storage/components/usage_graph.vue';

let data;
let wrapper;

function mountComponent({ rootStorageStatistics, limit }) {
  wrapper = shallowMount(UsageGraph, {
    propsData: {
      rootStorageStatistics,
      limit,
    },
  });
}
function findStorageTypeUsagesSerialized() {
  return wrapper
    .findAll('[data-testid="storage-type-usage"]')
    .wrappers.map((wp) => wp.element.style.flex);
}

describe('UsageGraph', () => {
  beforeEach(() => {
    data = {
      rootStorageStatistics: {
        wikiSize: 5000,
        repositorySize: 4000,
        packagesSize: 3000,
        containerRegistrySize: 2500,
        lfsObjectsSize: 2000,
        buildArtifactsSize: 700,
        snippetsSize: 2000,
        storageSize: 17000,
      },
      limit: 2000,
    };
    mountComponent(data);
  });

  it('renders the legend in order', () => {
    const types = wrapper.findAll('[data-testid="storage-type-legend"]');

    const {
      buildArtifactsSize,
      lfsObjectsSize,
      packagesSize,
      repositorySize,
      wikiSize,
      snippetsSize,
    } = data.rootStorageStatistics;

    expect(types.at(0).text()).toMatchInterpolatedText(`Wiki ${numberToHumanSize(wikiSize)}`);
    expect(types.at(1).text()).toMatchInterpolatedText(
      `Repository ${numberToHumanSize(repositorySize)}`,
    );
    expect(types.at(2).text()).toMatchInterpolatedText(
      `Packages ${numberToHumanSize(packagesSize)}`,
    );
    expect(types.at(3).text()).toMatchInterpolatedText(`LFS ${numberToHumanSize(lfsObjectsSize)}`);
    expect(types.at(4).text()).toMatchInterpolatedText(
      `Snippets ${numberToHumanSize(snippetsSize)}`,
    );
    expect(types.at(5).text()).toMatchInterpolatedText(
      `Job artifacts ${numberToHumanSize(buildArtifactsSize)}`,
    );
  });

  describe('when storage type is not used', () => {
    beforeEach(() => {
      data.rootStorageStatistics.wikiSize = 0;
      mountComponent(data);
    });

    it('filters the storage type', () => {
      expect(wrapper.text()).not.toContain('Wikis');
    });
  });

  describe('when there is no storage usage', () => {
    beforeEach(() => {
      data.rootStorageStatistics.storageSize = 0;
      mountComponent(data);
    });

    it('does not render', () => {
      expect(wrapper.html()).toEqual('');
    });
  });

  describe('when limit is 0', () => {
    beforeEach(() => {
      data.limit = 0;
      mountComponent(data);
    });

    it('sets correct flex values', () => {
      expect(findStorageTypeUsagesSerialized()).toStrictEqual([
        '0.29411764705882354',
        '0.23529411764705882',
        '0.17647058823529413',
        '0.11764705882352941',
        '0.11764705882352941',
        '0.041176470588235294',
      ]);
    });
  });

  describe('when storage exceeds limit', () => {
    beforeEach(() => {
      data.limit = data.rootStorageStatistics.storageSize - 1;
      mountComponent(data);
    });

    it('does render correclty', () => {
      expect(findStorageTypeUsagesSerialized()).toStrictEqual([
        '0.29411764705882354',
        '0.23529411764705882',
        '0.17647058823529413',
        '0.11764705882352941',
        '0.11764705882352941',
        '0.041176470588235294',
      ]);
    });
  });
});
@@ -1,62 +0,0 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe PerformanceMonitoring::PrometheusPanelGroup do
  let(:json_content) do
    {
      "group" => "Group Title",
      "panels" => [{
        "type" => "area-chart",
        "title" => "Chart Title",
        "y_label" => "Y-Axis",
        "metrics" => [{
          "id" => "metric_of_ages",
          "unit" => "count",
          "label" => "Metric of Ages",
          "query_range" => "http_requests_total"
        }]
      }]
    }
  end

  describe '.from_json' do
    subject { described_class.from_json(json_content) }

    it 'creates a PrometheusPanelGroup object' do
      expect(subject).to be_a described_class
      expect(subject.group).to eq(json_content['group'])
      expect(subject.panels).to all(be_a PerformanceMonitoring::PrometheusPanel)
    end

    describe 'validations' do
      context 'json_content is not a hash' do
        let(:json_content) { nil }

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end

      context 'when group is missing' do
        before do
          json_content.delete('group')
        end

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end

      context 'when panels are missing' do
        before do
          json_content['panels'] = []
        end

        subject { described_class.from_json(json_content) }

        it { expect { subject }.to raise_error(ActiveModel::ValidationError) }
      end
    end
  end
end
@@ -30,17 +30,6 @@ RSpec.describe PerformanceMonitoring::PrometheusPanel do
  end

  describe '.from_json' do
    subject { described_class.from_json(json_content) }

    it 'creates a PrometheusPanelGroup object' do
      expect(subject).to be_a described_class
      expect(subject.type).to eq(json_content['type'])
      expect(subject.title).to eq(json_content['title'])
      expect(subject.y_label).to eq(json_content['y_label'])
      expect(subject.weight).to eq(json_content['weight'])
      expect(subject.metrics).to all(be_a PerformanceMonitoring::PrometheusMetric)
    end

    describe 'validations' do
      context 'json_content is not a hash' do
        let(:json_content) { nil }
@@ -178,6 +178,7 @@ RSpec.configure do |config|
  config.include Devise::Test::IntegrationHelpers, type: :feature
  config.include Devise::Test::IntegrationHelpers, type: :request
  config.include LoginHelpers, type: :feature
  config.include SignUpHelpers, type: :feature
  config.include SearchHelpers, type: :feature
  config.include WaitHelpers, type: :feature
  config.include WaitForRequests, type: :feature
@@ -0,0 +1,27 @@
# frozen_string_literal: true

require 'spec_helper'

module SignUpHelpers
  def fill_in_sign_up_form(new_user, submit_button_text = 'Register')
    fill_in 'new_user_first_name', with: new_user.first_name
    fill_in 'new_user_last_name', with: new_user.last_name
    fill_in 'new_user_username', with: new_user.username
    fill_in 'new_user_email', with: new_user.email
    fill_in 'new_user_password', with: new_user.password

    wait_for_all_requests

    expect_username_to_be_validated

    yield if block_given?

    click_button submit_button_text
  end

  private

  def expect_username_to_be_validated
    expect(page).to have_selector('[data-testid="new_user_username_field"].gl-field-success-outline')
  end
end
@@ -0,0 +1,109 @@
# frozen_string_literal: true

RSpec.shared_examples Gitlab::Import::AdvanceStage do |factory:|
  let_it_be(:project) { create(:project) }
  let_it_be_with_reload(:import_state) { create(factory, :started, project: project, jid: '123') }
  let(:worker) { described_class.new }
  let(:next_stage) { :finish }

  describe '#perform', :clean_gitlab_redis_shared_state do
    context 'when the project no longer exists' do
      it 'does not perform any work' do
        expect(worker).not_to receive(:wait_for_jobs)

        worker.perform(non_existing_record_id, { '123' => 2 }, next_stage)
      end
    end

    context 'when there are remaining jobs' do
      it 'reschedules itself' do
        expect(worker)
          .to receive(:wait_for_jobs)
          .with({ '123' => 2 })
          .and_return({ '123' => 1 })

        expect(described_class)
          .to receive(:perform_in)
          .with(described_class::INTERVAL, project.id, { '123' => 1 }, next_stage)

        worker.perform(project.id, { '123' => 2 }, next_stage)
      end

      context 'when the project import is not running' do
        before do
          import_state.update_column(:status, :failed)
        end

        it 'does not perform any work' do
          expect(worker).not_to receive(:wait_for_jobs)
          expect(described_class).not_to receive(:perform_in)

          worker.perform(project.id, { '123' => 2 }, next_stage)
        end

        it 'clears the JobWaiter cache' do
          expect(Gitlab::JobWaiter).to receive(:delete_key).with('123')

          worker.perform(project.id, { '123' => 2 }, next_stage)
        end
      end
    end

    context 'when there are no remaining jobs' do
      before do
        allow(worker)
          .to receive(:wait_for_jobs)
          .with({ '123' => 2 })
          .and_return({})
      end

      it 'schedules the next stage' do
        next_worker = described_class::STAGES[next_stage]

        expect_next_found_instance_of(import_state.class) do |state|
          expect(state).to receive(:refresh_jid_expiration)
        end

        expect(next_worker).to receive(:perform_async).with(project.id)

        worker.perform(project.id, { '123' => 2 }, next_stage)
      end

      it 'raises KeyError when the stage name is invalid' do
        expect { worker.perform(project.id, { '123' => 2 }, :kittens) }
          .to raise_error(KeyError)
      end
    end
  end

  describe '#wait_for_jobs' do
    it 'waits for jobs to complete and returns a new pair of keys to wait for' do
      waiter1 = instance_double("Gitlab::JobWaiter", jobs_remaining: 1, key: '123')
      waiter2 = instance_double("Gitlab::JobWaiter", jobs_remaining: 0, key: '456')

      expect(Gitlab::JobWaiter)
        .to receive(:new)
        .ordered
        .with(2, '123')
        .and_return(waiter1)

      expect(Gitlab::JobWaiter)
        .to receive(:new)
        .ordered
        .with(1, '456')
        .and_return(waiter2)

      expect(waiter1)
        .to receive(:wait)
        .with(described_class::BLOCKING_WAIT_TIME)

      expect(waiter2)
        .to receive(:wait)
        .with(described_class::BLOCKING_WAIT_TIME)

      new_waiters = worker.wait_for_jobs({ '123' => 2, '456' => 1 })

      expect(new_waiters).to eq({ '123' => 1 })
    end
  end
end
@@ -0,0 +1,7 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BitbucketServerImport::AdvanceStageWorker, feature_category: :importers do
  it_behaves_like Gitlab::Import::AdvanceStage, factory: :import_state
end
@@ -2,125 +2,6 @@

require 'spec_helper'

RSpec.describe Gitlab::GithubImport::AdvanceStageWorker, :clean_gitlab_redis_shared_state, feature_category: :importers do
  let(:project) { create(:project) }
  let(:import_state) { create(:import_state, project: project, jid: '123') }
  let(:worker) { described_class.new }

  describe '#perform' do
    context 'when the project no longer exists' do
      it 'does not perform any work' do
        expect(worker).not_to receive(:wait_for_jobs)

        worker.perform(-1, { '123' => 2 }, :finish)
      end
    end

    context 'when there are remaining jobs' do
      before do
        allow(worker)
          .to receive(:find_import_state)
          .and_return(import_state)
      end

      it 'reschedules itself' do
        expect(worker)
          .to receive(:wait_for_jobs)
          .with({ '123' => 2 })
          .and_return({ '123' => 1 })

        expect(described_class)
          .to receive(:perform_in)
          .with(described_class::INTERVAL, project.id, { '123' => 1 }, :finish)

        worker.perform(project.id, { '123' => 2 }, :finish)
      end

      context 'when import state is nil' do
        let(:import_state) { nil }

        it 'clears the JobWaiter cache and does not perform any work' do
          expect(Gitlab::JobWaiter).to receive(:delete_key).with('123')
          expect(worker).not_to receive(:wait_for_jobs)

          worker.perform(project.id, { '123' => 2 }, :finish)
        end
      end
    end

    context 'when there are no remaining jobs' do
      before do
        allow(worker)
          .to receive(:find_import_state)
          .and_return(import_state)

        allow(worker)
          .to receive(:wait_for_jobs)
          .with({ '123' => 2 })
          .and_return({})
      end

      it 'schedules the next stage' do
        expect(import_state)
          .to receive(:refresh_jid_expiration)

        expect(Gitlab::GithubImport::Stage::FinishImportWorker)
          .to receive(:perform_async)
          .with(project.id)

        worker.perform(project.id, { '123' => 2 }, :finish)
      end

      it 'raises KeyError when the stage name is invalid' do
        expect { worker.perform(project.id, { '123' => 2 }, :kittens) }
          .to raise_error(KeyError)
      end
    end
  end

  describe '#wait_for_jobs' do
    it 'waits for jobs to complete and returns a new pair of keys to wait for' do
      waiter1 = double(:waiter1, jobs_remaining: 1, key: '123')
      waiter2 = double(:waiter2, jobs_remaining: 0, key: '456')

      expect(Gitlab::JobWaiter)
        .to receive(:new)
        .ordered
        .with(2, '123')
        .and_return(waiter1)

      expect(Gitlab::JobWaiter)
        .to receive(:new)
        .ordered
        .with(1, '456')
        .and_return(waiter2)

      expect(waiter1)
        .to receive(:wait)
        .with(described_class::BLOCKING_WAIT_TIME)

      expect(waiter2)
        .to receive(:wait)
        .with(described_class::BLOCKING_WAIT_TIME)

      new_waiters = worker.wait_for_jobs({ '123' => 2, '456' => 1 })

      expect(new_waiters).to eq({ '123' => 1 })
    end
  end

  describe '#find_import_state' do
    it 'returns a ProjectImportState' do
      import_state.update_column(:status, 'started')

      found = worker.find_import_state(project.id)

      expect(found).to be_an_instance_of(ProjectImportState)
      expect(found.attributes.keys).to match_array(%w(id jid))
    end

    it 'returns nil if the project import is not running' do
      expect(worker.find_import_state(project.id)).to be_nil
    end
  end
RSpec.describe Gitlab::GithubImport::AdvanceStageWorker, feature_category: :importers do
  it_behaves_like Gitlab::Import::AdvanceStage, factory: :import_state
end
@@ -0,0 +1,7 @@
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::JiraImport::AdvanceStageWorker, feature_category: :importers do
  it_behaves_like Gitlab::Import::AdvanceStage, factory: :jira_import_state
end
@@ -98,6 +98,10 @@ module Tooling
      \.gitlab/ci/frontend\.gitlab-ci\.yml
    )\z}x => %i[frontend tooling],

    %r{\A(ee/)?db/click_house/} => :clickhouse,
    %r{\Agems/click_house-client/} => :clickhouse,
    %r{click((-)?|(_)?)house} => :clickhouse,

    %r{\A((ee|jh)/)?db/(geo/)?(migrate|post_migrate)/} => [:database],
    %r{\A((ee|jh)/)?db/(?!fixtures)[^/]+} => [:database],
    %r{\A((ee|jh)/)?lib/gitlab/(database|background_migration|sql)(/|\.rb)} => [:database, :backend],
@@ -106,10 +110,6 @@ module Tooling
    %r{\A((ee|jh)/)?app/finders/} => [:database, :backend],
    %r{\Arubocop/cop/migration(/|\.rb)} => :database,

    %r{\A(ee/)?db/click_house/} => :clickhouse,
    %r{\Agems/click_house-client/} => :clickhouse,
    %r{click(-)?house} => :clickhouse,

    %r{\Alib/gitlab/ci/templates} => :ci_template,

    %r{\A((ee|jh)/)?spec/features/} => :test,
@@ -7171,10 +7171,10 @@ graphql@^15.7.2:
  resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.7.2.tgz#85ab0eeb83722977151b3feb4d631b5f2ab287ef"
  integrity sha512-AnnKk7hFQFmU/2I9YSQf3xw44ctnSFCfp3zE0N6W174gqe9fWG/2rKaKxROK7CcI3XtERpjEKFqts8o319Kf7A==

gridstack@^9.1.1:
  version "9.1.1"
  resolved "https://registry.yarnpkg.com/gridstack/-/gridstack-9.1.1.tgz#76695d6eec4ed693e4c53ed410e043c12e19af24"
  integrity sha512-DgRLROhJ2JPOOzfn+irsLpnkwgUuY3w0fX7HinEjAk5SbvNYBKKEuN89Di22CM2IX2ehNY85OzxxletnBuAnlQ==
gridstack@^9.2.0:
  version "9.2.0"
  resolved "https://registry.yarnpkg.com/gridstack/-/gridstack-9.2.0.tgz#1a415185649a8ebe8a92f9aebea14183f2996d25"
  integrity sha512-+uWb4p1Za3j6OfvumXzuWQ4EcDh3kZZFLr0vONLPdrtGPJuxb73TjqttEu4igW7iP2Y80kewfYnNT6kQ0blJQQ==

gzip-size@^6.0.0:
  version "6.0.0"