Add latest changes from gitlab-org/gitlab@master
This commit is contained in:
parent d89147da04
commit 2d560e614a

@@ -1110,3 +1110,12 @@ Database/AvoidUsingPluckWithoutLimit:
- 'spec/**/*.rb'
- 'ee/spec/**/*.rb'
- 'qa/qa/specs/**/*.rb'

Style/SymbolProc:
AllowedMethods:
- define_method
- mail
- respond_to
# See https://gitlab.com/gitlab-org/gitlab/-/issues/434151
- each_batch
- each_sub_batch

@@ -11,7 +11,6 @@ Style/SymbolProc:
- 'app/models/integrations/prometheus.rb'
- 'app/models/label_note.rb'
- 'app/models/members/project_member.rb'
- 'app/models/namespace.rb'
- 'app/models/preloaders/merge_request_diff_preloader.rb'
- 'app/models/release.rb'
- 'app/models/remote_mirror.rb'

@@ -45,14 +44,12 @@ Style/SymbolProc:
- 'app/services/ci/find_exposed_artifacts_service.rb'
- 'app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb'
- 'app/services/ci/update_build_state_service.rb'
- 'app/services/clusters/agents/delete_expired_events_service.rb'
- 'app/services/feature_flags/update_service.rb'
- 'app/services/merge_requests/base_service.rb'
- 'app/services/notes/destroy_service.rb'
- 'app/services/packages/debian/generate_distribution_service.rb'
- 'app/services/resource_events/synthetic_label_notes_builder_service.rb'
- 'app/services/two_factor/destroy_service.rb'
- 'app/workers/bulk_import_worker.rb'
- 'app/workers/bulk_imports/stuck_import_worker.rb'
- 'app/workers/ci/build_trace_chunk_flush_worker.rb'
- 'app/workers/gitlab/import/stuck_import_job.rb'

@@ -63,7 +60,6 @@ Style/SymbolProc:
- 'config/initializers/doorkeeper_openid_connect.rb'
- 'config/initializers/mail_encoding_patch.rb'
- 'config/settings.rb'
- 'ee/app/helpers/ee/mirror_helper.rb'
- 'ee/app/helpers/ee/registrations_helper.rb'
- 'ee/app/models/concerns/epic_tree_sorting.rb'
- 'ee/app/models/ee/issue.rb'

@@ -76,10 +72,8 @@ Style/SymbolProc:
- 'ee/app/serializers/integrations/jira_serializers/issue_entity.rb'
- 'ee/app/serializers/linked_epic_entity.rb'
- 'ee/app/services/analytics/cycle_analytics/data_loader_service.rb'
- 'ee/app/services/geo/verification_state_backfill_service.rb'
- 'ee/app/services/security/scanned_resources_counting_service.rb'
- 'ee/app/services/timebox_report_service.rb'
- 'ee/app/services/vulnerabilities/historical_statistics/deletion_service.rb'
- 'ee/app/workers/geo/sync_timeout_cron_worker.rb'
- 'ee/app/workers/geo/verification_cron_worker.rb'
- 'ee/lib/api/entities/pending_member.rb'

@@ -107,10 +101,8 @@ Style/SymbolProc:
- 'lib/api/entities/issuable_references.rb'
- 'lib/api/entities/merge_request_approvals.rb'
- 'lib/api/entities/package.rb'
- 'lib/api/entities/protected_ref_access.rb'
- 'lib/api/go_proxy.rb'
- 'lib/api/helpers/internal_helpers.rb'
- 'lib/api/package_files.rb'
- 'lib/atlassian/jira_connect/serializers/base_entity.rb'
- 'lib/bulk_imports/common/pipelines/entity_finisher.rb'
- 'lib/bulk_imports/ndjson_pipeline.rb'

@@ -118,7 +110,6 @@ Style/SymbolProc:
- 'lib/container_registry/gitlab_api_client.rb'
- 'lib/gitlab/analytics/cycle_analytics/stage_events.rb'
- 'lib/gitlab/auth/o_auth/auth_hash.rb'
- 'lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings.rb'
- 'lib/gitlab/blob_helper.rb'
- 'lib/gitlab/cache/ci/project_pipeline_status.rb'
- 'lib/gitlab/checks/changes_access.rb'

@@ -127,7 +118,6 @@ Style/SymbolProc:
- 'lib/gitlab/config/entry/node.rb'
- 'lib/gitlab/database/async_indexes/migration_helpers.rb'
- 'lib/gitlab/database/consistency_checker.rb'
- 'lib/gitlab/database/migrations/background_migration_helpers.rb'
- 'lib/gitlab/database/migrations/instrumentation.rb'
- 'lib/gitlab/diff/file_collection/base.rb'
- 'lib/gitlab/diff/rendered/notebook/diff_file_helper.rb'

@@ -145,18 +135,12 @@ Style/SymbolProc:
- 'lib/gitlab/slash_commands/deploy.rb'
- 'lib/gitlab/ssh_public_key.rb'
- 'lib/gitlab/suggestions/suggestion_set.rb'
- 'lib/gitlab/task_helpers.rb'
- 'lib/tasks/gitlab/praefect.rake'
- 'qa/qa/ee/page/group/settings/general.rb'
- 'qa/qa/ee/page/operations_dashboard.rb'
- 'qa/qa/page/group/settings/package_registries.rb'
- 'qa/qa/page/profile/two_factor_auth.rb'
- 'qa/qa/resource/project_snippet.rb'
- 'qa/qa/runtime/ip_address.rb'
- 'qa/qa/specs/features/browser_ui/3_create/merge_request/rebase_merge_request_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/13_secure/enable_scanning_from_configuration_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/merge_request/approval_rules_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/3_create/repository/file_locking_spec.rb'
- 'qa/qa/specs/features/ee/browser_ui/4_verify/pipeline_subscription_with_group_owned_project_spec.rb'
- 'rubocop/cop/gitlab/mark_used_feature_flags.rb'
- 'rubocop/cop/gitlab/namespaced_class.rb'

@@ -184,8 +168,6 @@ Style/SymbolProc:
- 'spec/helpers/instance_configuration_helper_spec.rb'
- 'spec/helpers/members_helper_spec.rb'
- 'spec/lib/backup/gitaly_backup_spec.rb'
- 'spec/lib/gitlab/database/dynamic_model_helpers_spec.rb'
- 'spec/lib/gitlab/database/loose_foreign_keys_spec.rb'
- 'spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb'
- 'spec/lib/gitlab/git/commit_spec.rb'
- 'spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb'

@@ -193,7 +175,6 @@ Style/SymbolProc:
- 'spec/lib/gitlab/graphql/markdown_field_spec.rb'
- 'spec/lib/gitlab/instrumentation/redis_spec.rb'
- 'spec/lib/gitlab/optimistic_locking_spec.rb'
- 'spec/lib/gitlab/pagination/keyset/in_operator_optimization/query_builder_spec.rb'
- 'spec/lib/gitlab/quick_actions/dsl_spec.rb'
- 'spec/lib/gitlab/relative_positioning/item_context_spec.rb'
- 'spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb'

Gemfile

@@ -386,7 +386,7 @@ gem 'snowplow-tracker', '~> 0.8.0' # rubocop:todo Gemfile/MissingFeatureCategory

# Metrics
gem 'webrick', '~> 1.8.1', require: false # rubocop:todo Gemfile/MissingFeatureCategory
gem 'prometheus-client-mmap', '~> 1.0', '>= 1.0.1', require: 'prometheus/client' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'prometheus-client-mmap', '~> 1.0', '>= 1.0.2', require: 'prometheus/client' # rubocop:todo Gemfile/MissingFeatureCategory

gem 'warning', '~> 1.3.0' # rubocop:todo Gemfile/MissingFeatureCategory

@@ -457,11 +457,11 @@
{"name":"prime","version":"0.1.2","platform":"ruby","checksum":"d4e956cadfaf04de036dc7dc74f95bf6a285a62cc509b28b7a66b245d19fe3a4"},
{"name":"prism","version":"0.18.0","platform":"ruby","checksum":"bae73ccaed950e830e136be38cdb9461f9f645f8ef306217ff1d66ff83eb589c"},
{"name":"proc_to_ast","version":"0.1.0","platform":"ruby","checksum":"92a73fa66e2250a83f8589f818b0751bcf227c68f85916202df7af85082f8691"},
{"name":"prometheus-client-mmap","version":"1.0.1","platform":"aarch64-linux","checksum":"12eedc9e0915686a00aa65a03e3a36f42c7d4d26803ba7fe5826441a2c3b7471"},
{"name":"prometheus-client-mmap","version":"1.0.1","platform":"arm64-darwin","checksum":"ff8d4577e761b0dc8b4b706bce1034d587df021a6216a218b015bdb1c0ef87e6"},
{"name":"prometheus-client-mmap","version":"1.0.1","platform":"ruby","checksum":"d894cc6aa68044d8018252971793763c855234b75dc51e2bc51abc42df0c00c5"},
{"name":"prometheus-client-mmap","version":"1.0.1","platform":"x86_64-darwin","checksum":"c482d5b00b7dcae95e61971e27133145712f6f3e036f745acff170c20f8ccdf2"},
{"name":"prometheus-client-mmap","version":"1.0.1","platform":"x86_64-linux","checksum":"612df1eafdb2eefda7d0dd69b8400fe9139dfca74a6678527d42334dbd33b2b4"},
{"name":"prometheus-client-mmap","version":"1.0.2","platform":"aarch64-linux","checksum":"1cec0954f54e47760f56c4fb9cf98de30e5a80f1a803726239590d008c976847"},
{"name":"prometheus-client-mmap","version":"1.0.2","platform":"arm64-darwin","checksum":"a9911e1963bbdb170f07af125efa2f1fb38aa6f49b442ac31abd2e13cf3599b4"},
{"name":"prometheus-client-mmap","version":"1.0.2","platform":"ruby","checksum":"f88ef1d375f24b651970bef567101a53edcedd1f5c21922c0c0b3fbec623def5"},
{"name":"prometheus-client-mmap","version":"1.0.2","platform":"x86_64-darwin","checksum":"17b6266135394fa187d939ab900263837f8b50240ea4fd7946d6ede825511e00"},
{"name":"prometheus-client-mmap","version":"1.0.2","platform":"x86_64-linux","checksum":"f03c746b1afbd583902e249b347297a8065ec0db06dae61da4c9952dcedc65d5"},
{"name":"protocol","version":"2.0.0","platform":"ruby","checksum":"dcd7c509e53b8cd6284e965a2e2e71d5291ca9e2d50acfa3d7ee0561c0df16b9"},
{"name":"pry","version":"0.14.2","platform":"java","checksum":"fd780670977ba04ff7ee32dabd4d02fe4bf02e977afe8809832d5dca1412862e"},
{"name":"pry","version":"0.14.2","platform":"ruby","checksum":"c4fe54efedaca1d351280b45b8849af363184696fcac1c72e0415f9bdac4334d"},

@@ -1251,7 +1251,7 @@ GEM
coderay
parser
unparser
prometheus-client-mmap (1.0.1)
prometheus-client-mmap (1.0.2)
rb_sys (~> 0.9)
protocol (2.0.0)
ruby_parser (~> 3.0)

@@ -2006,7 +2006,7 @@ DEPENDENCIES
pg_query (~> 4.2.3)
png_quantizator (~> 0.2.1)
premailer-rails (~> 1.10.3)
prometheus-client-mmap (~> 1.0, >= 1.0.1)
prometheus-client-mmap (~> 1.0, >= 1.0.2)
pry-byebug
pry-rails (~> 0.3.9)
pry-shell (~> 0.6.4)

@@ -2,10 +2,10 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { s__ } from '~/locale';
import {
PHASE_RUNNING,
PHASE_PENDING,
PHASE_SUCCEEDED,
PHASE_FAILED,
STATUS_RUNNING,
STATUS_PENDING,
STATUS_SUCCEEDED,
STATUS_FAILED,
STATUS_LABELS,
} from '~/kubernetes_dashboard/constants';
import WorkloadStats from '~/kubernetes_dashboard/components/workload_stats.vue';

@@ -58,20 +58,20 @@ export default {

return [
{
value: this.countPodsByPhase(PHASE_RUNNING),
title: STATUS_LABELS[PHASE_RUNNING],
value: this.countPodsByPhase(STATUS_RUNNING),
title: STATUS_LABELS[STATUS_RUNNING],
},
{
value: this.countPodsByPhase(PHASE_PENDING),
title: STATUS_LABELS[PHASE_PENDING],
value: this.countPodsByPhase(STATUS_PENDING),
title: STATUS_LABELS[STATUS_PENDING],
},
{
value: this.countPodsByPhase(PHASE_SUCCEEDED),
title: STATUS_LABELS[PHASE_SUCCEEDED],
value: this.countPodsByPhase(STATUS_SUCCEEDED),
title: STATUS_LABELS[STATUS_SUCCEEDED],
},
{
value: this.countPodsByPhase(PHASE_FAILED),
title: STATUS_LABELS[PHASE_FAILED],
value: this.countPodsByPhase(STATUS_FAILED),
title: STATUS_LABELS[STATUS_FAILED],
},
];
},

@@ -83,7 +83,7 @@ export default {
countPodsByPhase(phase) {
const filteredPods = this.k8sPods.filter((item) => item.status.phase === phase);

const hasFailedState = Boolean(phase === PHASE_FAILED && filteredPods.length);
const hasFailedState = Boolean(phase === STATUS_FAILED && filteredPods.length);
this.$emit('update-failed-state', { pods: hasFailedState });

return filteredPods.length;

@@ -1,5 +1,8 @@
import { calculateDeploymentStatus } from '~/kubernetes_dashboard/helpers/k8s_integration_helper';
import { PHASE_READY, PHASE_FAILED } from '~/kubernetes_dashboard/constants';
import {
calculateDeploymentStatus,
calculateStatefulSetStatus,
} from '~/kubernetes_dashboard/helpers/k8s_integration_helper';
import { STATUS_READY, STATUS_FAILED } from '~/kubernetes_dashboard/constants';
import { CLUSTER_AGENT_ERROR_MESSAGES } from '../constants';

export function generateServicePortsString(ports) {

@@ -22,10 +25,10 @@ export function getDeploymentsStatuses(items) {
const status = calculateDeploymentStatus(item);

switch (status) {
case PHASE_READY:
case STATUS_READY:
ready.push(item);
break;
case PHASE_FAILED:
case STATUS_FAILED:
failed.push(item);
break;
default:

@@ -63,10 +66,10 @@ export function getDaemonSetStatuses(items) {

export function getStatefulSetStatuses(items) {
const failed = items.filter((item) => {
return item.status?.readyReplicas < item.spec?.replicas;
return calculateStatefulSetStatus(item) === STATUS_FAILED;
});
const ready = items.filter((item) => {
return item.status?.readyReplicas === item.spec?.replicas;
return calculateStatefulSetStatus(item) === STATUS_READY;
});

return {

@@ -1,25 +1,25 @@
import { s__ } from '~/locale';

export const PHASE_RUNNING = 'Running';
export const PHASE_PENDING = 'Pending';
export const PHASE_SUCCEEDED = 'Succeeded';
export const PHASE_FAILED = 'Failed';
export const PHASE_READY = 'Ready';
export const STATUS_RUNNING = 'Running';
export const STATUS_PENDING = 'Pending';
export const STATUS_SUCCEEDED = 'Succeeded';
export const STATUS_FAILED = 'Failed';
export const STATUS_READY = 'Ready';

export const STATUS_LABELS = {
[PHASE_RUNNING]: s__('KubernetesDashboard|Running'),
[PHASE_PENDING]: s__('KubernetesDashboard|Pending'),
[PHASE_SUCCEEDED]: s__('KubernetesDashboard|Succeeded'),
[PHASE_FAILED]: s__('KubernetesDashboard|Failed'),
[PHASE_READY]: s__('KubernetesDashboard|Ready'),
[STATUS_RUNNING]: s__('KubernetesDashboard|Running'),
[STATUS_PENDING]: s__('KubernetesDashboard|Pending'),
[STATUS_SUCCEEDED]: s__('KubernetesDashboard|Succeeded'),
[STATUS_FAILED]: s__('KubernetesDashboard|Failed'),
[STATUS_READY]: s__('KubernetesDashboard|Ready'),
};

export const WORKLOAD_STATUS_BADGE_VARIANTS = {
[PHASE_RUNNING]: 'info',
[PHASE_PENDING]: 'warning',
[PHASE_SUCCEEDED]: 'success',
[PHASE_FAILED]: 'danger',
[PHASE_READY]: 'success',
[STATUS_RUNNING]: 'info',
[STATUS_PENDING]: 'warning',
[STATUS_SUCCEEDED]: 'success',
[STATUS_FAILED]: 'danger',
[STATUS_READY]: 'success',
};

export const PAGE_SIZE = 20;

@@ -3,6 +3,7 @@ import createDefaultClient from '~/lib/graphql';
import typeDefs from '~/environments/graphql/typedefs.graphql';
import k8sPodsQuery from './queries/k8s_dashboard_pods.query.graphql';
import k8sDeploymentsQuery from './queries/k8s_dashboard_deployments.query.graphql';
import k8sStatefulSetsQuery from './queries/k8s_dashboard_stateful_sets.query.graphql';
import { resolvers } from './resolvers';

export const apolloProvider = () => {

@@ -43,6 +44,25 @@ export const apolloProvider = () => {
},
});

cache.writeQuery({
query: k8sStatefulSetsQuery,
data: {
metadata: {
name: null,
namespace: null,
creationTimestamp: null,
labels: null,
annotations: null,
},
status: {
readyReplicas: null,
},
spec: {
replicas: null,
},
},
});

return new VueApollo({
defaultClient,
});

@@ -25,6 +25,24 @@ export const mapWorkloadItem = (item) => {
return { status: item.status };
};

export const mapSetItem = (item) => {
const status = {
...item.status,
readyReplicas: item.status?.readyReplicas || null,
};

const metadata =
{
...item.metadata,
annotations: item.metadata?.annotations || {},
labels: item.metadata?.labels || {},
} || null;

const spec = item.spec || null;

return { status, metadata, spec };
};

export const watchWorkloadItems = ({
client,
query,

@@ -32,6 +50,7 @@ export const watchWorkloadItems = ({
namespace,
watchPath,
queryField,
mapFn = mapWorkloadItem,
}) => {
const config = new Configuration(configuration);
const watcherApi = new WatchApi(config);

@@ -42,7 +61,7 @@ export const watchWorkloadItems = ({
let result = [];

watcher.on(EVENT_DATA, (data) => {
result = data.map(mapWorkloadItem);
result = data.map(mapFn);

client.writeQuery({
query,

@@ -0,0 +1,17 @@
query getK8sDashboardStatefulSets($configuration: LocalConfiguration) {
k8sStatefulSets(configuration: $configuration) @client {
metadata {
name
namespace
creationTimestamp
labels
annotations
}
status {
readyReplicas
}
spec {
replicas
}
}
}

@@ -4,11 +4,13 @@ import {
getK8sPods,
handleClusterError,
mapWorkloadItem,
mapSetItem,
buildWatchPath,
watchWorkloadItems,
} from '../helpers/resolver_helpers';
import k8sDashboardPodsQuery from '../queries/k8s_dashboard_pods.query.graphql';
import k8sDashboardDeploymentsQuery from '../queries/k8s_dashboard_deployments.query.graphql';
import k8sDashboardStatefulSetsQuery from '../queries/k8s_dashboard_stateful_sets.query.graphql';

export default {
k8sPods(_, { configuration }, { client }) {

@@ -52,4 +54,41 @@ export default {
}
});
},

k8sStatefulSets(_, { configuration, namespace = '' }, { client }) {
const config = new Configuration(configuration);

const appsV1api = new AppsV1Api(config);
const deploymentsApi = namespace
? appsV1api.listAppsV1NamespacedStatefulSet({ namespace })
: appsV1api.listAppsV1StatefulSetForAllNamespaces();
return deploymentsApi
.then((res) => {
const watchPath = buildWatchPath({
resource: 'statefulsets',
api: 'apis/apps/v1',
namespace,
});
watchWorkloadItems({
client,
query: k8sDashboardStatefulSetsQuery,
configuration,
namespace,
watchPath,
queryField: 'k8sStatefulSets',
mapFn: mapSetItem,
});

const data = res?.items || [];

return data.map(mapSetItem);
})
.catch(async (err) => {
try {
await handleClusterError(err);
} catch (error) {
throw new Error(error.message);
}
});
},
};

@@ -1,5 +1,11 @@
import { differenceInSeconds } from '~/lib/utils/datetime_utility';
import { STATUS_TRUE, STATUS_FALSE, PHASE_PENDING, PHASE_READY, PHASE_FAILED } from '../constants';
import {
STATUS_TRUE,
STATUS_FALSE,
STATUS_PENDING,
STATUS_READY,
STATUS_FAILED,
} from '../constants';

export function getAge(creationTimestamp) {
if (!creationTimestamp) return '';

@@ -28,10 +34,17 @@ export function getAge(creationTimestamp) {
export function calculateDeploymentStatus(item) {
const [available, progressing] = item.status?.conditions ?? [];
if (available?.status === STATUS_TRUE) {
return PHASE_READY;
return STATUS_READY;
}
if (available?.status === STATUS_FALSE && progressing?.status !== STATUS_TRUE) {
return PHASE_FAILED;
return STATUS_FAILED;
}
return PHASE_PENDING;
return STATUS_PENDING;
}

export function calculateStatefulSetStatus(item) {
if (item.status?.readyReplicas === item.spec?.replicas) {
return STATUS_READY;
}
return STATUS_FAILED;
}

@@ -3,7 +3,7 @@ import { s__ } from '~/locale';
import { getAge, calculateDeploymentStatus } from '../helpers/k8s_integration_helper';
import WorkloadLayout from '../components/workload_layout.vue';
import k8sDeploymentsQuery from '../graphql/queries/k8s_dashboard_deployments.query.graphql';
import { PHASE_FAILED, PHASE_READY, PHASE_PENDING, STATUS_LABELS } from '../constants';
import { STATUS_FAILED, STATUS_READY, STATUS_PENDING, STATUS_LABELS } from '../constants';

export default {
components: {

@@ -48,16 +48,16 @@ export default {
deploymentsStats() {
return [
{
value: this.countDeploymentsByStatus(PHASE_READY),
title: STATUS_LABELS[PHASE_READY],
value: this.countDeploymentsByStatus(STATUS_READY),
title: STATUS_LABELS[STATUS_READY],
},
{
value: this.countDeploymentsByStatus(PHASE_FAILED),
title: STATUS_LABELS[PHASE_FAILED],
value: this.countDeploymentsByStatus(STATUS_FAILED),
title: STATUS_LABELS[STATUS_FAILED],
},
{
value: this.countDeploymentsByStatus(PHASE_PENDING),
title: STATUS_LABELS[PHASE_PENDING],
value: this.countDeploymentsByStatus(STATUS_PENDING),
title: STATUS_LABELS[STATUS_PENDING],
},
];
},

@@ -4,10 +4,10 @@ import { getAge } from '../helpers/k8s_integration_helper';
import WorkloadLayout from '../components/workload_layout.vue';
import k8sPodsQuery from '../graphql/queries/k8s_dashboard_pods.query.graphql';
import {
PHASE_RUNNING,
PHASE_PENDING,
PHASE_SUCCEEDED,
PHASE_FAILED,
STATUS_RUNNING,
STATUS_PENDING,
STATUS_SUCCEEDED,
STATUS_FAILED,
STATUS_LABELS,
} from '../constants';

@@ -54,20 +54,20 @@ export default {
podStats() {
return [
{
value: this.countPodsByPhase(PHASE_RUNNING),
title: STATUS_LABELS[PHASE_RUNNING],
value: this.countPodsByPhase(STATUS_RUNNING),
title: STATUS_LABELS[STATUS_RUNNING],
},
{
value: this.countPodsByPhase(PHASE_PENDING),
title: STATUS_LABELS[PHASE_PENDING],
value: this.countPodsByPhase(STATUS_PENDING),
title: STATUS_LABELS[STATUS_PENDING],
},
{
value: this.countPodsByPhase(PHASE_SUCCEEDED),
title: STATUS_LABELS[PHASE_SUCCEEDED],
value: this.countPodsByPhase(STATUS_SUCCEEDED),
title: STATUS_LABELS[STATUS_SUCCEEDED],
},
{
value: this.countPodsByPhase(PHASE_FAILED),
title: STATUS_LABELS[PHASE_FAILED],
value: this.countPodsByPhase(STATUS_FAILED),
title: STATUS_LABELS[STATUS_FAILED],
},
];
},

@@ -0,0 +1,81 @@
<script>
import { s__ } from '~/locale';
import { getAge, calculateStatefulSetStatus } from '../helpers/k8s_integration_helper';
import WorkloadLayout from '../components/workload_layout.vue';
import k8sStatefulSetsQuery from '../graphql/queries/k8s_dashboard_stateful_sets.query.graphql';
import { STATUS_FAILED, STATUS_READY, STATUS_LABELS } from '../constants';

export default {
components: {
WorkloadLayout,
},
inject: ['configuration'],
apollo: {
k8sStatefulSets: {
query: k8sStatefulSetsQuery,
variables() {
return {
configuration: this.configuration,
};
},
update(data) {
return (
data?.k8sStatefulSets?.map((statefulSet) => {
return {
name: statefulSet.metadata?.name,
namespace: statefulSet.metadata?.namespace,
status: calculateStatefulSetStatus(statefulSet),
age: getAge(statefulSet.metadata?.creationTimestamp),
labels: statefulSet.metadata?.labels,
annotations: statefulSet.metadata?.annotations,
kind: s__('KubernetesDashboard|StatefulSet'),
};
}) || []
);
},
error(err) {
this.errorMessage = err?.message;
},
},
},
data() {
return {
k8sStatefulSets: [],
errorMessage: '',
};
},
computed: {
statefulSetsStats() {
return [
{
value: this.countStatefulSetsByStatus(STATUS_READY),
title: STATUS_LABELS[STATUS_READY],
},
{
value: this.countStatefulSetsByStatus(STATUS_FAILED),
title: STATUS_LABELS[STATUS_FAILED],
},
];
},
loading() {
return this.$apollo.queries.k8sStatefulSets.loading;
},
},
methods: {
countStatefulSetsByStatus(phase) {
const filteredStatefulSets =
this.k8sStatefulSets.filter((item) => item.status === phase) || [];

return filteredStatefulSets.length;
},
},
};
</script>
<template>
<workload-layout
:loading="loading"
:error-message="errorMessage"
:stats="statefulSetsStats"
:items="k8sStatefulSets"
/>
</template>

@@ -1,5 +1,7 @@
export const PODS_ROUTE_NAME = 'pods';
export const DEPLOYMENTS_ROUTE_NAME = 'deployments';
export const STATEFUL_SETS_ROUTE_NAME = 'statefulSets';

export const PODS_ROUTE_PATH = '/pods';
export const DEPLOYMENTS_ROUTE_PATH = '/deployments';
export const STATEFUL_SETS_ROUTE_PATH = '/statefulsets';

@@ -1,11 +1,14 @@
import { s__ } from '~/locale';
import PodsPage from '../pages/pods_page.vue';
import DeploymentsPage from '../pages/deployments_page.vue';
import StatefulSetsPage from '../pages/stateful_sets_page.vue';
import {
PODS_ROUTE_NAME,
PODS_ROUTE_PATH,
DEPLOYMENTS_ROUTE_NAME,
DEPLOYMENTS_ROUTE_PATH,
STATEFUL_SETS_ROUTE_NAME,
STATEFUL_SETS_ROUTE_PATH,
} from './constants';

export default [

@@ -25,4 +28,12 @@ export default [
title: s__('KubernetesDashboard|Deployments'),
},
},
{
name: STATEFUL_SETS_ROUTE_NAME,
path: STATEFUL_SETS_ROUTE_PATH,
component: StatefulSetsPage,
meta: {
title: s__('KubernetesDashboard|StatefulSets'),
},
},
];

@@ -293,7 +293,7 @@ export default {
:multiple-approval-rules-available="mr.multipleApprovalRulesAvailable"
/>
</div>
<div v-if="hasInvalidRules" class="gl-text-gray-400 gl-mt-2" data-testid="invalid-rules">
<div v-if="hasInvalidRules" class="gl-text-secondary gl-mt-2" data-testid="invalid-rules">
<gl-sprintf :message="pluralizedRuleText">
<template #danger="{ content }">
<span class="gl-font-weight-bold text-danger">{{ content }}</span>

@@ -4,7 +4,6 @@ module Pajamas
class BannerComponent < Pajamas::Component
# @param [String] button_text
# @param [String] button_link
# @param [Boolean] embedded
# @param [Symbol] variant
# @param [String] svg_path
# @param [Hash] banner_options

@@ -13,7 +12,6 @@ module Pajamas
def initialize(
button_text: 'OK',
button_link: '#',
embedded: false,
variant: :promotion,
svg_path: nil,
banner_options: {},

@@ -22,7 +20,6 @@ module Pajamas
)
@button_text = button_text
@button_link = button_link
@embedded = embedded
@variant = filter_attribute(variant.to_sym, VARIANT_OPTIONS, default: :promotion)
@svg_path = svg_path.to_s
@banner_options = banner_options

@@ -38,7 +35,6 @@ module Pajamas
classes = []
classes.push('gl-bg-gray-10!') unless introduction?
classes.push('gl-banner-introduction') if introduction?
classes.push('gl-border-none!') if @embedded
classes.join(' ')
end

@@ -30,7 +30,7 @@ module DesignManagement
attr_reader :issue, :current_user, :params

def init_collection
return ::DesignManagement::Design.none unless can?(current_user, :read_design, issue)
return DesignManagement::Design.none unless can?(current_user, :read_design, issue)

issue.designs
end

@@ -43,14 +43,14 @@ module DesignManagement

def by_filename(items)
return items if params[:filenames].nil?
return ::DesignManagement::Design.none if params[:filenames].empty?
return DesignManagement::Design.none if params[:filenames].empty?

items.with_filename(params[:filenames])
end

def by_id(items)
return items if params[:ids].nil?
return ::DesignManagement::Design.none if params[:ids].empty?
return DesignManagement::Design.none if params[:ids].empty?

items.id_in(params[:ids])
end

@@ -25,7 +25,7 @@ module DesignManagement

def execute
unless Ability.allowed?(current_user, :read_design, design_or_collection)
return ::DesignManagement::Version.none
return DesignManagement::Version.none
end

items = design_or_collection.versions

@@ -82,9 +82,9 @@ module DesignManagement
# As a query, we ascertain this by finding the last event prior to
# (or equal to) the cut-off, and seeing whether that version was a deletion.
scope :visible_at_version, -> (version) do
deletion = ::DesignManagement::Action.events[:deletion]
deletion = DesignManagement::Action.events[:deletion]
designs = arel_table
actions = ::DesignManagement::Action
actions = DesignManagement::Action
.most_recent.up_to_version(version)
.arel.as('most_recent_actions')

@@ -253,7 +253,7 @@ module DesignManagement

def user_notes_count_service
strong_memoize(:user_notes_count_service) do
::DesignManagement::DesignUserNotesCountService.new(self) # rubocop: disable CodeReuse/ServiceClass
DesignManagement::DesignUserNotesCountService.new(self) # rubocop: disable CodeReuse/ServiceClass
end
end
end

@@ -53,11 +53,11 @@ module DesignManagement
design_ids = pairs.map(&:first).uniq
version_ids = pairs.map(&:second).uniq

designs = ::DesignManagement::Design
designs = DesignManagement::Design
.where(id: design_ids)
.index_by(&:id)

versions = ::DesignManagement::Version
versions = DesignManagement::Version
.where(id: version_ids)
.index_by(&:id)

@@ -93,7 +93,7 @@ module DesignManagement

def action
strong_memoize(:most_recent_action) do
::DesignManagement::Action
DesignManagement::Action
.most_recent.up_to_version(version)
.find_by(design: design)
end

@@ -11,7 +11,7 @@ module DesignManagement
delegate :lfs_enabled?, :storage, :repository_storage, :run_after_commit, to: :project

def repository
::DesignManagement::GitRepository.new(
DesignManagement::GitRepository.new(
full_path,
self,
shard: repository_storage,

@@ -52,7 +52,7 @@ module DesignManagement
delegate :project, to: :issue

scope :for_designs, -> (designs) do
where(id: ::DesignManagement::Action.where(design_id: designs).select(:version_id)).distinct
where(id: DesignManagement::Action.where(design_id: designs).select(:version_id)).distinct
end
scope :earlier_or_equal_to, -> (version) { where("(#{table_name}.id) <= ?", version) } # rubocop:disable GitlabSecurity/SqlInjection
scope :ordered, -> { order(id: :desc) }

@@ -88,7 +88,7 @@ module DesignManagement

rows = design_actions.map { |action| action.row_attrs(version) }

ApplicationRecord.legacy_bulk_insert(::DesignManagement::Action.table_name, rows) # rubocop:disable Gitlab/BulkInsert
ApplicationRecord.legacy_bulk_insert(DesignManagement::Action.table_name, rows) # rubocop:disable Gitlab/BulkInsert
version.designs.reset
version.validate!
design_actions.each(&:performed)

@@ -639,7 +639,7 @@ class Issue < ApplicationRecord
end

def design_collection
@design_collection ||= ::DesignManagement::DesignCollection.new(self)
@design_collection ||= DesignManagement::DesignCollection.new(self)
end

def from_service_desk?

@@ -40,7 +40,7 @@ class ProjectExportJob < ApplicationRecord

class << self
def prune_expired_jobs
prunable.each_batch do |relation| # rubocop:disable Style/SymbolProc
prunable.each_batch do |relation|
relation.delete_all
end
end

@@ -172,7 +172,7 @@ module DesignManagement
def copy_designs!
design_attributes = attributes_config[:design_attributes]

::DesignManagement::Design.with_project_iid_supply(target_project) do |supply|
DesignManagement::Design.with_project_iid_supply(target_project) do |supply|
new_rows = designs.each_with_index.map do |design, i|
design.attributes.slice(*design_attributes).merge(
issue_id: target_issue.id,

@@ -22,7 +22,7 @@ module DesignManagement
actions: actions.map(&:gitaly_action)
)

::DesignManagement::Version
DesignManagement::Version
.create_for_designs(actions, sha, current_user)
.tap { |version| post_process(version, skip_system_notes) }
end

@@ -31,7 +31,7 @@ module DesignManagement

def post_process(version, skip_system_notes)
version.run_after_commit_or_now do
::DesignManagement::NewVersionWorker.perform_async(id, skip_system_notes)
DesignManagement::NewVersionWorker.perform_async(id, skip_system_notes)
end
end
end

@@ -35,7 +35,7 @@ module DesignManagement
attr_reader :files

def upload_designs!
::DesignManagement::Version.with_lock(project.id, repository) do
DesignManagement::Version.with_lock(project.id, repository) do
actions = build_actions

[

@@ -53,7 +53,7 @@ module Users
# Load the records. Groups are unavailable after membership is destroyed.
solo_owned_groups = user.solo_owned_groups.load

user.members.each_batch { |batch| batch.destroy_all } # rubocop:disable Style/SymbolProc, Cop/DestroyAll
user.members.each_batch { |batch| batch.destroy_all } # rubocop:disable Cop/DestroyAll

solo_owned_groups.each do |group|
Groups::DestroyService.new(group, current_user).execute

@@ -38,7 +38,7 @@ module ContainerRegistry
# Deleting stale ongoing repair details would put the project back to the analysis pool
ContainerRegistry::DataRepairDetail
.ongoing_since(STALE_REPAIR_DETAIL_THRESHOLD.ago)
.each_batch(of: BATCH_SIZE) do |batch| # rubocop:disable Style/SymbolProc
.each_batch(of: BATCH_SIZE) do |batch|
batch.delete_all
end
end

@@ -21,7 +21,7 @@ module Pages
PagesDeployment
.versioned
.ci_build_id_in(build_ids)
.each_batch do |batch| # rubocop: disable Style/SymbolProc -- deactivate does not accept the index argument
.each_batch do |batch|
batch.deactivate
end
end

@@ -11,7 +11,7 @@ module Pages
feature_category :pages

def perform
PagesDeployment.deactivated.each_batch do |deployments| # rubocop: disable Style/SymbolProc
PagesDeployment.deactivated.each_batch do |deployments|
deployments.each { |deployment| deployment.file.remove! }
deployments.delete_all
end

@@ -8136,7 +8136,7 @@ Input type: `VulnerabilitiesRemoveAllFromProjectInput`
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="mutationvulnerabilitiesremoveallfromprojectclientmutationid"></a>`clientMutationId` | [`String`](#string) | A unique identifier for the client performing the mutation. |
| <a id="mutationvulnerabilitiesremoveallfromprojectprojectids"></a>`projectIds` | [`[ProjectID!]!`](#projectid) | IDs of project for which all Vulnerabilities should be removed. The deletion will happen in the background so the changes will not be visible immediately. Does not work if `enable_remove_all_vulnerabilties_from_project_mutation` feature flag is disabled. |
| <a id="mutationvulnerabilitiesremoveallfromprojectprojectids"></a>`projectIds` | [`[ProjectID!]!`](#projectid) | IDs of project for which all Vulnerabilities should be removed. The deletion will happen in the background so the changes will not be visible immediately. |

#### Fields

@@ -16,10 +16,10 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - [Feature flag `admin_merge_request` removed](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132578) in GitLab 16.5.
> - [Admin group members introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/131914) in GitLab 16.5 [with a flag](../administration/feature_flags.md) named `admin_group_member`. Disabled by default. The feature flag has been removed in GitLab 16.6.
> - [Manage project access tokens introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/132342) in GitLab 16.5 in [with a flag](../administration/feature_flags.md) named `manage_project_access_tokens`. Disabled by default.
> - [Archive project introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/134998) in GitLab 16.6 in [with a flag](../administration/feature_flags.md) named `archive_project`. Disabled by default.
> - [Archive project introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/134998) in GitLab 16.7.

FLAG:
On self-managed GitLab, by default these features are not available. To make them available, an administrator can [enable the feature flags](../administration/feature_flags.md) named `admin_group_member`, `manage_project_access_tokens` and `archive_project`.
On self-managed GitLab, by default these features are not available. To make them available, an administrator can [enable the feature flags](../administration/feature_flags.md) named `admin_group_member` and `manage_project_access_tokens`.
On GitLab.com, these features are not available.

## List all member roles of a group

@@ -0,0 +1,84 @@
---
stage: none
group: unassigned
info: Any user with at least the Maintainer role can merge updates to this content. For details, see https://docs.gitlab.com/ee/development/development_processes.html#development-guidelines-review.
---

# Bitbucket Server importer developer documentation

## Prerequisites

To test imports, you need a Bitbucket Server instance running locally. For information on running a local instance, see
[these instructions](https://gitlab.com/gitlab-org/manage/import-and-integrate/team/-/blob/main/integrations/bitbucket_server.md).

## Code structure

The importer's codebase is broken up into the following directories:

- `lib/gitlab/bitbucket_server_import`: this directory contains most of the code such as
the classes used for importing resources.
- `app/workers/gitlab/bitbucket_server_import`: this directory contains the Sidekiq
workers.

## How imports advance

When a Bitbucket Server project is imported, work is divided into separate stages, with
each stage consisting of a set of Sidekiq jobs that are executed.

Between every stage, a job called `Gitlab::BitbucketServerImport::AdvanceStageWorker`
is scheduled that periodically checks if all work of the current stage is completed. If
all the work is complete, the job advances the import process to the next stage.

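A minimal sketch of this check-and-advance pattern is shown below. It is illustrative only and not the actual GitLab implementation: the `JobTracker` helper and the `NEXT_STAGE_WORKERS` lookup are assumptions, while the Sidekiq `perform_async`/`perform_in` calls are standard API.

```ruby
# Illustrative sketch of the advance-stage pattern described above.
# `JobTracker` and `NEXT_STAGE_WORKERS` are hypothetical names; only the Sidekiq calls are real.
class AdvanceStageWorker
  include Sidekiq::Worker

  def perform(project_id, waiter_key, next_stage)
    if JobTracker.remaining_jobs(waiter_key).zero?
      # Every job of the current stage has finished, so kick off the next stage.
      NEXT_STAGE_WORKERS.fetch(next_stage).perform_async(project_id)
    else
      # Jobs are still running; re-enqueue ourselves and check again shortly.
      self.class.perform_in(30, project_id, waiter_key, next_stage)
    end
  end
end
```
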
## Stages

### 1. Stage::ImportRepositoryWorker

This worker imports the repository and schedules the next stage when
done.

### 2. Stage::ImportPullRequestsWorker

This worker imports all pull requests. For every pull request, a job for the
`Gitlab::BitbucketImport::ImportPullRequestWorker` worker is scheduled.

Bitbucket Server keeps track of references for open pull requests in
`refs/heads/pull-requests`, but closed and merged requests get moved
into hidden internal refs under `stash-refs/pull-requests`.

As a result, they are not fetched by default. To prevent merge requests from not having
commits and therefore having empty diffs, we fetch affected source and target
commits from the server before importing the pull request.
We save the fetched commits as refs so that Git doesn't remove them, which can happen
if they are unused.
Source commits are saved as `#{commit}:refs/merge-requests/#{pull_request.iid}/head`
and target commits are saved as `#{commit}:refs/keep-around/#{commit}`.

When creating a pull request, we need to match Bitbucket users with GitLab users for
the author and reviewers. Whenever a matching user is found, the GitLab user ID is cached
for 24 hours so that it doesn't have to be searched for again.

### 3. Stage::ImportNotesWorker

This worker imports notes (comments) for all merge requests.

For every merge request, a job for the `Gitlab::BitbucketServerImport::ImportPullRequestNotesWorker`
worker is scheduled which imports all standalone comments, inline comments, merge events, and
approved events for the merge request.

### 4. Stage::ImportLfsObjectsWorker

Imports LFS objects from the source project by scheduling a
`Gitlab::BitbucketServerImport::ImportLfsObjectsWorker` job for every LFS object.

### 5. Stage::FinishImportWorker

This worker completes the import process by performing some housekeeping
such as marking the import as completed.

## Pull request mentions

Pull request descriptions and notes can contain @mentions to users. If a user with the
same email does not exist on GitLab, this can lead to incorrect users being tagged.

To get around this, we build a cache containing all users who have access to the Bitbucket
project and then convert mentions in pull request descriptions and notes.

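As a rough sketch of the user-match caching described above (the cache key format and helper name are assumptions, not the importer's actual code; `Rails.cache.fetch` and `User.find_by_any_email` are standard GitLab/Rails calls):

```ruby
# Illustrative only: memoize the GitLab user ID matched to a Bitbucket Server user
# for 24 hours so repeated lookups during an import don't query the database every time.
def cached_gitlab_user_id(email)
  Rails.cache.fetch("bitbucket_server_import/user_id/#{email}", expires_in: 24.hours) do
    User.find_by_any_email(email)&.id
  end
end
```
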
@@ -222,8 +222,8 @@ Try to avoid **below** when referring to an example or table in a documentation

Use uppercase for **Beta**. For example: **The XYZ feature is in Beta.** or **This Beta release is ready to test.**

You might also want to link to [this section](../../../policy/experiment-beta-support.md#beta)
in the handbook when writing about Beta features.
You might also want to link to [this topic](../../../policy/experiment-beta-support.md#beta)
when writing about Beta features.

## blacklist

@@ -626,8 +626,8 @@ Use **expand** instead of **open** when you are talking about expanding or colla

Use uppercase for **Experiment**. For example: **The XYZ feature is an Experiment.** or **This Experiment is ready to test.**

You might also want to link to [this section](../../../policy/experiment-beta-support.md#experiment)
in the handbook when writing about Experiment features.
You might also want to link to [this topic](../../../policy/experiment-beta-support.md#experiment)
when writing about Experiment features.

## export

@@ -15,7 +15,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> - Ability to create and remove a custom role with the UI [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/393235) in GitLab 16.4.
> - Ability to manage group members [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/17364) in GitLab 16.5.
> - Ability to manage project access tokens [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/421778) in GitLab 16.5 [with a flag](../administration/feature_flags.md) named `manage_project_access_tokens`.
> - Ability to archive projects [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/425957) in GitLab 16.6 in [with a flag](../administration/feature_flags.md) named `archive_project`. Disabled by default.
> - Ability to archive projects [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/425957) in GitLab 16.7.
> - Ability to use the UI to add a user to your group with a custom role, change a user's custom role, or remove a custom role from a group member [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/393239) in GitLab 16.7.

Custom roles allow group Owners or instance administrators to create roles

@@ -246,7 +246,7 @@ go to `https://gitlab.example.com/projects/<id>`.
To copy the project ID:

1. On the left sidebar, select **Search or go to** and find your project.
1. On the project overview page, in the upper-right corner, select **Actions** (**{ellipsis_v})**.
1. On the project overview page, in the upper-right corner, select **Actions** (**{ellipsis_v}**).
1. Select **Copy project ID**.

For example, if in your personal namespace `alex` you have a project `my-project` with the ID `123456`, you can access the project

@@ -288,7 +288,7 @@ Prerequisites:
To leave a project:

1. On the left sidebar, select **Search or go to** and find your project.
1. On the project overview page, in the upper-right corner, select **Actions** (**{ellipsis_v})**.
1. On the project overview page, in the upper-right corner, select **Actions** (**{ellipsis_v}**).
1. Select **Leave project**, then **Leave project** again..

## Add a compliance framework to a project **(PREMIUM)**

@@ -46,7 +46,7 @@ module Gitlab

attr_reader :storage, :gl_repository, :gl_project_path, :container

delegate :list_all_blobs, to: :gitaly_blob_client
delegate :list_all_blobs, :list_blobs, to: :gitaly_blob_client

# This remote name has to be stable for all types of repositories that
# can join an object pool. If it's structure ever changes, a migration

@@ -27917,6 +27917,12 @@ msgstr ""
msgid "KubernetesDashboard|Running"
msgstr ""

msgid "KubernetesDashboard|StatefulSet"
msgstr ""

msgid "KubernetesDashboard|StatefulSets"
msgstr ""

msgid "KubernetesDashboard|Status"
msgstr ""

@@ -9,4 +9,4 @@ test_package:
- "CONAN_LOGIN_USERNAME=ci_user CONAN_PASSWORD=${CI_JOB_TOKEN} conan upload <%= package.name %>/0.1@mycompany/stable --all --remote=gitlab"
- conan install <%= package.name %>/0.1@mycompany/stable --remote=gitlab
tags:
- runner-for-<%= project.name %>
- runner-for-<%= project.name %>

@@ -91,6 +91,7 @@ module QA
args << '--docker-privileged=true'
args << "--docker-network-mode=#{network}"
args << "--docker-volumes=/certs/client"
args << "--docker-extra-hosts=gdk.test:#{gdk_host_ip}" if gdk_network
end

<<~CMD.strip

@@ -29,7 +29,8 @@ module QA
package.remove_via_api!
end

it 'publishes, installs, and deletes a Conan package', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348014' do
it 'publishes, installs, and deletes a Conan package', :reliable,
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348014' do
Flow::Login.sign_in

Support::Retrier.retry_on_exception(max_attempts: 3, sleep_interval: 2) do

@@ -48,7 +49,7 @@ module QA
end

Page::Project::Job::Show.perform do |job|
expect(job).to be_successful(timeout: 800)
expect(job).to be_successful(timeout: 180)
end

Page::Project::Menu.perform(&:go_to_package_registry)

@@ -42,7 +42,7 @@ module QA
end

Page::Project::Job::Show.perform do |job|
expect(job).to be_successful(timeout: 800)
expect(job).to be_successful(timeout: 180)

job.go_to_pipeline
end

@@ -52,7 +52,7 @@ module QA
end

Page::Project::Job::Show.perform do |job|
expect(job).to be_successful(timeout: 800)
expect(job).to be_successful(timeout: 180)
end
end

@@ -61,7 +61,8 @@ module QA
package.remove_via_api!
end

it 'uploads a generic package and downloads it', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348017' do
it 'uploads a generic package and downloads it', :reliable,
testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/348017' do
Page::Project::Menu.perform(&:go_to_package_registry)

Page::Project::Packages::Index.perform do |index|

@@ -1,7 +1,7 @@
# frozen_string_literal: true

module QA
RSpec.describe 'Package', :object_storage, product_group: :package_registry do
RSpec.describe 'Package', :object_storage, :reliable, product_group: :package_registry do
describe 'Helm Registry', :external_api_calls do
using RSpec::Parameterized::TableSyntax
include Runtime::Fixtures

@@ -65,7 +65,7 @@ module QA
end

Page::Project::Job::Show.perform do |job|
expect(job).to be_successful(timeout: 800)
expect(job).to be_successful(timeout: 180)
end

Page::Project::Menu.perform(&:go_to_package_registry)

@@ -97,7 +97,7 @@ module QA
end

Page::Project::Job::Show.perform do |job|
expect(job).to be_successful(timeout: 800)
expect(job).to be_successful(timeout: 180)
end
end
end

@@ -62,22 +62,6 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
end
end

describe 'embedded' do
context 'by default (false)' do
it 'keeps the banner\'s borders' do
expect(page).not_to have_css ".gl-banner.gl-border-none\\!"
end
end

context 'when set to true' do
let(:options) { { embedded: true } }

it 'removes the banner\'s borders' do
expect(page).to have_css ".gl-banner.gl-border-none\\!"
end
end
end

describe 'variant' do
context 'by default (promotion)' do
it 'does not apply introduction class' do

@@ -8,19 +8,16 @@ module Pajamas
# @param button_text text
# @param button_link text
# @param content textarea
# @param embedded toggle
# @param variant select {{ Pajamas::BannerComponent::VARIANT_OPTIONS }}
def default(
button_text: "Learn more",
button_link: "https://about.gitlab.com/",
content: "Add your message here.",
embedded: false,
variant: :promotion
)
render(Pajamas::BannerComponent.new(
button_text: button_text,
button_link: button_link,
embedded: embedded,
svg_path: "illustrations/autodevops.svg",
variant: variant
)) do |c|

@@ -69,7 +69,7 @@ FactoryBot.define do
trait :design_action_image_v432x230_upload do
mount_point { :image_v432x230 }
model { association(:design_action) }
uploader { ::DesignManagement::DesignV432x230Uploader.name }
uploader { DesignManagement::DesignV432x230Uploader.name }
end
end
end

@@ -53,7 +53,7 @@ RSpec.describe 'Upload a design through graphQL', :js, feature_category: :design
RSpec.shared_examples 'for a design upload through graphQL' do
it 'creates proper objects' do
expect { subject }
.to change { ::DesignManagement::Design.count }.by(1)
.to change { DesignManagement::Design.count }.by(1)
.and change { ::LfsObject.count }.by(1)
end

@@ -223,3 +223,69 @@ export const mockDeploymentsTableItems = [
kind: 'Deployment',
},
];

const readyStatefulSet = {
status: { readyReplicas: 2 },
spec: { replicas: 2 },
metadata: {
name: 'statefulSet-2',
namespace: 'default',
creationTimestamp: '2023-07-31T11:50:17Z',
labels: {},
annotations: {},
},
};
const failedStatefulSet = {
status: { readyReplicas: 1 },
spec: { replicas: 2 },
metadata: {
name: 'statefulSet-3',
namespace: 'default',
creationTimestamp: '2023-11-21T11:50:59Z',
labels: {},
annotations: {},
},
};

export const k8sStatefulSetsMock = [readyStatefulSet, readyStatefulSet, failedStatefulSet];

export const mockStatefulSetsStats = [
{
title: 'Ready',
value: 2,
},
{
title: 'Failed',
value: 1,
},
];

export const mockStatefulSetsTableItems = [
{
name: 'statefulSet-2',
namespace: 'default',
status: 'Ready',
age: '114d',
labels: {},
annotations: {},
kind: 'StatefulSet',
},
{
name: 'statefulSet-2',
namespace: 'default',
status: 'Ready',
age: '114d',
labels: {},
annotations: {},
kind: 'StatefulSet',
},
{
name: 'statefulSet-3',
namespace: 'default',
status: 'Failed',
age: '1d',
labels: {},
annotations: {},
kind: 'StatefulSet',
},
];

@ -2,7 +2,8 @@ import { CoreV1Api, WatchApi, AppsV1Api } from '@gitlab/cluster-client';
|
|||
import { resolvers } from '~/kubernetes_dashboard/graphql/resolvers';
|
||||
import k8sDashboardPodsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_pods.query.graphql';
|
||||
import k8sDashboardDeploymentsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_deployments.query.graphql';
|
||||
import { k8sPodsMock, k8sDeploymentsMock } from '../mock_data';
|
||||
import k8sDashboardStatefulSetsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_stateful_sets.query.graphql';
|
||||
import { k8sPodsMock, k8sDeploymentsMock, k8sStatefulSetsMock } from '../mock_data';
|
||||
|
||||
describe('~/frontend/environments/graphql/resolvers', () => {
|
||||
let mockResolvers;
|
||||
|
|
@ -187,4 +188,92 @@ describe('~/frontend/environments/graphql/resolvers', () => {
      ).rejects.toThrow('API error');
    });
  });

  describe('k8sStatefulSets', () => {
    const client = { writeQuery: jest.fn() };

    const mockWatcher = WatchApi.prototype;
    const mockStatefulSetsListWatcherFn = jest.fn().mockImplementation(() => {
      return Promise.resolve(mockWatcher);
    });

    const mockOnDataFn = jest.fn().mockImplementation((eventName, callback) => {
      if (eventName === 'data') {
        callback([]);
      }
    });

    const mockStatefulSetsListFn = jest.fn().mockImplementation(() => {
      return Promise.resolve({
        items: k8sStatefulSetsMock,
      });
    });

    const mockAllStatefulSetsListFn = jest.fn().mockImplementation(mockStatefulSetsListFn);

    describe('when the StatefulSets data is present', () => {
      beforeEach(() => {
        jest
          .spyOn(AppsV1Api.prototype, 'listAppsV1StatefulSetForAllNamespaces')
          .mockImplementation(mockAllStatefulSetsListFn);
        jest
          .spyOn(mockWatcher, 'subscribeToStream')
          .mockImplementation(mockStatefulSetsListWatcherFn);
        jest.spyOn(mockWatcher, 'on').mockImplementation(mockOnDataFn);
      });

      it('should request all StatefulSets from the cluster_client library and watch the events', async () => {
        const StatefulSets = await mockResolvers.Query.k8sStatefulSets(
          null,
          {
            configuration,
          },
          { client },
        );

        expect(mockAllStatefulSetsListFn).toHaveBeenCalled();
        expect(mockStatefulSetsListWatcherFn).toHaveBeenCalled();

        expect(StatefulSets).toEqual(k8sStatefulSetsMock);
      });

      it('should update cache with the new data when received from the library', async () => {
        await mockResolvers.Query.k8sStatefulSets(
          null,
          { configuration, namespace: '' },
          { client },
        );

        expect(client.writeQuery).toHaveBeenCalledWith({
          query: k8sDashboardStatefulSetsQuery,
          variables: { configuration, namespace: '' },
          data: { k8sStatefulSets: [] },
        });
      });
    });

    it('should not watch StatefulSets from the cluster_client library when the StatefulSets data is not present', async () => {
      jest.spyOn(AppsV1Api.prototype, 'listAppsV1StatefulSetForAllNamespaces').mockImplementation(
        jest.fn().mockImplementation(() => {
          return Promise.resolve({
            items: [],
          });
        }),
      );

      await mockResolvers.Query.k8sStatefulSets(null, { configuration }, { client });

      expect(mockStatefulSetsListWatcherFn).not.toHaveBeenCalled();
    });

    it('should throw an error if the API call fails', async () => {
      jest
        .spyOn(AppsV1Api.prototype, 'listAppsV1StatefulSetForAllNamespaces')
        .mockRejectedValue(new Error('API error'));

      await expect(
        mockResolvers.Query.k8sStatefulSets(null, { configuration }, { client }),
      ).rejects.toThrow('API error');
    });
  });
});

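The spec above pins down the expected resolver behaviour: list StatefulSets through the cluster client, start a watch only when data is present, push watch events into the Apollo cache, and let API errors propagate. A minimal sketch of a resolver with that shape, assuming the cluster-client classes and query used in the spec; the watch path and wiring details are illustrative, not the actual GitLab implementation:

// Illustrative sketch of a resolver that would satisfy the spec above; not GitLab's
// actual implementation. It assumes AppsV1Api/WatchApi from '@gitlab/cluster-client'
// behave as mocked in the spec, and the watch path below is hypothetical.
import { AppsV1Api, WatchApi } from '@gitlab/cluster-client';
import k8sDashboardStatefulSetsQuery from '~/kubernetes_dashboard/graphql/queries/k8s_dashboard_stateful_sets.query.graphql';

const watchStatefulSets = async ({ configuration, namespace, client }) => {
  const path = '/apis/apps/v1/statefulsets'; // hypothetical watch path
  const watcher = await new WatchApi(configuration).subscribeToStream(path, { watch: true });

  watcher.on('data', (items) => {
    // Every watch event refreshes the cached query result.
    client.writeQuery({
      query: k8sDashboardStatefulSetsQuery,
      variables: { configuration, namespace },
      data: { k8sStatefulSets: items },
    });
  });
};

export const k8sStatefulSets = async (_, { configuration, namespace = '' }, { client }) => {
  const api = new AppsV1Api(configuration);
  const { items } = await api.listAppsV1StatefulSetForAllNamespaces();

  // Only start the watcher when the initial list is non-empty, as the spec expects.
  if (items.length > 0) {
    await watchStatefulSets({ configuration, namespace, client });
  }

  return items;
};
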
@ -1,6 +1,7 @@
import {
  getAge,
  calculateDeploymentStatus,
  calculateStatefulSetStatus,
} from '~/kubernetes_dashboard/helpers/k8s_integration_helper';
import { useFakeDate } from 'helpers/fake_date';

@ -52,4 +53,23 @@ describe('k8s_integration_helper', () => {
      expect(calculateDeploymentStatus({ status })).toBe(expected);
    });
  });

  describe('calculateStatefulSetStatus', () => {
    const ready = {
      status: { readyReplicas: 2 },
      spec: { replicas: 2 },
    };
    const failed = {
      status: { readyReplicas: 1 },
      spec: { replicas: 2 },
    };

    it.each`
      condition                                                  | item      | expected
      ${'there are less readyReplicas than replicas in spec'}    | ${failed} | ${'Failed'}
      ${'there are the same amount of readyReplicas as in spec'} | ${ready}  | ${'Ready'}
    `('returns status as $expected when $condition', ({ item, expected }) => {
      expect(calculateStatefulSetStatus(item)).toBe(expected);
    });
  });
});

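From these cases, calculateStatefulSetStatus presumably just compares the reported ready replicas with the desired replica count. A minimal sketch of a helper consistent with this spec (an assumed shape, not necessarily the committed implementation):

// Illustrative: a helper consistent with the spec above, not GitLab's actual code.
export const calculateStatefulSetStatus = (item) => {
  const readyReplicas = item?.status?.readyReplicas ?? 0;
  const desiredReplicas = item?.spec?.replicas ?? 0;

  // All desired replicas ready => 'Ready'; anything short of that => 'Failed'.
  return readyReplicas === desiredReplicas ? 'Ready' : 'Failed';
};
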
@ -0,0 +1,106 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import StatefulSetsPage from '~/kubernetes_dashboard/pages/stateful_sets_page.vue';
import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
import { useFakeDate } from 'helpers/fake_date';
import {
  k8sStatefulSetsMock,
  mockStatefulSetsStats,
  mockStatefulSetsTableItems,
} from '../graphql/mock_data';

Vue.use(VueApollo);

describe('Kubernetes dashboard statefulSets page', () => {
  let wrapper;

  const configuration = {
    basePath: 'kas/tunnel/url',
    baseOptions: {
      headers: { 'GitLab-Agent-Id': '1' },
    },
  };

  const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);

  const createApolloProvider = () => {
    const mockResolvers = {
      Query: {
        k8sStatefulSets: jest.fn().mockReturnValue(k8sStatefulSetsMock),
      },
    };

    return createMockApollo([], mockResolvers);
  };

  const createWrapper = (apolloProvider = createApolloProvider()) => {
    wrapper = shallowMount(StatefulSetsPage, {
      provide: { configuration },
      apolloProvider,
    });
  };

  describe('mounted', () => {
    it('renders WorkloadLayout component', () => {
      createWrapper();

      expect(findWorkloadLayout().exists()).toBe(true);
    });

    it('sets loading prop for the WorkloadLayout', () => {
      createWrapper();

      expect(findWorkloadLayout().props('loading')).toBe(true);
    });

    it('removes loading prop from the WorkloadLayout when the list of pods loaded', async () => {
      createWrapper();
      await waitForPromises();

      expect(findWorkloadLayout().props('loading')).toBe(false);
    });
  });

  describe('when gets pods data', () => {
    useFakeDate(2023, 10, 23, 10, 10);

    it('sets correct stats object for the WorkloadLayout', async () => {
      createWrapper();
      await waitForPromises();

      expect(findWorkloadLayout().props('stats')).toEqual(mockStatefulSetsStats);
    });

    it('sets correct table items object for the WorkloadLayout', async () => {
      createWrapper();
      await waitForPromises();

      expect(findWorkloadLayout().props('items')).toMatchObject(mockStatefulSetsTableItems);
    });
  });

  describe('when gets an error from the cluster_client API', () => {
    const error = new Error('Error from the cluster_client API');
    const createErroredApolloProvider = () => {
      const mockResolvers = {
        Query: {
          k8sStatefulSets: jest.fn().mockRejectedValueOnce(error),
        },
      };

      return createMockApollo([], mockResolvers);
    };

    beforeEach(async () => {
      createWrapper(createErroredApolloProvider());
      await waitForPromises();
    });

    it('sets errorMessage prop for the WorkloadLayout', () => {
      expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
    });
  });
});

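The page spec above checks that the stats prop matches mockStatefulSetsStats (2 Ready, 1 Failed) for the three mocked StatefulSets. One plausible way such a page could derive those counts before handing them to WorkloadLayout, assuming the same status helper sketched earlier (a sketch, not the committed component code):

// Illustrative: deriving the WorkloadLayout stats from the fetched StatefulSets.
import { calculateStatefulSetStatus } from '~/kubernetes_dashboard/helpers/k8s_integration_helper';

const getStatefulSetsStats = (items) => {
  const countByStatus = (wanted) =>
    items.filter((item) => calculateStatefulSetStatus(item) === wanted).length;

  return [
    { title: 'Ready', value: countByStatus('Ready') },
    { title: 'Failed', value: countByStatus('Failed') },
  ];
};

// For k8sStatefulSetsMock this yields [{ title: 'Ready', value: 2 }, { title: 'Failed', value: 1 }].
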
@ -139,7 +139,7 @@ RSpec.describe DesignManagement::DeleteDesignsService, feature_category: :design
  end

  it 'informs the new-version-worker' do
    expect(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer, false)
    expect(DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer, false)

    run_service
  end

@ -33,7 +33,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
    issue.design_collection.repository.raw.delete_all_refs_except([Gitlab::Git::BLANK_SHA])
  end

  allow(::DesignManagement::NewVersionWorker)
  allow(DesignManagement::NewVersionWorker)
    .to receive(:perform_async).with(Integer, false).and_return(nil)
end

@ -293,7 +293,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
  it 'has the correct side-effects' do
    counter = Gitlab::UsageDataCounters::DesignsCounter

    expect(::DesignManagement::NewVersionWorker)
    expect(DesignManagement::NewVersionWorker)
      .to receive(:perform_async).once.with(Integer, false).and_return(nil)

    expect { run_service }

@ -327,7 +327,7 @@ RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_m
  design_repository.create_if_not_exists
  design_repository.has_visible_content?

  expect(::DesignManagement::NewVersionWorker)
  expect(DesignManagement::NewVersionWorker)
    .to receive(:perform_async).once.with(Integer, false).and_return(nil)

  expect { service.execute }

@ -6,15 +6,15 @@ module DesignManagementTestHelpers
  end

  def delete_designs(*designs)
    act_on_designs(designs) { ::DesignManagement::Action.deletion }
    act_on_designs(designs) { DesignManagement::Action.deletion }
  end

  def restore_designs(*designs)
    act_on_designs(designs) { ::DesignManagement::Action.creation }
    act_on_designs(designs) { DesignManagement::Action.creation }
  end

  def modify_designs(*designs)
    act_on_designs(designs) { ::DesignManagement::Action.modification }
    act_on_designs(designs) { DesignManagement::Action.modification }
  end

  def path_for_design(design)

@ -11,7 +11,6 @@ import (
    "regexp"
    "runtime"
    "testing"
    "time"

    "github.com/golang-jwt/jwt/v5"
    "github.com/stretchr/testify/require"

@ -154,19 +153,6 @@ type UploadClaims struct {
    jwt.RegisteredClaims
}

func Retry(t testing.TB, timeout time.Duration, fn func() error) {
    t.Helper()
    start := time.Now()
    var err error
    for ; time.Since(start) < timeout; time.Sleep(time.Millisecond) {
        err = fn()
        if err == nil {
            return
        }
    }
    t.Fatalf("test timeout after %v; last error: %v", timeout, err)
}

func SetupStaticFileHelper(t *testing.T, fpath, content, directory string) string {
    cwd, err := os.Getwd()
    require.NoError(t, err, "get working directory")

@ -311,11 +311,7 @@ func TestUploadHandlerMultipartUploadMaximumSizeFromApi(t *testing.T) {
    response := testUploadArtifacts(t, contentType, ts.URL+Path, &contentBuffer)
    require.Equal(t, http.StatusRequestEntityTooLarge, response.Code)

    testhelper.Retry(t, 5*time.Second, func() error {
        if os.GetObjectMD5(test.ObjectPath) == "" {
            return nil
        }

        return fmt.Errorf("file is still present")
    })
    require.Eventually(t, func() bool {
        return os.GetObjectMD5(test.ObjectPath) == ""
    }, 5*time.Second, time.Millisecond, "file is still present")
}

@ -9,7 +9,6 @@ import (

    "github.com/stretchr/testify/require"

    "gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
    "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)

@ -41,14 +40,10 @@ func TestGoCloudObjectUpload(t *testing.T) {

    cancel()

    testhelper.Retry(t, 5*time.Second, func() error {
    require.Eventually(t, func() bool {
        exists, err := bucket.Exists(ctx, objectName)
        require.NoError(t, err)

        if exists {
            return fmt.Errorf("file %s is still present", objectName)
        }

        return nil
    })
        return !exists
    }, 5*time.Second, time.Millisecond, fmt.Sprintf("file %s is still present", objectName))
}

@ -16,7 +16,6 @@ import (
    "github.com/stretchr/testify/require"

    "gitlab.com/gitlab-org/gitlab/workhorse/internal/config"
    "gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
    "gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination/objectstore/test"
)

@ -62,13 +61,9 @@ func TestS3ObjectUpload(t *testing.T) {

    cancel()

    testhelper.Retry(t, 5*time.Second, func() error {
        if test.S3ObjectDoesNotExist(t, sess, config, objectName) {
            return nil
        }

        return fmt.Errorf("file is still present")
    })
    require.Eventually(t, func() bool {
        return (test.S3ObjectDoesNotExist(t, sess, config, objectName))
    }, 5*time.Second, time.Millisecond, "file is still present")
    })
}
}