Add latest changes from gitlab-org/gitlab@master

This commit is contained in:
GitLab Bot 2023-11-24 00:10:07 +00:00
parent 0bbbe62b73
commit dc6ae9609f
26 changed files with 763 additions and 200 deletions

View File

@ -1,8 +1,9 @@
<script>
import { GlTabs, GlTab, GlLoadingIcon, GlBadge, GlTable, GlPagination } from '@gitlab/ui';
import { __, s__ } from '~/locale';
import { getAge } from '~/kubernetes_dashboard/helpers/k8s_integration_helper';
import k8sServicesQuery from '../graphql/queries/k8s_services.query.graphql';
import { generateServicePortsString, getServiceAge } from '../helpers/k8s_integration_helper';
import { generateServicePortsString } from '../helpers/k8s_integration_helper';
import { SERVICES_LIMIT_PER_PAGE } from '../constants';
import KubernetesSummary from './kubernetes_summary.vue';
@ -62,7 +63,7 @@ export default {
clusterIP: service?.spec?.clusterIP,
externalIP: service?.spec?.externalIP,
ports: generateServicePortsString(service?.spec?.ports),
age: getServiceAge(service?.metadata?.creationTimestamp),
age: getAge(service?.metadata?.creationTimestamp),
};
});
},

View File

@ -66,8 +66,15 @@ type k8sPodStatus {
phase: String
}
type k8sPodMetadata {
name: String
namespace: String
creationTimestamp: String
}
type LocalK8sPods {
status: k8sPodStatus
metadata: k8sPodMetadata
}
input LocalConfiguration {

View File

@ -1,4 +1,3 @@
import { differenceInSeconds } from '~/lib/utils/datetime_utility';
import { CLUSTER_AGENT_ERROR_MESSAGES, STATUS_TRUE, STATUS_FALSE } from '../constants';
export function generateServicePortsString(ports) {
@ -12,30 +11,6 @@ export function generateServicePortsString(ports) {
.join(', ');
}
export function getServiceAge(creationTimestamp) {
if (!creationTimestamp) return '';
const timeDifference = differenceInSeconds(new Date(creationTimestamp), new Date());
const seconds = Math.floor(timeDifference);
const minutes = Math.floor(seconds / 60) % 60;
const hours = Math.floor(seconds / 60 / 60) % 24;
const days = Math.floor(seconds / 60 / 60 / 24);
let ageString;
if (days > 0) {
ageString = `${days}d`;
} else if (hours > 0) {
ageString = `${hours}h`;
} else if (minutes > 0) {
ageString = `${minutes}m`;
} else {
ageString = `${seconds}s`;
}
return ageString;
}
export function getDeploymentsStatuses(items) {
const failed = [];
const ready = [];

View File

@ -0,0 +1,44 @@
<script>
import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
import WorkloadStats from './workload_stats.vue';
import WorkloadTable from './workload_table.vue';

// Shared layout for Kubernetes dashboard workload pages. Renders exactly one
// of three mutually exclusive states: a spinner while `loading` is true, a
// danger alert when `errorMessage` is non-empty, otherwise the stats summary
// followed by the items table.
export default {
  components: {
    GlLoadingIcon,
    GlAlert,
    WorkloadStats,
    WorkloadTable,
  },
  props: {
    // True while the workload data is still being fetched.
    loading: {
      type: Boolean,
      default: false,
      required: false,
    },
    // Non-empty string switches the layout into the error state
    // (takes effect only once `loading` is false).
    errorMessage: {
      type: String,
      default: '',
      required: false,
    },
    // Stat objects passed through to <workload-stats>.
    stats: {
      type: Array,
      required: true,
    },
    // Table row objects passed through to <workload-table>.
    items: {
      type: Array,
      required: true,
    },
  },
};
</script>

<template>
  <gl-loading-icon v-if="loading" />

  <gl-alert v-else-if="errorMessage" variant="danger" :dismissible="false" class="gl-mb-5">
    {{ errorMessage }}
  </gl-alert>

  <div v-else>
    <workload-stats :stats="stats" />
    <workload-table :items="items" />
  </div>
</template>

View File

@ -0,0 +1,77 @@
<script>
import { GlTable, GlBadge, GlPagination } from '@gitlab/ui';
import {
  WORKLOAD_STATUS_BADGE_VARIANTS,
  PAGE_SIZE,
  TABLE_HEADING_CLASSES,
  DEFAULT_WORKLOAD_TABLE_FIELDS,
} from '../constants';

// Paginated, sortable table of Kubernetes workload items. The `status` cell
// is rendered as a badge whose variant is looked up per status value.
export default {
  components: {
    GlTable,
    GlBadge,
    GlPagination,
  },
  props: {
    // Row objects; each item's keys must match the configured field keys.
    items: {
      type: Array,
      required: true,
    },
    // Column definitions; defaults to name/status/namespace/age.
    fields: {
      type: Array,
      default: () => DEFAULT_WORKLOAD_TABLE_FIELDS,
      required: false,
    },
  },
  data() {
    return {
      // 1-based page index, two-way bound to <gl-pagination>.
      currentPage: 1,
    };
  },
  computed: {
    // Decorates every configured field with the shared heading class and
    // makes all columns sortable.
    tableFields() {
      return this.fields.map((field) => {
        return {
          ...field,
          thClass: TABLE_HEADING_CLASSES,
          sortable: true,
        };
      });
    },
  },
  PAGE_SIZE,
  WORKLOAD_STATUS_BADGE_VARIANTS,
  // NOTE(review): not referenced in this template — confirm whether this
  // cell-class option is still needed.
  TABLE_CELL_CLASSES: 'gl-p-2',
};
</script>

<template>
  <div class="gl-mt-8">
    <gl-table
      :items="items"
      :fields="tableFields"
      :per-page="$options.PAGE_SIZE"
      :current-page="currentPage"
      stacked="md"
      bordered
    >
      <template #cell(status)="{ item: { status } }">
        <gl-badge
          :variant="$options.WORKLOAD_STATUS_BADGE_VARIANTS[status]"
          size="sm"
          class="gl-ml-2"
          >{{ status }}</gl-badge
        >
      </template>
    </gl-table>

    <gl-pagination
      v-model="currentPage"
      :per-page="$options.PAGE_SIZE"
      :total-items="items.length"
      align="center"
      class="gl-mt-6"
    />
  </div>
</template>

View File

@ -11,3 +11,33 @@ export const STATUS_LABELS = {
[PHASE_SUCCEEDED]: s__('KubernetesDashboard|Succeeded'),
[PHASE_FAILED]: s__('KubernetesDashboard|Failed'),
};
// Maps a pod phase (PHASE_* constants defined above) to the GlBadge variant
// used to render its status in workload tables.
export const WORKLOAD_STATUS_BADGE_VARIANTS = {
  [PHASE_RUNNING]: 'info',
  [PHASE_PENDING]: 'warning',
  [PHASE_SUCCEEDED]: 'success',
  [PHASE_FAILED]: 'danger',
};

// Rows per page in workload tables.
export const PAGE_SIZE = 20;

// Shared <th> styling for workload table headings.
export const TABLE_HEADING_CLASSES = 'gl-bg-gray-50! gl-font-weight-bold gl-white-space-nowrap';

// Default column set for workload tables; a page can override via the
// `fields` prop of the workload table component.
export const DEFAULT_WORKLOAD_TABLE_FIELDS = [
  {
    key: 'name',
    label: s__('KubernetesDashboard|Name'),
  },
  {
    key: 'status',
    label: s__('KubernetesDashboard|Status'),
  },
  {
    key: 'namespace',
    label: s__('KubernetesDashboard|Namespace'),
  },
  {
    key: 'age',
    label: s__('KubernetesDashboard|Age'),
  },
];

View File

@ -13,6 +13,11 @@ export const apolloProvider = () => {
cache.writeQuery({
query: k8sPodsQuery,
data: {
metadata: {
name: null,
namespace: null,
creationTimestamp: null,
},
status: {
phase: null,
},

View File

@ -1,5 +1,10 @@
query getK8sDashboardPods($configuration: LocalConfiguration) {
k8sPods(configuration: $configuration) @client {
metadata {
name
namespace
creationTimestamp
}
status {
phase
}

View File

@ -0,0 +1,25 @@
import { differenceInSeconds } from '~/lib/utils/datetime_utility';
export function getAge(creationTimestamp) {
if (!creationTimestamp) return '';
const timeDifference = differenceInSeconds(new Date(creationTimestamp), new Date());
const seconds = Math.floor(timeDifference);
const minutes = Math.floor(seconds / 60) % 60;
const hours = Math.floor(seconds / 60 / 60) % 24;
const days = Math.floor(seconds / 60 / 60 / 24);
let ageString;
if (days > 0) {
ageString = `${days}d`;
} else if (hours > 0) {
ageString = `${hours}h`;
} else if (minutes > 0) {
ageString = `${minutes}m`;
} else {
ageString = `${seconds}s`;
}
return ageString;
}

View File

@ -1,6 +1,6 @@
<script>
import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
import WorkloadStats from '../components/workload_stats.vue';
import { getAge } from '../helpers/k8s_integration_helper';
import WorkloadLayout from '../components/workload_layout.vue';
import k8sPodsQuery from '../graphql/queries/k8s_dashboard_pods.query.graphql';
import {
PHASE_RUNNING,
@ -12,9 +12,7 @@ import {
export default {
components: {
GlLoadingIcon,
GlAlert,
WorkloadStats,
WorkloadLayout,
},
inject: ['configuration'],
apollo: {
@ -26,7 +24,16 @@ export default {
};
},
update(data) {
return data?.k8sPods || [];
return (
data?.k8sPods?.map((pod) => {
return {
name: pod.metadata?.name,
namespace: pod.metadata?.namespace,
status: pod.status.phase,
age: getAge(pod.metadata?.creationTimestamp),
};
}) || []
);
},
error(err) {
this.errorMessage = err?.message;
@ -35,6 +42,7 @@ export default {
},
data() {
return {
k8sPods: [],
errorMessage: '',
};
},
@ -65,7 +73,7 @@ export default {
},
methods: {
countPodsByPhase(phase) {
const filteredPods = this.k8sPods?.filter((item) => item.status.phase === phase) || [];
const filteredPods = this.k8sPods?.filter((item) => item.status === phase) || [];
return filteredPods.length;
},
@ -73,9 +81,10 @@ export default {
};
</script>
<template>
<gl-loading-icon v-if="loading" />
<gl-alert v-else-if="errorMessage" variant="danger" :dismissible="false" class="gl-mb-5">
{{ errorMessage }}
</gl-alert>
<workload-stats v-else :stats="podStats" />
<workload-layout
:loading="loading"
:error-message="errorMessage"
:stats="podStats"
:items="k8sPods"
/>
</template>

View File

@ -11,6 +11,14 @@ module BulkImports
mount_uploader :export_file, ExportUploader
# This causes CarrierWave v1 and v3 (but not v2) to upload the file to
# object storage *after* the database entry has been committed to the
# database. This avoids idling in a transaction. Similar to `ImportExportUpload`.
if Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_STORE_EXPORT_FILE_AFTER_COMMIT', true))
skip_callback :save, :after, :store_export_file!
set_callback :commit, :after, :store_export_file!
end
def retrieve_upload(_identifier, paths)
Upload.find_by(model: self, path: paths)
end

View File

@ -2,6 +2,6 @@
module BulkImports
class ExportUploader < ImportExportUploader
EXTENSION_ALLOWLIST = %w[ndjson.gz].freeze
EXTENSION_ALLOWLIST = %w[ndjson.gz tar.gz gz].freeze
end
end

View File

@ -12,6 +12,7 @@ module Pages
def perform
PagesDeployment.deactivated.each_batch do |deployments| # rubocop: disable Style/SymbolProc
deployments.each { |deployment| deployment.file.remove! }
deployments.delete_all
end
end

View File

@ -0,0 +1,19 @@
# frozen_string_literal: true

# Drops the single-column web_hook_id index on the partitioned web_hook_logs
# table. NOTE(review): presumably superseded by the (created_at, web_hook_id)
# composite index — confirm against db/structure.sql before relying on this.
class DropIndexWebHookLogsPartOnWebHookId < Gitlab::Database::Migration[2.2]
  include Gitlab::Database::PartitioningMigrationHelpers

  # Concurrent (partitioned) index operations cannot run inside a transaction.
  disable_ddl_transaction!

  milestone '16.7'

  INDEX_NAME = :index_web_hook_logs_part_on_web_hook_id
  TABLE_NAME = :web_hook_logs

  def up
    remove_concurrent_partitioned_index_by_name(TABLE_NAME, INDEX_NAME)
  end

  # Recreates the index across all partitions on rollback.
  def down
    add_concurrent_partitioned_index(TABLE_NAME, :web_hook_id, name: INDEX_NAME)
  end
end

View File

@ -0,0 +1 @@
e054bf0a60fa2dae5353a4f5efec786f76e457f6d1dc0f38483170a41c45456b

View File

@ -35044,8 +35044,6 @@ CREATE INDEX index_web_hook_logs_on_web_hook_id_and_created_at ON ONLY web_hook_
CREATE INDEX index_web_hook_logs_part_on_created_at_and_web_hook_id ON ONLY web_hook_logs USING btree (created_at, web_hook_id);
CREATE INDEX index_web_hook_logs_part_on_web_hook_id ON ONLY web_hook_logs USING btree (web_hook_id);
CREATE INDEX index_web_hooks_on_group_id ON web_hooks USING btree (group_id) WHERE ((type)::text = 'GroupHook'::text);
CREATE INDEX index_web_hooks_on_integration_id ON web_hooks USING btree (integration_id);

View File

@ -27758,6 +27758,9 @@ msgstr ""
msgid "Kubernetes deployment not found"
msgstr ""
msgid "KubernetesDashboard|Age"
msgstr ""
msgid "KubernetesDashboard|Agent %{name} ID #%{id}"
msgstr ""
@ -27770,6 +27773,12 @@ msgstr ""
msgid "KubernetesDashboard|Failed"
msgstr ""
msgid "KubernetesDashboard|Name"
msgstr ""
msgid "KubernetesDashboard|Namespace"
msgstr ""
msgid "KubernetesDashboard|Pending"
msgstr ""
@ -27779,6 +27788,9 @@ msgstr ""
msgid "KubernetesDashboard|Running"
msgstr ""
msgid "KubernetesDashboard|Status"
msgstr ""
msgid "KubernetesDashboard|Succeeded"
msgstr ""
@ -32855,6 +32867,9 @@ msgstr ""
msgid "Objective"
msgstr ""
msgid "ObservabilityMetrics|Error: Failed to load metrics details. Try reloading the page."
msgstr ""
msgid "ObservabilityMetrics|Metrics"
msgstr ""

View File

@ -0,0 +1,96 @@
// Specs for the WorkloadLayout component: verifies that exactly one of the
// three states (loading icon / error alert / stats + table) renders at a time.
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
import WorkloadStats from '~/kubernetes_dashboard/components/workload_stats.vue';
import WorkloadTable from '~/kubernetes_dashboard/components/workload_table.vue';
import { mockPodStats, mockPodsTableItems } from '../graphql/mock_data';

let wrapper;

// Required props used unless a test overrides them.
const defaultProps = {
  stats: mockPodStats,
  items: mockPodsTableItems,
};

const createWrapper = (propsData = {}) => {
  wrapper = shallowMount(WorkloadLayout, {
    propsData: {
      ...defaultProps,
      ...propsData,
    },
  });
};

// Finder helpers, one per possible child of the layout.
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findErrorAlert = () => wrapper.findComponent(GlAlert);
const findWorkloadStats = () => wrapper.findComponent(WorkloadStats);
const findWorkloadTable = () => wrapper.findComponent(WorkloadTable);

describe('Workload layout component', () => {
  describe('when loading', () => {
    beforeEach(() => {
      // errorMessage is set too: loading must win over the error state.
      createWrapper({ loading: true, errorMessage: 'error' });
    });

    it('renders a loading icon', () => {
      expect(findLoadingIcon().exists()).toBe(true);
    });

    it("doesn't render an error message", () => {
      expect(findErrorAlert().exists()).toBe(false);
    });

    it("doesn't render workload stats", () => {
      expect(findWorkloadStats().exists()).toBe(false);
    });

    it("doesn't render workload table", () => {
      expect(findWorkloadTable().exists()).toBe(false);
    });
  });

  describe('when received an error', () => {
    beforeEach(() => {
      createWrapper({ errorMessage: 'error' });
    });

    it("doesn't render a loading icon", () => {
      expect(findLoadingIcon().exists()).toBe(false);
    });

    it('renders an error alert with the correct message and props', () => {
      expect(findErrorAlert().text()).toBe('error');
      expect(findErrorAlert().props()).toMatchObject({ variant: 'danger', dismissible: false });
    });

    it("doesn't render workload stats", () => {
      expect(findWorkloadStats().exists()).toBe(false);
    });

    it("doesn't render workload table", () => {
      expect(findWorkloadTable().exists()).toBe(false);
    });
  });

  describe('when received the data', () => {
    beforeEach(() => {
      createWrapper();
    });

    it("doesn't render a loading icon", () => {
      expect(findLoadingIcon().exists()).toBe(false);
    });

    it("doesn't render an error message", () => {
      expect(findErrorAlert().exists()).toBe(false);
    });

    it('renders workload-stats component with the correct props', () => {
      expect(findWorkloadStats().props('stats')).toBe(mockPodStats);
    });

    it('renders workload-table component with the correct props', () => {
      expect(findWorkloadTable().props('items')).toBe(mockPodsTableItems);
    });
  });
});

View File

@ -0,0 +1,120 @@
// Specs for the WorkloadTable component: field configuration, row rendering,
// status badges, and pagination. Uses full `mount` so GlTable renders rows.
import { mount } from '@vue/test-utils';
import { GlTable, GlBadge, GlPagination } from '@gitlab/ui';
import WorkloadTable from '~/kubernetes_dashboard/components/workload_table.vue';
import { TABLE_HEADING_CLASSES, PAGE_SIZE } from '~/kubernetes_dashboard/constants';
import { mockPodsTableItems } from '../graphql/mock_data';

let wrapper;

const createWrapper = (propsData = {}) => {
  wrapper = mount(WorkloadTable, {
    propsData,
  });
};

const findTable = () => wrapper.findComponent(GlTable);
const findAllRows = () => findTable().find('tbody').findAll('tr');
const findRow = (at) => findAllRows().at(at);
const findAllBadges = () => wrapper.findAllComponents(GlBadge);
const findBadge = (at) => findAllBadges().at(at);
const findPagination = () => wrapper.findComponent(GlPagination);

describe('Workload table component', () => {
  it('renders GlTable component with the default fields if no fields specified in props', () => {
    createWrapper({ items: mockPodsTableItems });
    // Expected shape after the component decorates the default fields with
    // thClass and sortable.
    const defaultFields = [
      {
        key: 'name',
        label: 'Name',
        thClass: TABLE_HEADING_CLASSES,
        sortable: true,
      },
      {
        key: 'status',
        label: 'Status',
        thClass: TABLE_HEADING_CLASSES,
        sortable: true,
      },
      {
        key: 'namespace',
        label: 'Namespace',
        thClass: TABLE_HEADING_CLASSES,
        sortable: true,
      },
      {
        key: 'age',
        label: 'Age',
        thClass: TABLE_HEADING_CLASSES,
        sortable: true,
      },
    ];

    expect(findTable().props('fields')).toEqual(defaultFields);
  });

  it('renders GlTable component fields specified in props', () => {
    const customFields = [
      {
        key: 'field-1',
        label: 'Field-1',
        thClass: TABLE_HEADING_CLASSES,
        sortable: true,
      },
      {
        key: 'field-2',
        label: 'Field-2',
        thClass: TABLE_HEADING_CLASSES,
        sortable: true,
      },
    ];
    createWrapper({ items: mockPodsTableItems, fields: customFields });

    expect(findTable().props('fields')).toEqual(customFields);
  });

  describe('table rows', () => {
    beforeEach(() => {
      createWrapper({ items: mockPodsTableItems });
    });

    it('displays the correct number of rows', () => {
      expect(findAllRows()).toHaveLength(mockPodsTableItems.length);
    });

    it('renders correct data for each row', () => {
      mockPodsTableItems.forEach((data, index) => {
        expect(findRow(index).text()).toContain(data.name);
        expect(findRow(index).text()).toContain(data.namespace);
        expect(findRow(index).text()).toContain(data.status);
        expect(findRow(index).text()).toContain(data.age);
      });
    });

    it('renders a badge for the status', () => {
      expect(findAllBadges()).toHaveLength(mockPodsTableItems.length);
    });

    // Badge variants must follow WORKLOAD_STATUS_BADGE_VARIANTS per status.
    it.each`
      status         | variant      | index
      ${'Running'}   | ${'info'}    | ${0}
      ${'Running'}   | ${'info'}    | ${1}
      ${'Pending'}   | ${'warning'} | ${2}
      ${'Succeeded'} | ${'success'} | ${3}
      ${'Failed'}    | ${'danger'}  | ${4}
      ${'Failed'}    | ${'danger'}  | ${5}
    `(
      'renders "$variant" badge for status "$status" at index "$index"',
      ({ status, variant, index }) => {
        expect(findBadge(index).text()).toBe(status);
        expect(findBadge(index).props('variant')).toBe(variant);
      },
    );

    it('renders pagination', () => {
      expect(findPagination().props()).toMatchObject({
        totalItems: mockPodsTableItems.length,
        perPage: PAGE_SIZE,
      });
    });
  });
});

View File

@ -1,7 +1,35 @@
const runningPod = { status: { phase: 'Running' } };
const pendingPod = { status: { phase: 'Pending' } };
const succeededPod = { status: { phase: 'Succeeded' } };
const failedPod = { status: { phase: 'Failed' } };
const runningPod = {
status: { phase: 'Running' },
metadata: {
name: 'pod-1',
namespace: 'default',
creationTimestamp: '2023-07-31T11:50:17Z',
},
};
const pendingPod = {
status: { phase: 'Pending' },
metadata: {
name: 'pod-2',
namespace: 'new-namespace',
creationTimestamp: '2023-11-21T11:50:59Z',
},
};
const succeededPod = {
status: { phase: 'Succeeded' },
metadata: {
name: 'pod-3',
namespace: 'default',
creationTimestamp: '2023-07-31T11:50:17Z',
},
};
const failedPod = {
status: { phase: 'Failed' },
metadata: {
name: 'pod-4',
namespace: 'default',
creationTimestamp: '2023-11-21T11:50:59Z',
},
};
export const k8sPodsMock = [runningPod, runningPod, pendingPod, succeededPod, failedPod, failedPod];
@ -23,3 +51,42 @@ export const mockPodStats = [
value: 2,
},
];
export const mockPodsTableItems = [
{
name: 'pod-1',
namespace: 'default',
status: 'Running',
age: '114d',
},
{
name: 'pod-1',
namespace: 'default',
status: 'Running',
age: '114d',
},
{
name: 'pod-2',
namespace: 'new-namespace',
status: 'Pending',
age: '1d',
},
{
name: 'pod-3',
namespace: 'default',
status: 'Succeeded',
age: '114d',
},
{
name: 'pod-4',
namespace: 'default',
status: 'Failed',
age: '1d',
},
{
name: 'pod-4',
namespace: 'default',
status: 'Failed',
age: '1d',
},
];

View File

@ -0,0 +1,22 @@
// Specs for the kubernetes_dashboard getAge helper.
import { getAge } from '~/kubernetes_dashboard/helpers/k8s_integration_helper';
import { useFakeDate } from 'helpers/fake_date';

describe('k8s_integration_helper', () => {
  describe('getAge', () => {
    // Freeze "now" at 2023-11-23 10:10 (month is 0-based) so the computed
    // ages below are deterministic.
    useFakeDate(2023, 10, 23, 10, 10);

    it.each`
      condition                         | measures     | timestamp                 | expected
      ${'timestamp > 1 day'}            | ${'days'}    | ${'2023-07-31T11:50:59Z'} | ${'114d'}
      ${'timestamp = 1 day'}            | ${'days'}    | ${'2023-11-21T11:50:59Z'} | ${'1d'}
      ${'1 day > timestamp > 1 hour'}   | ${'hours'}   | ${'2023-11-22T11:50:59Z'} | ${'22h'}
      ${'timestamp = 1 hour'}           | ${'hours'}   | ${'2023-11-23T08:50:59Z'} | ${'1h'}
      ${'1 hour > timestamp >1 minute'} | ${'minutes'} | ${'2023-11-23T09:50:59Z'} | ${'19m'}
      ${'timestamp = 1 minute'}         | ${'minutes'} | ${'2023-11-23T10:08:59Z'} | ${'1m'}
      ${'1 minute > timestamp'}         | ${'seconds'} | ${'2023-11-23T10:09:17Z'} | ${'43s'}
      ${'timestamp = 1 second'}         | ${'seconds'} | ${'2023-11-23T10:09:59Z'} | ${'1s'}
    `('returns age in $measures when $condition', ({ timestamp, expected }) => {
      expect(getAge(timestamp)).toBe(expected);
    });
  });
});

View File

@ -1,12 +1,12 @@
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import PodsPage from '~/kubernetes_dashboard/pages/pods_page.vue';
import WorkloadStats from '~/kubernetes_dashboard/components/workload_stats.vue';
import { k8sPodsMock, mockPodStats } from '../graphql/mock_data';
import WorkloadLayout from '~/kubernetes_dashboard/components/workload_layout.vue';
import { useFakeDate } from 'helpers/fake_date';
import { k8sPodsMock, mockPodStats, mockPodsTableItems } from '../graphql/mock_data';
Vue.use(VueApollo);
@ -20,9 +20,7 @@ describe('Kubernetes dashboard pods page', () => {
},
};
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findWorkloadStats = () => wrapper.findComponent(WorkloadStats);
const findAlert = () => wrapper.findComponent(GlAlert);
const findWorkloadLayout = () => wrapper.findComponent(WorkloadLayout);
const createApolloProvider = () => {
const mockResolvers = {
@ -42,33 +40,41 @@ describe('Kubernetes dashboard pods page', () => {
};
describe('mounted', () => {
it('shows the loading icon', () => {
it('renders WorkloadLayout component', () => {
createWrapper();
expect(findLoadingIcon().exists()).toBe(true);
expect(findWorkloadLayout().exists()).toBe(true);
});
it('hides the loading icon when the list of pods loaded', async () => {
it('sets loading prop for the WorkloadLayout', () => {
createWrapper();
expect(findWorkloadLayout().props('loading')).toBe(true);
});
it('removes loading prop from the WorkloadLayout when the list of pods loaded', async () => {
createWrapper();
await waitForPromises();
expect(findLoadingIcon().exists()).toBe(false);
expect(findWorkloadLayout().props('loading')).toBe(false);
});
});
describe('when gets pods data', () => {
it('renders stats', async () => {
useFakeDate(2023, 10, 23, 10, 10);
it('sets correct stats object for the WorkloadLayout', async () => {
createWrapper();
await waitForPromises();
expect(findWorkloadStats().exists()).toBe(true);
expect(findWorkloadLayout().props('stats')).toEqual(mockPodStats);
});
it('provides correct data for stats', async () => {
it('sets correct table items object for the WorkloadLayout', async () => {
createWrapper();
await waitForPromises();
expect(findWorkloadStats().props('stats')).toEqual(mockPodStats);
expect(findWorkloadLayout().props('items')).toEqual(mockPodsTableItems);
});
});
@ -89,12 +95,8 @@ describe('Kubernetes dashboard pods page', () => {
await waitForPromises();
});
it("doesn't show pods stats", () => {
expect(findWorkloadStats().exists()).toBe(false);
});
it('renders an alert with the error message', () => {
expect(findAlert().text()).toBe(error.message);
it('sets errorMessage prop for the WorkloadLayout', () => {
expect(findWorkloadLayout().props('errorMessage')).toBe(error.message);
});
});
});

View File

@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe BulkImports::ExportUpload do
RSpec.describe BulkImports::ExportUpload, type: :model, feature_category: :importers do
subject { described_class.new(export: create(:bulk_import_export)) }
describe 'associations' do
@ -20,4 +20,18 @@ RSpec.describe BulkImports::ExportUpload do
expect(subject.public_send(method).url).to eq(url)
end
describe 'ActiveRecord callbacks' do
let(:after_save_callbacks) { described_class._save_callbacks.select { |cb| cb.kind == :after } }
let(:after_commit_callbacks) { described_class._commit_callbacks.select { |cb| cb.kind == :after } }
def find_callback(callbacks, key)
callbacks.find { |cb| cb.filter == key }
end
it 'export file is stored in after_commit callback' do
expect(find_callback(after_commit_callbacks, :store_export_file!)).to be_present
expect(find_callback(after_save_callbacks, :store_export_file!)).to be_nil
end
end
end

View File

@ -216,9 +216,6 @@ vulnerability_finding_signatures:
vulnerability_flags:
index_vulnerability_flags_on_unique_columns:
- index_vulnerability_flags_on_vulnerability_occurrence_id
web_hook_logs:
index_web_hook_logs_on_web_hook_id_and_created_at:
- index_web_hook_logs_part_on_web_hook_id
work_item_hierarchy_restrictions:
index_work_item_hierarchy_restrictions_on_parent_and_child:
- index_work_item_hierarchy_restrictions_on_parent_type_id

View File

@ -8,9 +8,6 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca
# We don't need to delete data since we don't modify Postgres data
self.use_transactional_tests = false
let(:migrations_base_dir) { 'click_house/migrations' }
let(:migrations_dirname) { 'undefined' }
let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
let(:verbose) { nil }
let(:target_version) { nil }
let(:step) { nil }
@ -25,68 +22,175 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca
stub_env('STEP', step.to_s) if step
end
describe 'migrate' do
subject(:migration) { run_rake_task('gitlab:clickhouse:migrate') }
context 'with real migrations' do
let(:migrations_dir) { File.expand_path(rails_root_join('db', 'click_house', 'migrate')) }
around do |example|
before do
ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
example.run
clear_consts(expand_fixture_path(migrations_base_dir))
end
describe 'when creating a table' do
let(:migrations_dirname) { 'plain_table_creation' }
it 'runs migrations and rollbacks' do
expect { run_rake_task('gitlab:clickhouse:migrate') }.to change { active_schema_migrations_count }.from(0)
.and output.to_stdout
it 'creates a table' do
expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
.and output.to_stdout
expect { run_rake_task('gitlab:clickhouse:rollback') }.to change { active_schema_migrations_count }.by(-1)
.and output.to_stdout
expect(describe_table('some')).to match({
id: a_hash_including(type: 'UInt64'),
date: a_hash_including(type: 'Date')
})
stub_env('VERSION', 0)
expect { run_rake_task('gitlab:clickhouse:rollback') }.to change { active_schema_migrations_count }.to(0)
end
end
context 'with migration fixtures' do
let(:migrations_base_dir) { 'click_house/migrations' }
let(:migrations_dirname) { 'undefined' }
let(:migrations_dir) { expand_fixture_path("#{migrations_base_dir}/#{migrations_dirname}") }
describe 'migrate' do
subject(:migration) { run_rake_task('gitlab:clickhouse:migrate') }
around do |example|
ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
example.run
clear_consts(expand_fixture_path(migrations_base_dir))
end
context 'when VERBOSE is false' do
let(:verbose) { 'false' }
describe 'when creating a table' do
let(:migrations_dirname) { 'plain_table_creation' }
it 'does not write to stdout' do
expect { migration }.not_to output.to_stdout
it 'creates a table' do
expect { migration }.to change { active_schema_migrations_count }.from(0).to(1)
.and output.to_stdout
expect(describe_table('some')).to match({
id: a_hash_including(type: 'UInt64'),
date: a_hash_including(type: 'Date')
})
end
context 'when VERBOSE is false' do
let(:verbose) { 'false' }
it 'does not write to stdout' do
expect { migration }.not_to output.to_stdout
expect(describe_table('some')).to match({
id: a_hash_including(type: 'UInt64'),
date: a_hash_including(type: 'Date')
})
end
end
end
describe 'when dropping a table' do
let(:migrations_dirname) { 'drop_table' }
context 'with VERSION set' do
let(:target_version) { 2 }
it 'drops table' do
stub_env('VERSION', 1)
run_rake_task('gitlab:clickhouse:migrate')
expect(table_names).to include('some')
stub_env('VERSION', target_version)
migration
expect(table_names).not_to include('some')
end
context 'with STEP also set' do
let(:step) { 1 }
it 'ignores STEP and executes both migrations' do
migration
expect(table_names).not_to include('some')
end
end
end
context 'with STEP set to 1' do
let(:step) { 1 }
it 'executes only first step and creates table' do
migration
expect(table_names).to include('some')
end
end
context 'with STEP set to 0' do
let(:step) { 0 }
it 'executes only first step and creates table' do
expect { migration }.to raise_error ArgumentError, 'STEP should be a positive number'
end
end
context 'with STEP set to not-a-number' do
let(:step) { 'NaN' }
it 'raises an error' do
expect { migration }.to raise_error ArgumentError, 'invalid value for Integer(): "NaN"'
end
end
context 'with STEP set to empty string' do
let(:step) { '' }
it 'raises an error' do
expect { migration }.to raise_error ArgumentError, 'invalid value for Integer(): ""'
end
end
end
context 'with VERSION is invalid' do
let(:migrations_dirname) { 'plain_table_creation' }
let(:target_version) { 'invalid' }
it { expect { migration }.to raise_error RuntimeError, 'Invalid format of target version: `VERSION=invalid`' }
end
end
describe 'when dropping a table' do
let(:migrations_dirname) { 'drop_table' }
describe 'rollback' do
subject(:migration) { run_rake_task('gitlab:clickhouse:rollback') }
let(:migrations_dirname) { 'table_creation_with_down_method' }
around do |example|
ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
# Ensure we start with all migrations up
schema_migration = ClickHouse::MigrationSupport::SchemaMigration
migrate(ClickHouse::MigrationSupport::MigrationContext.new(migrations_dir, schema_migration), nil)
example.run
clear_consts(expand_fixture_path(migrations_base_dir))
end
context 'with VERSION set' do
let(:target_version) { 2 }
context 'when migrating back all the way to 0' do
let(:target_version) { 0 }
it 'drops table' do
stub_env('VERSION', 1)
run_rake_task('gitlab:clickhouse:migrate')
it 'rolls back all migrations' do
expect(table_names).to include('some', 'another')
expect(table_names).to include('some')
stub_env('VERSION', target_version)
migration
expect(table_names).not_to include('some')
end
context 'with STEP also set' do
let(:step) { 1 }
it 'ignores STEP and executes both migrations' do
migration
expect(table_names).not_to include('some', 'another')
end
expect(table_names).not_to include('some')
context 'with STEP also set' do
let(:step) { 1 }
it 'ignores STEP and rolls back all migrations' do
expect(table_names).to include('some', 'another')
migration
expect(table_names).not_to include('some', 'another')
end
end
end
end
@ -94,95 +198,13 @@ RSpec.describe 'gitlab:clickhouse', click_house: :without_migrations, feature_ca
context 'with STEP set to 1' do
let(:step) { 1 }
it 'executes only first step and creates table' do
migration
it 'executes only first step and drops "another" table' do
run_rake_task('gitlab:clickhouse:rollback')
expect(table_names).to include('some')
expect(table_names).not_to include('another')
end
end
context 'with STEP set to 0' do
let(:step) { 0 }
it 'executes only first step and creates table' do
expect { migration }.to raise_error ArgumentError, 'STEP should be a positive number'
end
end
context 'with STEP set to not-a-number' do
let(:step) { 'NaN' }
it 'raises an error' do
expect { migration }.to raise_error ArgumentError, 'invalid value for Integer(): "NaN"'
end
end
context 'with STEP set to empty string' do
let(:step) { '' }
it 'raises an error' do
expect { migration }.to raise_error ArgumentError, 'invalid value for Integer(): ""'
end
end
end
context 'with VERSION is invalid' do
let(:migrations_dirname) { 'plain_table_creation' }
let(:target_version) { 'invalid' }
it { expect { migration }.to raise_error RuntimeError, 'Invalid format of target version: `VERSION=invalid`' }
end
end
describe 'rollback' do
subject(:migration) { run_rake_task('gitlab:clickhouse:rollback') }
let(:migrations_dirname) { 'table_creation_with_down_method' }
around do |example|
ClickHouse::MigrationSupport::Migrator.migrations_paths = [migrations_dir]
# Ensure we start with all migrations up
schema_migration = ClickHouse::MigrationSupport::SchemaMigration
migrate(ClickHouse::MigrationSupport::MigrationContext.new(migrations_dir, schema_migration), nil)
example.run
clear_consts(expand_fixture_path(migrations_base_dir))
end
context 'with VERSION set' do
context 'when migrating back all the way to 0' do
let(:target_version) { 0 }
it 'rolls back all migrations' do
expect(table_names).to include('some', 'another')
migration
expect(table_names).not_to include('some', 'another')
end
context 'with STEP also set' do
let(:step) { 1 }
it 'ignores STEP and rolls back all migrations' do
expect(table_names).to include('some', 'another')
migration
expect(table_names).not_to include('some', 'another')
end
end
end
end
context 'with STEP set to 1' do
let(:step) { 1 }
it 'executes only first step and drops "another" table' do
run_rake_task('gitlab:clickhouse:rollback')
expect(table_names).to include('some')
expect(table_names).not_to include('another')
end
end
end

View File

@ -5,11 +5,14 @@ require 'spec_helper'
RSpec.describe Pages::DeactivatedDeploymentsDeleteCronWorker, feature_category: :pages do
subject(:worker) { described_class.new }
it 'deletes all deactivated pages deployments' do
create(:pages_deployment) # active
create(:pages_deployment, deleted_at: 3.minutes.ago) # deactivated
create(:pages_deployment, path_prefix: 'other', deleted_at: 3.minutes.ago) # deactivated
let!(:pages_deployment) { create(:pages_deployment) }
let!(:deactivated_pages_deployment) { create(:pages_deployment, deleted_at: 3.minutes.ago) }
let!(:alt_deactivated_pages_deployment) { create(:pages_deployment, path_prefix: 'other', deleted_at: 3.minutes.ago) }
it 'deletes all deactivated pages deployments and their files from the filesystem' do
file_paths = [deactivated_pages_deployment.file.path, alt_deactivated_pages_deployment.file.path]
expect { worker.perform }.to change { PagesDeployment.count }.by(-2)
.and change { (file_paths.any? { |path| File.exist?(path) }) }.from(true).to(false)
end
end