mirror of https://github.com/grafana/grafana.git

commit 50f666bc7a
parent 88151db5f8

Update tests
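The diffs below make two recurring changes to the download tests: the untyped (saveAs as unknown as jest.Mock) cast is replaced with jest.mocked(saveAs), and the first argument captured from the mock is treated as Blob | string instead of being assumed to be a Blob. As a rough standalone sketch (not part of the commit, helper name firstSavedText is made up), the pattern boils down to:

import saveAs from 'file-saver';

jest.mock('file-saver', () => jest.fn());

// jest.mocked() keeps file-saver's real signature, so the captured first
// argument is Blob | string and needs the same typeof guard the tests now
// use before calling .text().
async function firstSavedText(): Promise<string> {
  const [data] = jest.mocked(saveAs).mock.calls[0];
  return typeof data === 'string' ? data : await data.text();
}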
@@ -1,6 +1,7 @@
 import saveAs from 'file-saver';
 
 import { dataFrameFromJSON, DataFrameJSON, dateTimeFormat, FieldType, LogRowModel, LogsMetaKind } from '@grafana/data';
+import { createLogRow } from 'app/features/logs/components/mocks/logRow';
 
 import { downloadAsJson, downloadDataFrameAsCsv, downloadLogsModelAsTxt } from './download';
 
@@ -33,13 +34,13 @@ describe('inspector download', () => {
       'should, when logsModel is %s and title is %s, resolve in %s',
       async (dataFrame, title, expected) => {
         downloadDataFrameAsCsv(dataFrame, title);
-        const call = (saveAs as unknown as jest.Mock).mock.calls[0];
+        const call = jest.mocked(saveAs).mock.calls[0];
         const blob = call[0];
         const filename = call[1];
-        const text = await blob.text();
+        const text = typeof blob === 'string' ? blob : await blob.text();
 
         // By default the BOM character should not be included
-        expect(await getBomType(blob)).toBeUndefined();
+        expect(blob instanceof Blob ? await getBomType(blob) : undefined).toBeUndefined();
         expect(text).toEqual(expected);
         expect(filename).toEqual(`${title}-data-${dateTimeFormat(1400000000000)}.csv`);
       }
@@ -48,15 +49,18 @@ describe('inspector download', () => {
     it('should use \t as the delimiter and the file should be utf16le if excelCompatibilityMode is true', async () => {
       downloadDataFrameAsCsv(dataFrameFromJSON(json), 'test', undefined, undefined, true);
 
-      const call = (saveAs as unknown as jest.Mock).mock.calls[0];
+      const call = jest.mocked(saveAs).mock.calls[0];
       const blob = call[0];
       const filename = call[1];
-      const text = await blob.text();
+      const text = typeof blob === 'string' ? blob : await blob.text();
 
-      expect(await getBomType(blob)).toBe('utf-16le');
-      expect(blob.type).toBe('text/csv;charset=utf-16le');
+      if (blob instanceof Blob) {
+        expect(await getBomType(blob)).toBe('utf-16le');
+        expect(blob.type).toBe('text/csv;charset=utf-16le');
+      }
       expect(text).toEqual('"time"\t"name"\t"value"\r\n100\tÅäö中文العربية\t1');
       expect(filename).toEqual(`test-data-${dateTimeFormat(1400000000000)}.csv`);
+      expect.assertions(4);
     });
   });
 
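The excelCompatibilityMode test above checks for a UTF-16LE byte-order mark through a getBomType helper whose implementation is not shown in this diff; the added expect.assertions(4) then guarantees the two assertions inside the instanceof guard were not silently skipped. Purely as an illustration, a BOM probe of this kind could read the Blob's first bytes and map known byte-order marks to an encoding label:

// Hypothetical sketch of a BOM probe like getBomType; the real helper in the
// test file may differ.
async function getBomType(blob: Blob): Promise<'utf-8' | 'utf-16le' | 'utf-16be' | undefined> {
  const bytes = new Uint8Array(await blob.slice(0, 3).arrayBuffer());
  if (bytes[0] === 0xef && bytes[1] === 0xbb && bytes[2] === 0xbf) {
    return 'utf-8'; // UTF-8 BOM
  }
  if (bytes[0] === 0xff && bytes[1] === 0xfe) {
    return 'utf-16le'; // little-endian UTF-16 BOM, the value the test expects
  }
  if (bytes[0] === 0xfe && bytes[1] === 0xff) {
    return 'utf-16be';
  }
  return undefined; // no BOM found
}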
@@ -67,10 +71,10 @@ describe('inspector download', () => {
       [{ foo: 'bar' }, 'test', '{"foo":"bar"}'],
     ])('should, when logsModel is %s and title is %s, resolve in %s', async (logsModel, title, expected) => {
       downloadAsJson(logsModel, title);
-      const call = (saveAs as unknown as jest.Mock).mock.calls[0];
+      const call = jest.mocked(saveAs).mock.calls[0];
       const blob = call[0];
       const filename = call[1];
-      const text = await blob.text();
+      const text = typeof blob === 'string' ? blob : await blob.text();
 
       expect(text).toEqual(expected);
       expect(filename).toEqual(`${title}-${dateTimeFormat(1400000000000)}.json`);
@@ -110,10 +114,10 @@ describe('inspector download', () => {
       ],
     ])('should, when logsModel is %s and title is %s, resolve in %s', async (logsModel, title, expected) => {
       downloadLogsModelAsTxt(logsModel, title);
-      const call = (saveAs as unknown as jest.Mock).mock.calls[0];
+      const call = jest.mocked(saveAs).mock.calls[0];
       const blob = call[0];
       const filename = call[1];
-      const text = await blob.text();
+      const text = typeof blob === 'string' ? blob : await blob.text();
 
       expect(text).toEqual(expected);
       expect(filename).toEqual(`${title}-logs-${dateTimeFormat(1400000000000)}.txt`);
@@ -121,10 +125,32 @@ describe('inspector download', () => {
 
     it('should, when title is empty, resolve in %s', async () => {
       downloadLogsModelAsTxt({ meta: [], rows: [] });
-      const call = (saveAs as unknown as jest.Mock).mock.calls[0];
+      const call = jest.mocked(saveAs).mock.calls[0];
       const filename = call[1];
       expect(filename).toEqual(`Logs-${dateTimeFormat(1400000000000)}.txt`);
     });
+
+    it('should, when title is empty, resolve in %s', async () => {
+      downloadLogsModelAsTxt({ meta: [], rows: [] });
+      const call = jest.mocked(saveAs).mock.calls[0];
+      const filename = call[1];
+      expect(filename).toEqual(`Logs-${dateTimeFormat(1400000000000)}.txt`);
+    });
+
+    it('should should download selected fields', async () => {
+      const logsModel = {
+        meta: [],
+        rows: [
+          createLogRow({ timeEpochMs: 100, entry: 'testEntry', labels: { label: 'value', otherLabel: 'other value' } }),
+        ],
+      };
+      downloadLogsModelAsTxt(logsModel, undefined, ['label', 'otherLabel']);
+      const call = jest.mocked(saveAs).mock.calls[0];
+      const blob = call[0];
+      const text = typeof blob === 'string' ? blob : await blob.text();
+
+      expect(text).toContain('value other value');
+    });
   });
 });
 
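Both the new "download selected fields" test above and the downloadLogs tests in the second file assert expect(text).toContain('value other value'), i.e. the values of the selected labels appear in the output joined by single spaces. Purely as an illustration of what that assertion pins down (not how the download code is actually implemented), the expected fragment can be derived like this:

const labels: Record<string, string> = { label: 'value', otherLabel: 'other value' };
const selectedFields = ['label', 'otherLabel'];
// Pick the selected label values in order and join them with spaces.
const fragment = selectedFields.map((name) => labels[name]).join(' ');
// fragment === 'value other value', the substring the tests look for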
@@ -1,3 +1,5 @@
+import saveAs from 'file-saver';
+
 import {
   AbsoluteTimeRange,
   FieldType,
@@ -11,6 +13,7 @@ import {
 } from '@grafana/data';
 import { getMockFrames } from 'app/plugins/datasource/loki/mocks/frames';
 
+import { createLogRow } from './components/mocks/logRow';
 import { logSeriesToLogsModel } from './logsModel';
 import {
   calculateLogsLabelStats,
@@ -25,8 +28,12 @@ import {
   mergeLogsVolumeDataFrames,
   sortLogsResult,
   checkLogsSampled,
+  downloadLogs,
+  DownloadFormat,
 } from './utils';
 
+jest.mock('file-saver', () => jest.fn());
+
 describe('getLoglevel()', () => {
   it('returns no log level on empty line', () => {
     expect(getLogLevel('')).toBe(LogLevel.unknown);
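The hunk above adds jest.mock('file-saver', () => jest.fn()), which replaces the whole module with a single jest.fn() so that the default import saveAs resolves to the mock function (this relies on the interop-style default imports used in these tests). A minimal standalone sketch of the same pattern, not taken from the commit:

import saveAs from 'file-saver';

jest.mock('file-saver', () => jest.fn());

test('captures what the code under test tried to save', () => {
  // The real saveAs never runs; the mock only records the arguments.
  saveAs(new Blob(['hello']), 'hello.txt');
  expect(jest.mocked(saveAs)).toHaveBeenCalledWith(expect.any(Blob), 'hello.txt');
});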
@@ -588,3 +595,66 @@ describe('findMatchingRow', () => {
     }
   });
 });
+
+describe('downloadLogs', () => {
+  const logs = [
+    createLogRow({ timeEpochMs: 100, entry: 'test entry', labels: { label: 'value', otherLabel: 'other value' } }),
+  ];
+  describe('Text format', () => {
+    beforeEach(() => {
+      jest.mocked(saveAs).mockClear();
+    });
+
+    it('Downloads logs in txt format', async () => {
+      downloadLogs(DownloadFormat.Text, logs);
+
+      const blob = jest.mocked(saveAs).mock.calls[0][0];
+      const text = typeof blob === 'string' ? blob : await blob.text();
+
+      expect(text).toContain('test entry');
+    });
+
+    it('Downloads selected fields in txt format', async () => {
+      downloadLogs(DownloadFormat.Text, logs, [], ['label', 'otherLabel']);
+
+      const blob = jest.mocked(saveAs).mock.calls[0][0];
+      const text = typeof blob === 'string' ? blob : await blob.text();
+
+      expect(text).toContain('value other value');
+    });
+  });
+
+  describe('JSON format', () => {
+    beforeEach(() => {
+      jest.mocked(saveAs).mockClear();
+    });
+
+    it('Downloads logs in JSON format', async () => {
+      downloadLogs(DownloadFormat.Json, logs);
+
+      const blob = jest.mocked(saveAs).mock.calls[0][0];
+      const text = typeof blob === 'string' ? blob : await blob.text();
+
+      expect(JSON.parse(text)[0]).toEqual(
+        expect.objectContaining({
+          line: 'test entry',
+          fields: { label: 'value', otherLabel: 'other value' },
+        })
+      );
+    });
+
+    it('Downloads selected fields in JSON format', async () => {
+      downloadLogs(DownloadFormat.Json, logs, [], ['otherLabel']);
+
+      const blob = jest.mocked(saveAs).mock.calls[0][0];
+      const text = typeof blob === 'string' ? blob : await blob.text();
+
+      expect(JSON.parse(text)[0]).toEqual(
+        expect.objectContaining({
+          line: 'test entry',
+          fields: { otherLabel: 'other value' },
+        })
+      );
+    });
+  });
+});
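One detail worth calling out in the suite above (a reading of the diff, not text from the commit): every test inspects mock.calls[0], so the beforeEach that clears the saveAs mock is what keeps one test's download from leaking into the next. mockClear() drops the recorded calls while leaving the mocked module in place; mockReset() would additionally remove any mock implementation, which is unnecessary here since the factory mock has none.

beforeEach(() => {
  // Without this, calls[0] in a later test would still be an earlier test's call.
  jest.mocked(saveAs).mockClear();
});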