Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 09:19:04 -04:00)
# [Lens][LogsDB] Add test suite (#194031)
## Summary

Test suite for LogsDB in Kibana.

Adding tests for the following scenarios:

* [x] All Lens operations (aggs) should work fine with a LogsDB stream
* Mixed index scenarios:
  * [x] LogsDB stream (upgraded from regular stream)
  * [x] LogsDB stream without `host.name` field in the mapping
  * [x] LogsDB stream (upgraded from regular stream) + regular index
  * [x] LogsDB stream (upgraded from regular stream) + TSDB stream
  * [x] LogsDB stream (upgraded from regular stream) + another LogsDB stream
  * [x] LogsDB stream (upgraded from regular stream) + TSDB stream downsampled
  * [x] Data stream (downgraded from LogsDB stream)
  * [x] Data stream without `host.name` field in the mapping
  * [x] Data stream (downgraded from LogsDB stream) + regular index
  * [x] Data stream (downgraded from LogsDB stream) + TSDB stream
  * [x] Data stream (downgraded from LogsDB stream) + another LogsDB stream
  * [x] Data stream (downgraded from LogsDB stream) + TSDB stream downsampled

For the mixed scenario suite the following tests are performed:

* Create a date histogram vs `count(bytes)`
  * same test but using the `utc_time` field rather than `@timestamp`
* Create a chart with an annotation layer
  * same test but using `utc_time` for both data and annotation layers
  * use `host.name`, `utc_time`, and `@timestamp` as extra fields in the annotation tooltip
* Create a visualization with ES|QL using `STATS` (see the sketch after this summary)

---------

Co-authored-by: dej611 <dej611@gmail.com>
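For reference, the ES|QL scenario boils down to a query of the following shape. A minimal sketch (the index names here are illustrative; `averageB` is the computed column the tests later assert on in the Lens flyout):

```ts
// Sketch of the query string the suite feeds into the Monaco editor.
// The real index list is built from the per-scenario ScenarioIndexes entries.
const indexes = [{ index: 'data_stream' }, { index: 'tsdb_index' }];
const esqlQuery = `from ${indexes
  .map(({ index }) => index)
  .join(', ')} | stats averageB = avg(bytes) by extension`;
// => 'from data_stream, tsdb_index | stats averageB = avg(bytes) by extension'
```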
This commit is contained in:
parent c4e2a7256b
commit 34fb654303

12 changed files with 2552 additions and 933 deletions
Binary file not shown.
ES archiver mappings fixture for the `kibana_sample_data_logslogsdb` index:

@@ -0,0 +1,171 @@

```json
{
  "type": "index",
  "value": {
    "aliases": {},
    "index": "kibana_sample_data_logslogsdb",
    "mappings": {
      "_data_stream_timestamp": {
        "enabled": true
      },
      "properties": {
        "@timestamp": {
          "type": "date"
        },
        "agent": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "bytes": {
          "type": "long"
        },
        "bytes_counter": {
          "time_series_metric": "counter",
          "type": "long"
        },
        "bytes_gauge": {
          "time_series_metric": "gauge",
          "type": "long"
        },
        "clientip": {
          "type": "ip"
        },
        "event": {
          "properties": {
            "dataset": {
              "type": "keyword"
            }
          }
        },
        "extension": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "geo": {
          "properties": {
            "coordinates": {
              "type": "geo_point"
            },
            "dest": {
              "type": "keyword"
            },
            "src": {
              "type": "keyword"
            },
            "srcdest": {
              "type": "keyword"
            }
          }
        },
        "host": {
          "properties": {
            "name": {
              "type": "keyword"
            }
          }
        },
        "index": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "ip": {
          "type": "ip"
        },
        "machine": {
          "properties": {
            "os": {
              "fields": {
                "keyword": {
                  "ignore_above": 256,
                  "type": "keyword"
                }
              },
              "type": "text"
            },
            "ram": {
              "type": "long"
            }
          }
        },
        "memory": {
          "type": "double"
        },
        "message": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "phpmemory": {
          "type": "long"
        },
        "referer": {
          "type": "keyword"
        },
        "request": {
          "time_series_dimension": true,
          "type": "keyword"
        },
        "response": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "tags": {
          "fields": {
            "keyword": {
              "ignore_above": 256,
              "type": "keyword"
            }
          },
          "type": "text"
        },
        "timestamp": {
          "path": "@timestamp",
          "type": "alias"
        },
        "url": {
          "time_series_dimension": true,
          "type": "keyword"
        },
        "utc_time": {
          "type": "date"
        }
      }
    },
    "settings": {
      "index": {
        "auto_expand_replicas": "0-1",
        "mode": "time_series",
        "number_of_replicas": "0",
        "number_of_shards": "1",
        "routing_path": "request",
        "time_series": {
          "end_time": "2023-06-28T09:17:00.283Z",
          "start_time": "2023-03-28T09:17:00.283Z"
        }
      }
    }
  }
}
```
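Note that this fixture ships with `"mode": "time_series"` index settings; the LogsDB streams under test are created at runtime by the `dataStreams` service. As a rough sketch, the per-mode index settings the helper applies (mirroring `updateDataStreamTemplate` at the bottom of this diff) are:

```ts
// Sketch only: index settings the FTR helper applies per stream mode.
const settingsForMode = (mode?: 'tsdb' | 'logsdb') =>
  mode === 'tsdb'
    ? { mode: 'time_series', routing_path: 'request' } // TSDB requires a routing path
    : mode === 'logsdb'
    ? { mode: 'logsdb' } // LogsDB index mode
    : { mode: undefined }; // plain data stream
```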
Data view (index-pattern) saved object fixture for the LogsDB sample data:

@@ -0,0 +1,22 @@

```json
{
  "attributes": {
    "fieldFormatMap": "{\"hour_of_day\":{}}",
    "name": "Kibana Sample Data Logs (LogsDB)",
    "runtimeFieldMap": "{\"hour_of_day\":{\"type\":\"long\",\"script\":{\"source\":\"emit(doc['timestamp'].value.getHour());\"}}}",
    "timeFieldName": "timestamp",
    "title": "kibana_sample_data_logslogsdb"
  },
  "coreMigrationVersion": "8.8.0",
  "created_at": "2023-04-27T13:09:20.333Z",
  "id": "90943e30-9a47-11e8-b64d-95841ca0c247",
  "managed": false,
  "references": [],
  "sort": [
    1682600960333,
    64
  ],
  "type": "index-pattern",
  "typeMigrationVersion": "7.11.0",
  "updated_at": "2023-04-27T13:09:20.333Z",
  "version": "WzIxLDFd"
}
```
x-pack/test/functional/apps/lens/group4/index.ts:

```diff
@@ -82,5 +82,6 @@ export default ({ getService, loadTestFile, getPageObjects }: FtrProviderContext
     loadTestFile(require.resolve('./share')); // 1m 20s
     // keep it last in the group
     loadTestFile(require.resolve('./tsdb')); // 1m
+    loadTestFile(require.resolve('./logsdb')); // 1m
   });
 };
```
x-pack/test/functional/apps/lens/group4/logsdb.ts (new file, 586 lines)

@@ -0,0 +1,586 @@

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import moment from 'moment';
import { FtrProviderContext } from '../../../ftr_provider_context';
import {
  type ScenarioIndexes,
  getDataMapping,
  getDocsGenerator,
  setupScenarioRunner,
  TIME_PICKER_FORMAT,
} from './tsdb_logsdb_helpers';

export default function ({ getService, getPageObjects }: FtrProviderContext) {
  const { common, lens, discover, header } = getPageObjects([
    'common',
    'lens',
    'discover',
    'header',
  ]);
  const testSubjects = getService('testSubjects');
  const find = getService('find');
  const kibanaServer = getService('kibanaServer');
  const es = getService('es');
  const log = getService('log');
  const dataStreams = getService('dataStreams');
  const indexPatterns = getService('indexPatterns');
  const esArchiver = getService('esArchiver');
  const monacoEditor = getService('monacoEditor');
  const retry = getService('retry');

  const createDocs = getDocsGenerator(log, es, 'logsdb');

  describe('lens logsdb', function () {
    const logsdbIndex = 'kibana_sample_data_logslogsdb';
    const logsdbDataView = logsdbIndex;
    const logsdbEsArchive = 'test/functional/fixtures/es_archiver/kibana_sample_data_logs_logsdb';
    const fromTime = 'Apr 16, 2023 @ 00:00:00.000';
    const toTime = 'Jun 16, 2023 @ 00:00:00.000';

    before(async () => {
      log.info(`loading ${logsdbIndex} index...`);
      await esArchiver.loadIfNeeded(logsdbEsArchive);
      log.info(`creating a data view for "${logsdbDataView}"...`);
      await indexPatterns.create(
        {
          title: logsdbDataView,
          timeFieldName: '@timestamp',
        },
        { override: true }
      );
      log.info(`updating settings to use the "${logsdbDataView}" dataView...`);
      await kibanaServer.uiSettings.update({
        'dateFormat:tz': 'UTC',
        defaultIndex: '0ae0bc7a-e4ca-405c-ab67-f2b5913f2a51',
        'timepicker:timeDefaults': `{ "from": "${fromTime}", "to": "${toTime}" }`,
      });
    });

    after(async () => {
      await kibanaServer.savedObjects.cleanStandardList();
      await kibanaServer.uiSettings.replace({});
      await es.indices.delete({ index: [logsdbIndex] });
    });

    describe('smoke testing functions support', () => {
      before(async () => {
        await common.navigateToApp('lens');
        await lens.switchDataPanelIndexPattern(logsdbDataView);
        await lens.goToTimeRange();
      });

      afterEach(async () => {
        await lens.removeLayer();
      });

      // skip count for now as it's a special function and will
      // change automatically the unsupported field to Records when detected
      const allOperations = [
        'average',
        'max',
        'last_value',
        'median',
        'percentile',
        'percentile_rank',
        'standard_deviation',
        'sum',
        'unique_count',
        'min',
        'max',
        'counter_rate',
        'last_value',
      ];

      it(`should work with all operations`, async () => {
        // start from a count() over a date histogram
        await lens.configureDimension({
          dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
          operation: 'date_histogram',
          field: '@timestamp',
        });

        // minimum supports all logsdb field types
        await lens.configureDimension({
          dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
          operation: 'count',
          field: 'bytes',
          keepOpen: true,
        });

        // now check that operations won't show the incompatibility tooltip
        for (const operation of allOperations) {
          expect(
            testSubjects.exists(`lns-indexPatternDimension-${operation} incompatible`, {
              timeout: 500,
            })
          ).to.eql(false);
        }

        for (const operation of allOperations) {
          // try to change to the provided function and check all is ok
          await lens.selectOperation(operation);

          expect(
            await find.existsByCssSelector(
              '[data-test-subj="indexPattern-field-selection-row"] .euiFormErrorText'
            )
          ).to.be(false);
        }
        await lens.closeDimensionEditor();
      });

      describe('Scenarios with changing stream type', () => {
        const getScenarios = (
          initialIndex: string
        ): Array<{
          name: string;
          indexes: ScenarioIndexes[];
        }> => [
          {
            name: 'LogsDB stream with no additional stream/index',
            indexes: [{ index: initialIndex }],
          },
          {
            name: 'LogsDB stream with no additional stream/index and no host.name field',
            indexes: [
              {
                index: `${initialIndex}_no_host`,
                removeLogsDBFields: true,
                create: true,
                mode: 'logsdb',
              },
            ],
          },
          {
            name: 'LogsDB stream with an additional regular index',
            indexes: [{ index: initialIndex }, { index: 'regular_index', create: true }],
          },
          {
            name: 'LogsDB stream with an additional LogsDB stream',
            indexes: [
              { index: initialIndex },
              { index: 'logsdb_index_2', create: true, mode: 'logsdb' },
            ],
          },
          {
            name: 'LogsDB stream with an additional TSDB stream',
            indexes: [{ index: initialIndex }, { index: 'tsdb_index', create: true, mode: 'tsdb' }],
          },
          {
            name: 'LogsDB stream with an additional TSDB stream downsampled',
            indexes: [
              { index: initialIndex },
              { index: 'tsdb_index_downsampled', create: true, mode: 'tsdb', downsample: true },
            ],
          },
        ];

        const { runTestsForEachScenario, toTimeForScenarios, fromTimeForScenarios } =
          setupScenarioRunner(getService, getPageObjects, getScenarios);

        describe('Data-stream upgraded to LogsDB scenarios', () => {
          const streamIndex = 'data_stream';
          // rollover does not allow to change name, it will just change backing index underneath
          const streamConvertedToLogsDBIndex = streamIndex;

          before(async () => {
            log.info(`Creating "${streamIndex}" data stream...`);
            await dataStreams.createDataStream(
              streamIndex,
              getDataMapping({ mode: 'logsdb' }),
              undefined
            );

            // add some data to the stream
            await createDocs(streamIndex, { isStream: true }, fromTimeForScenarios);

            log.info(`Update settings for "${streamIndex}" dataView...`);
            await kibanaServer.uiSettings.update({
              'dateFormat:tz': 'UTC',
              'timepicker:timeDefaults': '{ "from": "now-1y", "to": "now" }',
            });
            log.info(`Upgrade "${streamIndex}" stream to LogsDB...`);

            const logsdbMapping = getDataMapping({ mode: 'logsdb' });
            await dataStreams.upgradeStream(streamIndex, logsdbMapping, 'logsdb');
            log.info(
              `Add more data to new "${streamConvertedToLogsDBIndex}" dataView (now with LogsDB backing index)...`
            );
            // add some more data when upgraded
            await createDocs(streamConvertedToLogsDBIndex, { isStream: true }, toTimeForScenarios);
          });

          after(async () => {
            await dataStreams.deleteDataStream(streamIndex);
          });

          runTestsForEachScenario(streamConvertedToLogsDBIndex, 'logsdb', (indexes) => {
            it(`should visualize a date histogram chart`, async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: '@timestamp',
              });

              // check that a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });

              await lens.waitForVisualization('xyVisChart');
              const data = await lens.getCurrentChartDebugState('xyVisChart');
              const bars = data?.bars![0].bars;

              log.info('Check counter data before the upgrade');
              // check there's some data before the upgrade
              expect(bars?.[0].y).to.be.above(0);
              log.info('Check counter data after the upgrade');
              // check there's some data after the upgrade
              expect(bars?.[bars.length - 1].y).to.be.above(0);
            });

            it(`should visualize a date histogram chart using a different date field`, async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check the counter field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });

              await lens.waitForVisualization('xyVisChart');
              const data = await lens.getCurrentChartDebugState('xyVisChart');
              const bars = data?.bars![0].bars;

              log.info('Check counter data before the upgrade');
              // check there's some data before the upgrade
              expect(bars?.[0].y).to.be.above(0);
              log.info('Check counter data after the upgrade');
              // check there's some data after the upgrade
              expect(bars?.[bars.length - 1].y).to.be.above(0);
            });

            it('should visualize an annotation layer from a logsDB stream', async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check the counter field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });
              await lens.createLayer('annotations');

              expect(
                (await find.allByCssSelector(`[data-test-subj^="lns-layerPanel-"]`)).length
              ).to.eql(2);
              expect(
                await (
                  await testSubjects.find('lnsXY_xAnnotationsPanel > lns-dimensionTrigger')
                ).getVisibleText()
              ).to.eql('Event');
              await testSubjects.click('lnsXY_xAnnotationsPanel > lns-dimensionTrigger');
              await testSubjects.click('lnsXY_annotation_query');
              await lens.configureQueryAnnotation({
                queryString: 'host.name: *',
                timeField: '@timestamp',
                textDecoration: { type: 'name' },
                extraFields: ['host.name', 'utc_time'],
              });
              await lens.closeDimensionEditor();

              await testSubjects.existOrFail('xyVisGroupedAnnotationIcon');
              await lens.removeLayer(1);
            });

            it('should visualize an annotation layer from a logsDB stream using another time field', async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check the counter field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });
              await lens.createLayer('annotations');

              expect(
                (await find.allByCssSelector(`[data-test-subj^="lns-layerPanel-"]`)).length
              ).to.eql(2);
              expect(
                await (
                  await testSubjects.find('lnsXY_xAnnotationsPanel > lns-dimensionTrigger')
                ).getVisibleText()
              ).to.eql('Event');
              await testSubjects.click('lnsXY_xAnnotationsPanel > lns-dimensionTrigger');
              await testSubjects.click('lnsXY_annotation_query');
              await lens.configureQueryAnnotation({
                queryString: 'host.name: *',
                timeField: 'utc_time',
                textDecoration: { type: 'name' },
                extraFields: ['host.name', '@timestamp'],
              });
              await lens.closeDimensionEditor();

              await testSubjects.existOrFail('xyVisGroupedAnnotationIcon');
              await lens.removeLayer(1);
            });

            it('should visualize correctly ES|QL queries based on a LogsDB stream', async () => {
              await common.navigateToApp('discover');
              await discover.selectTextBaseLang();
              await header.waitUntilLoadingHasFinished();
              await monacoEditor.setCodeEditorValue(
                `from ${indexes
                  .map(({ index }) => index)
                  .join(', ')} | stats averageB = avg(bytes) by extension`
              );
              await testSubjects.click('querySubmitButton');
              await header.waitUntilLoadingHasFinished();
              await testSubjects.click('unifiedHistogramEditFlyoutVisualization');

              await header.waitUntilLoadingHasFinished();

              await retry.waitFor('lens flyout', async () => {
                const dimensions = await testSubjects.findAll('lns-dimensionTrigger-textBased');
                return (
                  dimensions.length === 2 && (await dimensions[1].getVisibleText()) === 'averageB'
                );
              });

              // go back to Lens to not break the wrapping function
              await common.navigateToApp('lens');
            });
          });
        });

        describe('LogsDB downgraded to regular data stream scenarios', () => {
          const logsdbStream = 'logsdb_stream_dowgradable';
          // rollover does not allow to change name, it will just change backing index underneath
          const logsdbConvertedToStream = logsdbStream;

          before(async () => {
            log.info(`Creating "${logsdbStream}" data stream...`);
            await dataStreams.createDataStream(
              logsdbStream,
              getDataMapping({ mode: 'logsdb' }),
              'logsdb'
            );

            // add some data to the stream
            await createDocs(logsdbStream, { isStream: true }, fromTimeForScenarios);

            log.info(`Update settings for "${logsdbStream}" dataView...`);
            await kibanaServer.uiSettings.update({
              'dateFormat:tz': 'UTC',
              'timepicker:timeDefaults': '{ "from": "now-1y", "to": "now" }',
            });
            log.info(
              `Dowgrade "${logsdbStream}" stream into regular stream "${logsdbConvertedToStream}"...`
            );

            await dataStreams.downgradeStream(
              logsdbStream,
              getDataMapping({ mode: 'logsdb' }),
              'logsdb'
            );
            log.info(
              `Add more data to new "${logsdbConvertedToStream}" dataView (no longer LogsDB)...`
            );
            // add some more data when upgraded
            await createDocs(logsdbConvertedToStream, { isStream: true }, toTimeForScenarios);
          });

          after(async () => {
            await dataStreams.deleteDataStream(logsdbConvertedToStream);
          });

          runTestsForEachScenario(logsdbConvertedToStream, 'logsdb', (indexes) => {
            it(`should visualize a date histogram chart`, async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: '@timestamp',
              });

              // check that a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });

              await lens.waitForVisualization('xyVisChart');
              const data = await lens.getCurrentChartDebugState('xyVisChart');
              const bars = data?.bars![0].bars;

              log.info('Check counter data before the upgrade');
              // check there's some data before the upgrade
              expect(bars?.[0].y).to.be.above(0);
              log.info('Check counter data after the upgrade');
              // check there's some data after the upgrade
              expect(bars?.[bars.length - 1].y).to.be.above(0);
            });

            it(`should visualize a date histogram chart using a different date field`, async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check the counter field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });

              await lens.waitForVisualization('xyVisChart');
              const data = await lens.getCurrentChartDebugState('xyVisChart');
              const bars = data?.bars![0].bars;

              log.info('Check counter data before the upgrade');
              // check there's some data before the upgrade
              expect(bars?.[0].y).to.be.above(0);
              log.info('Check counter data after the upgrade');
              // check there's some data after the upgrade
              expect(bars?.[bars.length - 1].y).to.be.above(0);
            });

            it('should visualize an annotation layer from a logsDB stream', async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check the counter field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });
              await lens.createLayer('annotations');

              expect(
                (await find.allByCssSelector(`[data-test-subj^="lns-layerPanel-"]`)).length
              ).to.eql(2);
              expect(
                await (
                  await testSubjects.find('lnsXY_xAnnotationsPanel > lns-dimensionTrigger')
                ).getVisibleText()
              ).to.eql('Event');
              await testSubjects.click('lnsXY_xAnnotationsPanel > lns-dimensionTrigger');
              await testSubjects.click('lnsXY_annotation_query');
              await lens.configureQueryAnnotation({
                queryString: 'host.name: *',
                timeField: '@timestamp',
                textDecoration: { type: 'name' },
                extraFields: ['host.name', 'utc_time'],
              });
              await lens.closeDimensionEditor();

              await testSubjects.existOrFail('xyVisGroupedAnnotationIcon');
              await lens.removeLayer(1);
            });

            it('should visualize an annotation layer from a logsDB stream using another time field', async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check the counter field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });
              await lens.createLayer('annotations');

              expect(
                (await find.allByCssSelector(`[data-test-subj^="lns-layerPanel-"]`)).length
              ).to.eql(2);
              expect(
                await (
                  await testSubjects.find('lnsXY_xAnnotationsPanel > lns-dimensionTrigger')
                ).getVisibleText()
              ).to.eql('Event');
              await testSubjects.click('lnsXY_xAnnotationsPanel > lns-dimensionTrigger');
              await testSubjects.click('lnsXY_annotation_query');
              await lens.configureQueryAnnotation({
                queryString: 'host.name: *',
                timeField: 'utc_time',
                textDecoration: { type: 'name' },
                extraFields: ['host.name', '@timestamp'],
              });
              await lens.closeDimensionEditor();

              await testSubjects.existOrFail('xyVisGroupedAnnotationIcon');
              await lens.removeLayer(1);
            });

            it('should visualize correctly ES|QL queries based on a LogsDB stream', async () => {
              await common.navigateToApp('discover');
              await discover.selectTextBaseLang();

              // Use the lens page object here also for discover: both use the same timePicker object
              await lens.goToTimeRange(
                fromTimeForScenarios,
                moment
                  .utc(toTimeForScenarios, TIME_PICKER_FORMAT)
                  .add(2, 'hour')
                  .format(TIME_PICKER_FORMAT)
              );

              await header.waitUntilLoadingHasFinished();
              await monacoEditor.setCodeEditorValue(
                `from ${indexes
                  .map(({ index }) => index)
                  .join(', ')} | stats averageB = avg(bytes) by extension`
              );
              await testSubjects.click('querySubmitButton');
              await header.waitUntilLoadingHasFinished();
              await testSubjects.click('unifiedHistogramEditFlyoutVisualization');

              await header.waitUntilLoadingHasFinished();

              await retry.waitFor('lens flyout', async () => {
                const dimensions = await testSubjects.findAll('lns-dimensionTrigger-textBased');
                return (
                  dimensions.length === 2 && (await dimensions[1].getVisibleText()) === 'averageB'
                );
              });

              // go back to Lens to not break the wrapping function
              await common.navigateToApp('lens');
            });
          });
        });
      });
    });
  });
}
```
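The upgrade/downgrade steps above rely on the fact that a rollover keeps the data stream name and only swaps the backing index (hence `streamConvertedToLogsDBIndex === streamIndex`). A hedged sketch of what `dataStreams.upgradeStream` amounts to in Elasticsearch calls; the helper's actual body is not shown in this diff, so the function below is an assumption built from `updateDataStreamTemplate` further down:

```ts
import { Client } from '@elastic/elasticsearch';

// Assumption: "upgrading" re-points the stream's component template at
// LogsDB settings, then rolls the stream over so the next backing index
// picks them up.
async function upgradeToLogsDB(es: Client, stream: string) {
  await es.cluster.putComponentTemplate({
    name: `${stream}_mapping`, // naming convention from updateDataStreamTemplate below
    template: { settings: { mode: 'logsdb' } },
  });
  // Rollover keeps the stream name and swaps the backing index.
  await es.indices.rollover({ alias: stream });
}
```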
x-pack/test/functional/apps/lens/group4/tsdb.ts (modified; shared helpers extracted into tsdb_logsdb_helpers.ts, and the boolean `tsdb` flag replaced by `mode: 'tsdb' | 'logsdb'`):

```diff
@@ -8,234 +8,16 @@
 import expect from '@kbn/expect';
 import { partition } from 'lodash';
 import moment from 'moment';
-import { MappingProperty } from '@elastic/elasticsearch/lib/api/types';
 import { FtrProviderContext } from '../../../ftr_provider_context';
 
-const TEST_DOC_COUNT = 100;
-const TIME_PICKER_FORMAT = 'MMM D, YYYY [@] HH:mm:ss.SSS';
-const timeSeriesMetrics: Record<string, 'gauge' | 'counter'> = {
-  bytes_gauge: 'gauge',
-  bytes_counter: 'counter',
-};
-const timeSeriesDimensions = ['request', 'url'];
-
-type TestDoc = Record<string, string | string[] | number | null | Record<string, unknown>>;
-
-const testDocTemplate: TestDoc = {
-  agent: 'Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1',
-  bytes: 6219,
-  clientip: '223.87.60.27',
-  extension: 'deb',
-  geo: {
-    srcdest: 'US:US',
-    src: 'US',
-    dest: 'US',
-    coordinates: { lat: 39.41042861, lon: -88.8454325 },
-  },
-  host: 'artifacts.elastic.co',
-  index: 'kibana_sample_data_logs',
-  ip: '223.87.60.27',
-  machine: { ram: 8589934592, os: 'win 8' },
-  memory: null,
-  message:
-    '223.87.60.27 - - [2018-07-22T00:39:02.912Z] "GET /elasticsearch/elasticsearch-6.3.2.deb_1 HTTP/1.1" 200 6219 "-" "Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1"',
-  phpmemory: null,
-  referer: 'http://twitter.com/success/wendy-lawrence',
-  request: '/elasticsearch/elasticsearch-6.3.2.deb',
-  response: 200,
-  tags: ['success', 'info'],
-  '@timestamp': '2018-07-22T00:39:02.912Z',
-  url: 'https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.3.2.deb_1',
-  utc_time: '2018-07-22T00:39:02.912Z',
-  event: { dataset: 'sample_web_logs' },
-  bytes_gauge: 0,
-  bytes_counter: 0,
-};
-
-function getDataMapping(
-  { tsdb, removeTSDBFields }: { tsdb: boolean; removeTSDBFields?: boolean } = {
-    tsdb: false,
-  }
-): Record<string, MappingProperty> {
-  const dataStreamMapping: Record<string, MappingProperty> = {
-    '@timestamp': {
-      type: 'date',
-    },
-    agent: {
-      fields: {
-        keyword: {
-          ignore_above: 256,
-          type: 'keyword',
-        },
-      },
-      type: 'text',
-    },
-    bytes: {
-      type: 'long',
-    },
-    bytes_counter: {
-      type: 'long',
-    },
-    bytes_gauge: {
-      type: 'long',
-    },
-    clientip: {
-      type: 'ip',
-    },
-    event: {
-      properties: {
-        dataset: {
-          type: 'keyword',
-        },
-      },
-    },
-    extension: {
-      fields: {
-        keyword: {
-          ignore_above: 256,
-          type: 'keyword',
-        },
-      },
-      type: 'text',
-    },
-    geo: {
-      properties: {
-        coordinates: {
-          type: 'geo_point',
-        },
-        dest: {
-          type: 'keyword',
-        },
-        src: {
-          type: 'keyword',
-        },
-        srcdest: {
-          type: 'keyword',
-        },
-      },
-    },
-    host: {
-      fields: {
-        keyword: {
-          ignore_above: 256,
-          type: 'keyword',
-        },
-      },
-      type: 'text',
-    },
-    index: {
-      fields: {
-        keyword: {
-          ignore_above: 256,
-          type: 'keyword',
-        },
-      },
-      type: 'text',
-    },
-    ip: {
-      type: 'ip',
-    },
-    machine: {
-      properties: {
-        os: {
-          fields: {
-            keyword: {
-              ignore_above: 256,
-              type: 'keyword',
-            },
-          },
-          type: 'text',
-        },
-        ram: {
-          type: 'long',
-        },
-      },
-    },
-    memory: {
-      type: 'double',
-    },
-    message: {
-      fields: {
-        keyword: {
-          ignore_above: 256,
-          type: 'keyword',
-        },
-      },
-      type: 'text',
-    },
-    phpmemory: {
-      type: 'long',
-    },
-    referer: {
-      type: 'keyword',
-    },
-    request: {
-      type: 'keyword',
-    },
-    response: {
-      fields: {
-        keyword: {
-          ignore_above: 256,
-          type: 'keyword',
-        },
-      },
-      type: 'text',
-    },
-    tags: {
-      fields: {
-        keyword: {
-          ignore_above: 256,
-          type: 'keyword',
-        },
-      },
-      type: 'text',
-    },
-    timestamp: {
-      path: '@timestamp',
-      type: 'alias',
-    },
-    url: {
-      type: 'keyword',
-    },
-    utc_time: {
-      type: 'date',
-    },
-  };
-
-  if (tsdb) {
-    // augment the current mapping
-    for (const [fieldName, fieldMapping] of Object.entries(dataStreamMapping || {})) {
-      if (
-        timeSeriesMetrics[fieldName] &&
-        (fieldMapping.type === 'double' || fieldMapping.type === 'long')
-      ) {
-        fieldMapping.time_series_metric = timeSeriesMetrics[fieldName];
-      }
-
-      if (timeSeriesDimensions.includes(fieldName) && fieldMapping.type === 'keyword') {
-        fieldMapping.time_series_dimension = true;
-      }
-    }
-  } else if (removeTSDBFields) {
-    for (const fieldName of Object.keys(timeSeriesMetrics)) {
-      delete dataStreamMapping[fieldName];
-    }
-  }
-  return dataStreamMapping;
-}
-
-function sumFirstNValues(n: number, bars: Array<{ y: number }> | undefined): number {
-  const indexes = Array(n)
-    .fill(1)
-    .map((_, i) => i);
-  let countSum = 0;
-  for (const index of indexes) {
-    if (bars?.[index]) {
-      countSum += bars[index].y;
-    }
-  }
-  return countSum;
-}
+import {
+  type ScenarioIndexes,
+  TEST_DOC_COUNT,
+  TIME_PICKER_FORMAT,
+  getDataMapping,
+  getDocsGenerator,
+  setupScenarioRunner,
+  sumFirstNValues,
+} from './tsdb_logsdb_helpers';
 
 export default function ({ getService, getPageObjects }: FtrProviderContext) {
   const { common, lens, dashboard } = getPageObjects(['common', 'lens', 'dashboard']);
@@ -245,71 +27,11 @@
   const es = getService('es');
   const log = getService('log');
   const dataStreams = getService('dataStreams');
   const elasticChart = getService('elasticChart');
   const indexPatterns = getService('indexPatterns');
   const esArchiver = getService('esArchiver');
   const comboBox = getService('comboBox');
 
-  const createDocs = async (
-    esIndex: string,
-    { isStream, removeTSDBFields }: { isStream: boolean; removeTSDBFields?: boolean },
-    startTime: string
-  ) => {
-    log.info(
-      `Adding ${TEST_DOC_COUNT} to ${esIndex} with starting time from ${moment
-        .utc(startTime, TIME_PICKER_FORMAT)
-        .format(TIME_PICKER_FORMAT)} to ${moment
-        .utc(startTime, TIME_PICKER_FORMAT)
-        .add(2 * TEST_DOC_COUNT, 'seconds')
-        .format(TIME_PICKER_FORMAT)}`
-    );
-    const docs = Array<TestDoc>(TEST_DOC_COUNT)
-      .fill(testDocTemplate)
-      .map((templateDoc, i) => {
-        const timestamp = moment
-          .utc(startTime, TIME_PICKER_FORMAT)
-          .add(TEST_DOC_COUNT + i, 'seconds')
-          .format();
-        const doc: TestDoc = {
-          ...templateDoc,
-          '@timestamp': timestamp,
-          utc_time: timestamp,
-          bytes_gauge: Math.floor(Math.random() * 10000 * i),
-          bytes_counter: 5000,
-        };
-        if (removeTSDBFields) {
-          for (const field of Object.keys(timeSeriesMetrics)) {
-            delete doc[field];
-          }
-        }
-        return doc;
-      });
-
-    const result = await es.bulk(
-      {
-        index: esIndex,
-        body: docs.map((d) => `{"${isStream ? 'create' : 'index'}": {}}\n${JSON.stringify(d)}\n`),
-      },
-      { meta: true }
-    );
-
-    const res = result.body;
-
-    if (res.errors) {
-      const resultsWithErrors = res.items
-        .filter(({ index }) => index?.error)
-        .map(({ index }) => index?.error);
-      for (const error of resultsWithErrors) {
-        log.error(`Error: ${JSON.stringify(error)}`);
-      }
-      const [indexExists, dataStreamExists] = await Promise.all([
-        es.indices.exists({ index: esIndex }),
-        es.indices.getDataStream({ name: esIndex }),
-      ]);
-      log.debug(`Index exists: ${indexExists} - Data stream exists: ${dataStreamExists}`);
-    }
-    log.info(`Indexed ${res.items.length} test data docs.`);
-  };
+  const createDocs = getDocsGenerator(log, es, 'tsdb');
 
   describe('lens tsdb', function () {
     const tsdbIndex = 'kibana_sample_data_logstsdb';
@@ -592,23 +314,11 @@
     });
 
     describe('Scenarios with changing stream type', () => {
-      const now = moment().utc();
-      const fromMoment = now.clone().subtract(1, 'hour');
-      const toMoment = now.clone();
-      const fromTimeForScenarios = fromMoment.format(TIME_PICKER_FORMAT);
-      const toTimeForScenarios = toMoment.format(TIME_PICKER_FORMAT);
-
       const getScenarios = (
         initialIndex: string
       ): Array<{
         name: string;
-        indexes: Array<{
-          index: string;
-          create?: boolean;
-          downsample?: boolean;
-          tsdb?: boolean;
-          removeTSDBFields?: boolean;
-        }>;
+        indexes: ScenarioIndexes[];
      }> => [
        {
          name: 'Dataview with no additional stream/index',
@@ -625,7 +335,7 @@
          name: 'Dataview with an additional downsampled TSDB stream',
          indexes: [
            { index: initialIndex },
-            { index: 'tsdb_index_2', create: true, tsdb: true, downsample: true },
+            { index: 'tsdb_index_2', create: true, mode: 'tsdb', downsample: true },
          ],
        },
        {
@@ -633,112 +343,17 @@
          indexes: [
            { index: initialIndex },
            { index: 'regular_index', create: true, removeTSDBFields: true },
-            { index: 'tsdb_index_2', create: true, tsdb: true, downsample: true },
+            { index: 'tsdb_index_2', create: true, mode: 'tsdb', downsample: true },
          ],
        },
        {
          name: 'Dataview with an additional TSDB stream',
-          indexes: [{ index: initialIndex }, { index: 'tsdb_index_2', create: true, tsdb: true }],
+          indexes: [{ index: initialIndex }, { index: 'tsdb_index_2', create: true, mode: 'tsdb' }],
        },
      ];
 
-      function runTestsForEachScenario(
-        initialIndex: string,
-        testingFn: (
-          indexes: Array<{
-            index: string;
-            create?: boolean;
-            downsample?: boolean;
-            tsdb?: boolean;
-            removeTSDBFields?: boolean;
-          }>
-        ) => void
-      ): void {
-        for (const { name, indexes } of getScenarios(initialIndex)) {
-          describe(name, () => {
-            let dataViewName: string;
-            let downsampledTargetIndex: string = '';
-
-            before(async () => {
-              for (const { index, create, downsample, tsdb, removeTSDBFields } of indexes) {
-                if (create) {
-                  if (tsdb) {
-                    await dataStreams.createDataStream(
-                      index,
-                      getDataMapping({ tsdb, removeTSDBFields }),
-                      tsdb
-                    );
-                  } else {
-                    log.info(`creating a index "${index}" with mapping...`);
-                    await es.indices.create({
-                      index,
-                      mappings: {
-                        properties: getDataMapping({ tsdb: Boolean(tsdb), removeTSDBFields }),
-                      },
-                    });
-                  }
-                  // add data to the newly created index
-                  await createDocs(
-                    index,
-                    { isStream: Boolean(tsdb), removeTSDBFields },
-                    fromTimeForScenarios
-                  );
-                }
-                if (downsample) {
-                  downsampledTargetIndex = await dataStreams.downsampleTSDBIndex(index, {
-                    isStream: Boolean(tsdb),
-                  });
-                }
-              }
-              dataViewName = `${indexes.map(({ index }) => index).join(',')}${
-                downsampledTargetIndex ? `,${downsampledTargetIndex}` : ''
-              }`;
-              log.info(`creating a data view for "${dataViewName}"...`);
-              await indexPatterns.create(
-                {
-                  title: dataViewName,
-                  timeFieldName: '@timestamp',
-                },
-                { override: true }
-              );
-              await common.navigateToApp('lens');
-              await elasticChart.setNewChartUiDebugFlag(true);
-              // go to the
-              await lens.goToTimeRange(
-                fromTimeForScenarios,
-                moment
-                  .utc(toTimeForScenarios, TIME_PICKER_FORMAT)
-                  .add(2, 'hour')
-                  .format(TIME_PICKER_FORMAT) // consider also new documents
-              );
-            });
-
-            after(async () => {
-              for (const { index, create, tsdb } of indexes) {
-                if (create) {
-                  if (tsdb) {
-                    await dataStreams.deleteDataStream(index);
-                  } else {
-                    log.info(`deleting the index "${index}"...`);
-                    await es.indices.delete({
-                      index,
-                    });
-                  }
-                }
-                // no need to cleant he specific downsample index as everything linked to the stream
-                // is cleaned up automatically
-              }
-            });
-
-            beforeEach(async () => {
-              await lens.switchDataPanelIndexPattern(dataViewName);
-              await lens.removeLayer();
-            });
-
-            testingFn(indexes);
-          });
-        }
-      }
+      const { runTestsForEachScenario, toTimeForScenarios, fromTimeForScenarios } =
+        setupScenarioRunner(getService, getPageObjects, getScenarios);
 
      describe('Data-stream upgraded to TSDB scenarios', () => {
        const streamIndex = 'data_stream';
@@ -747,7 +362,11 @@
 
        before(async () => {
          log.info(`Creating "${streamIndex}" data stream...`);
-          await dataStreams.createDataStream(streamIndex, getDataMapping(), false);
+          await dataStreams.createDataStream(
+            streamIndex,
+            getDataMapping({ mode: 'tsdb' }),
+            undefined
+          );
 
          // add some data to the stream
          await createDocs(streamIndex, { isStream: true }, fromTimeForScenarios);
@@ -759,8 +378,8 @@
          });
          log.info(`Upgrade "${streamIndex}" stream to TSDB...`);
 
-          const tsdbMapping = getDataMapping({ tsdb: true });
-          await dataStreams.upgradeStreamToTSDB(streamIndex, tsdbMapping);
+          const tsdbMapping = getDataMapping({ mode: 'tsdb' });
+          await dataStreams.upgradeStream(streamIndex, tsdbMapping, 'tsdb');
          log.info(
            `Add more data to new "${streamConvertedToTsdbIndex}" dataView (now with TSDB backing index)...`
          );
@@ -772,7 +391,7 @@
          await dataStreams.deleteDataStream(streamIndex);
        });
 
-        runTestsForEachScenario(streamConvertedToTsdbIndex, (indexes) => {
+        runTestsForEachScenario(streamConvertedToTsdbIndex, 'tsdb', (indexes) => {
          it('should detect the data stream has now been upgraded to TSDB', async () => {
            await lens.configureDimension({
              dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
@@ -850,7 +469,7 @@
 
        before(async () => {
          log.info(`Creating "${tsdbStream}" data stream...`);
-          await dataStreams.createDataStream(tsdbStream, getDataMapping({ tsdb: true }), true);
+          await dataStreams.createDataStream(tsdbStream, getDataMapping({ mode: 'tsdb' }), 'tsdb');
 
          // add some data to the stream
          await createDocs(tsdbStream, { isStream: true }, fromTimeForScenarios);
@@ -864,7 +483,7 @@
            `Dowgrade "${tsdbStream}" stream into regular stream "${tsdbConvertedToStream}"...`
          );
 
-          await dataStreams.downgradeTSDBtoStream(tsdbStream, getDataMapping({ tsdb: true }));
+          await dataStreams.downgradeStream(tsdbStream, getDataMapping({ mode: 'tsdb' }), 'tsdb');
          log.info(`Add more data to new "${tsdbConvertedToStream}" dataView (no longer TSDB)...`);
          // add some more data when upgraded
          await createDocs(tsdbConvertedToStream, { isStream: true }, toTimeForScenarios);
@@ -874,7 +493,7 @@
          await dataStreams.deleteDataStream(tsdbConvertedToStream);
        });
 
-        runTestsForEachScenario(tsdbConvertedToStream, (indexes) => {
+        runTestsForEachScenario(tsdbConvertedToStream, 'tsdb', (indexes) => {
          it('should keep TSDB restrictions only if a tsdb stream is in the dataView mix', async () => {
            await lens.configureDimension({
              dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
@@ -893,7 +512,7 @@
              testSubjects.exists(`lns-indexPatternDimension-average incompatible`, {
                timeout: 500,
              })
-          ).to.eql(indexes.some(({ tsdb }) => tsdb));
+          ).to.eql(indexes.some(({ mode }) => mode === 'tsdb'));
          await lens.closeDimensionEditor();
        });
 
```
480
x-pack/test/functional/apps/lens/group4/tsdb_logsdb_helpers.ts
Normal file
480
x-pack/test/functional/apps/lens/group4/tsdb_logsdb_helpers.ts
Normal file
|
@ -0,0 +1,480 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import expect from '@kbn/expect';
|
||||
import { Client } from '@elastic/elasticsearch';
|
||||
import { MappingProperty } from '@elastic/elasticsearch/lib/api/types';
|
||||
import { ToolingLog } from '@kbn/tooling-log';
|
||||
import moment from 'moment';
|
||||
import type { FtrProviderContext } from '../../../ftr_provider_context';
|
||||
|
||||
export const TEST_DOC_COUNT = 100;
|
||||
export const TIME_PICKER_FORMAT = 'MMM D, YYYY [@] HH:mm:ss.SSS';
|
||||
export const timeSeriesMetrics: Record<string, 'gauge' | 'counter'> = {
|
||||
bytes_gauge: 'gauge',
|
||||
bytes_counter: 'counter',
|
||||
};
|
||||
export const timeSeriesDimensions = ['request', 'url'];
|
||||
export const logsDBSpecialFields = ['host'];
|
||||
|
||||
export const sharedESArchive =
|
||||
'test/functional/fixtures/es_archiver/kibana_sample_data_logs_logsdb';
|
||||
export const fromTime = 'Apr 16, 2023 @ 00:00:00.000';
|
||||
export const toTime = 'Jun 16, 2023 @ 00:00:00.000';
|
||||
|
||||
export type TestDoc = Record<string, string | string[] | number | null | Record<string, unknown>>;
|
||||
|
||||
export function testDocTemplate(mode: 'tsdb' | 'logsdb'): TestDoc {
|
||||
return {
|
||||
agent: 'Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1',
|
||||
bytes: 6219,
|
||||
clientip: '223.87.60.27',
|
||||
extension: 'deb',
|
||||
geo: {
|
||||
srcdest: 'US:US',
|
||||
src: 'US',
|
||||
dest: 'US',
|
||||
coordinates: { lat: 39.41042861, lon: -88.8454325 },
|
||||
},
|
||||
host: mode === 'tsdb' ? 'artifacts.elastic.co' : { name: 'artifacts.elastic.co' },
|
||||
index: 'kibana_sample_data_logs',
|
||||
ip: '223.87.60.27',
|
||||
machine: { ram: 8589934592, os: 'win 8' },
|
||||
memory: null,
|
||||
message:
|
||||
'223.87.60.27 - - [2018-07-22T00:39:02.912Z] "GET /elasticsearch/elasticsearch-6.3.2.deb_1 HTTP/1.1" 200 6219 "-" "Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1"',
|
||||
phpmemory: null,
|
||||
referer: 'http://twitter.com/success/wendy-lawrence',
|
||||
request: '/elasticsearch/elasticsearch-6.3.2.deb',
|
||||
response: 200,
|
||||
tags: ['success', 'info'],
|
||||
'@timestamp': '2018-07-22T00:39:02.912Z',
|
||||
url: 'https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.3.2.deb_1',
|
||||
utc_time: '2018-07-22T00:39:02.912Z',
|
||||
event: { dataset: 'sample_web_logs' },
|
||||
bytes_gauge: 0,
|
||||
bytes_counter: 0,
|
||||
};
|
||||
}
|
||||
|
||||
export function getDataMapping({
|
||||
mode,
|
||||
removeTSDBFields,
|
||||
removeLogsDBFields,
|
||||
}: {
|
||||
mode: 'tsdb' | 'logsdb';
|
||||
removeTSDBFields?: boolean;
|
||||
removeLogsDBFields?: boolean;
|
||||
}): Record<string, MappingProperty> {
|
||||
const dataStreamMapping: Record<string, MappingProperty> = {
|
||||
'@timestamp': {
|
||||
type: 'date',
|
||||
},
|
||||
agent: {
|
||||
fields: {
|
||||
keyword: {
|
||||
ignore_above: 256,
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
type: 'text',
|
||||
},
|
||||
bytes: {
|
||||
type: 'long',
|
||||
},
|
||||
bytes_counter: {
|
||||
type: 'long',
|
||||
},
|
||||
bytes_gauge: {
|
||||
type: 'long',
|
||||
},
|
||||
clientip: {
|
||||
type: 'ip',
|
||||
},
|
||||
event: {
|
||||
properties: {
|
||||
dataset: {
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
},
|
||||
extension: {
|
||||
fields: {
|
||||
keyword: {
|
||||
ignore_above: 256,
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
type: 'text',
|
||||
},
|
||||
geo: {
|
||||
properties: {
|
||||
coordinates: {
|
||||
type: 'geo_point',
|
||||
},
|
||||
dest: {
|
||||
type: 'keyword',
|
||||
},
|
||||
src: {
|
||||
type: 'keyword',
|
||||
},
|
||||
srcdest: {
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
},
|
||||
host:
|
||||
mode === 'tsdb'
|
||||
? {
|
||||
fields: {
|
||||
keyword: {
|
||||
ignore_above: 256,
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
type: 'text',
|
||||
}
|
||||
: {
|
||||
properties: {
|
||||
name: {
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
},
|
||||
index: {
|
||||
fields: {
|
||||
keyword: {
|
||||
ignore_above: 256,
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
type: 'text',
|
||||
},
|
||||
ip: {
|
||||
type: 'ip',
|
||||
},
|
||||
machine: {
|
||||
properties: {
|
||||
os: {
|
||||
fields: {
|
||||
keyword: {
|
||||
ignore_above: 256,
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
type: 'text',
|
||||
},
|
||||
ram: {
|
||||
type: 'long',
|
||||
},
|
||||
},
|
||||
},
|
||||
memory: {
|
||||
type: 'double',
|
||||
},
|
||||
message: {
|
||||
fields: {
|
||||
keyword: {
|
||||
ignore_above: 256,
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
type: 'text',
|
||||
},
|
||||
phpmemory: {
|
||||
type: 'long',
|
||||
},
|
||||
referer: {
|
||||
type: 'keyword',
|
||||
},
|
||||
request: {
|
||||
type: 'keyword',
|
||||
},
|
||||
response: {
|
||||
fields: {
|
||||
keyword: {
|
||||
ignore_above: 256,
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
type: 'text',
|
||||
},
|
||||
tags: {
|
||||
fields: {
|
||||
keyword: {
|
||||
ignore_above: 256,
|
||||
type: 'keyword',
|
||||
},
|
||||
},
|
||||
type: 'text',
|
||||
},
|
||||
timestamp: {
|
||||
path: '@timestamp',
|
||||
type: 'alias',
|
||||
},
|
||||
url: {
|
||||
type: 'keyword',
|
||||
},
|
||||
utc_time: {
|
||||
type: 'date',
|
||||
},
|
||||
};
|
||||
|
||||
if (mode === 'tsdb') {
|
||||
// augment the current mapping
|
||||
for (const [fieldName, fieldMapping] of Object.entries(dataStreamMapping || {})) {
|
||||
if (
|
||||
timeSeriesMetrics[fieldName] &&
|
||||
(fieldMapping.type === 'double' || fieldMapping.type === 'long')
|
||||
) {
|
||||
fieldMapping.time_series_metric = timeSeriesMetrics[fieldName];
|
||||
}
|
||||
|
||||
if (timeSeriesDimensions.includes(fieldName) && fieldMapping.type === 'keyword') {
|
||||
fieldMapping.time_series_dimension = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (removeTSDBFields) {
|
||||
for (const fieldName of Object.keys(timeSeriesMetrics)) {
|
||||
delete dataStreamMapping[fieldName];
|
||||
}
|
||||
}
|
||||
if (removeLogsDBFields) {
|
||||
for (const fieldName of logsDBSpecialFields) {
|
||||
delete dataStreamMapping[fieldName];
|
||||
}
|
||||
}
|
||||
return dataStreamMapping;
|
||||
}
|
||||
|
||||
export function sumFirstNValues(n: number, bars: Array<{ y: number }> | undefined): number {
|
||||
const indexes = Array(n)
|
||||
.fill(1)
|
||||
.map((_, i) => i);
|
||||
let countSum = 0;
|
||||
for (const index of indexes) {
|
||||
if (bars?.[index]) {
|
||||
countSum += bars[index].y;
|
||||
}
|
||||
}
|
||||
return countSum;
|
||||
}
|
||||
|
||||
export const getDocsGenerator =
|
||||
(log: ToolingLog, es: Client, mode: 'tsdb' | 'logsdb') =>
|
||||
async (
|
||||
esIndex: string,
|
||||
{
|
||||
isStream,
|
||||
removeTSDBFields,
|
||||
removeLogsDBFields,
|
||||
}: { isStream: boolean; removeTSDBFields?: boolean; removeLogsDBFields?: boolean },
|
||||
startTime: string
|
||||
) => {
|
||||
log.info(
|
||||
`Adding ${TEST_DOC_COUNT} to ${esIndex} with starting time from ${moment
|
||||
.utc(startTime, TIME_PICKER_FORMAT)
|
||||
.format(TIME_PICKER_FORMAT)} to ${moment
|
||||
.utc(startTime, TIME_PICKER_FORMAT)
|
||||
.add(2 * TEST_DOC_COUNT, 'seconds')
|
||||
.format(TIME_PICKER_FORMAT)}`
|
||||
);
|
||||
const docs = Array<TestDoc>(TEST_DOC_COUNT)
|
||||
.fill(testDocTemplate(mode))
|
||||
.map((templateDoc, i) => {
|
||||
const timestamp = moment
|
||||
.utc(startTime, TIME_PICKER_FORMAT)
|
||||
.add(TEST_DOC_COUNT + i, 'seconds')
|
||||
.format();
|
||||
const doc: TestDoc = {
|
||||
...templateDoc,
|
||||
'@timestamp': timestamp,
|
||||
utc_time: timestamp,
|
||||
bytes_gauge: Math.floor(Math.random() * 10000 * i),
|
||||
bytes_counter: 5000,
|
||||
};
|
||||
if (removeTSDBFields) {
|
||||
for (const field of Object.keys(timeSeriesMetrics)) {
|
||||
delete doc[field];
|
||||
}
|
||||
}
|
||||
// do not remove the fields for logsdb - ignore the flag
|
||||
return doc;
|
||||
});
|
||||
|
||||
const result = await es.bulk(
|
||||
{
|
||||
index: esIndex,
|
||||
body: docs.map((d) => `{"${isStream ? 'create' : 'index'}": {}}\n${JSON.stringify(d)}\n`),
|
||||
},
|
||||
{ meta: true }
|
||||
);
|
||||
|
||||
const res = result.body;
|
||||
|
||||
if (res.errors) {
|
||||
const resultsWithErrors = res.items
|
||||
.filter(({ index }) => index?.error)
|
||||
.map(({ index }) => index?.error);
|
||||
for (const error of resultsWithErrors) {
|
||||
log.error(`Error: ${JSON.stringify(error)}`);
|
||||
}
|
||||
const [indexExists, dataStreamExists] = await Promise.all([
|
||||
es.indices.exists({ index: esIndex }),
|
||||
es.indices.getDataStream({ name: esIndex }),
|
||||
]);
|
||||
log.debug(`Index exists: ${indexExists} - Data stream exists: ${dataStreamExists}`);
|
||||
}
|
||||
log.info(`Indexed ${res.items.length} test data docs.`);
|
||||
};
|
||||
|
||||
export interface ScenarioIndexes {
|
||||
index: string;
|
||||
create?: boolean;
|
||||
downsample?: boolean;
|
||||
removeTSDBFields?: boolean;
|
||||
removeLogsDBFields?: boolean;
|
||||
mode?: 'tsdb' | 'logsdb';
|
||||
}
|
||||
type GetScenarioFn = (initialIndex: string) => Array<{
|
||||
name: string;
|
||||
indexes: ScenarioIndexes[];
|
||||
}>;
|
||||
|
||||
export function setupScenarioRunner(
|
||||
getService: FtrProviderContext['getService'],
|
||||
getPageObjects: FtrProviderContext['getPageObjects'],
|
||||
getScenario: GetScenarioFn
|
||||
) {
|
||||
const now = moment().utc();
|
||||
const fromMoment = now.clone().subtract(1, 'hour');
|
||||
const toMoment = now.clone();
|
||||
const fromTimeForScenarios = fromMoment.format(TIME_PICKER_FORMAT);
|
||||
const toTimeForScenarios = toMoment.format(TIME_PICKER_FORMAT);
|
||||
|
||||
function runTestsForEachScenario(
|
||||
initialIndex: string,
|
||||
scenarioMode: 'tsdb' | 'logsdb',
|
||||
testingFn: (indexes: ScenarioIndexes[]) => void
|
||||
): void {
|
||||
const { common, lens } = getPageObjects(['common', 'lens', 'dashboard']);
|
||||
const es = getService('es');
|
||||
const log = getService('log');
|
||||
const dataStreams = getService('dataStreams');
|
||||
const elasticChart = getService('elasticChart');
|
||||
const indexPatterns = getService('indexPatterns');
|
||||
const createDocs = getDocsGenerator(log, es, scenarioMode);
|
||||
|
||||
for (const { name, indexes } of getScenario(initialIndex)) {
|
||||
describe(name, () => {
|
||||
let dataViewName: string;
|
||||
let downsampledTargetIndex: string = '';
|
||||
|
||||
before(async () => {
|
||||
for (const {
|
||||
index,
|
||||
create,
|
||||
downsample,
|
||||
mode,
|
||||
removeTSDBFields,
|
||||
removeLogsDBFields,
|
||||
} of indexes) {
|
||||
// Validate the scenario config
|
||||
if (downsample && mode !== 'tsdb') {
|
||||
expect().fail('Cannot create a scenario with downsampled stream without tsdb');
|
||||
}
|
||||
// Kick off the creation
|
||||
const isStream = mode !== undefined;
|
||||
if (create) {
|
||||
if (isStream) {
|
||||
await dataStreams.createDataStream(
|
||||
index,
|
||||
getDataMapping({
|
||||
mode,
|
||||
removeTSDBFields: Boolean(removeTSDBFields || mode === 'logsdb'),
|
||||
removeLogsDBFields,
|
||||
}),
|
||||
mode
|
||||
);
|
||||
              } else {
                log.info(`creating an index "${index}" with mapping...`);
                await es.indices.create({
                  index,
                  mappings: {
                    properties: getDataMapping({
                      mode: mode === 'logsdb' ? 'logsdb' : 'tsdb', // default to the tsdb mapping when a regular index is specified
                      removeTSDBFields,
                      removeLogsDBFields,
                    }),
                  },
                });
              }
              // add data to the newly created index
              await createDocs(
                index,
                { isStream, removeTSDBFields, removeLogsDBFields },
                fromTimeForScenarios
              );
            }
            if (downsample) {
              downsampledTargetIndex = await dataStreams.downsampleTSDBIndex(index, {
                isStream: mode === 'tsdb',
              });
            }
          }
          dataViewName = `${indexes.map(({ index }) => index).join(',')}${
            downsampledTargetIndex ? `,${downsampledTargetIndex}` : ''
          }`;
          log.info(`creating a data view for "${dataViewName}"...`);
          await indexPatterns.create(
            {
              title: dataViewName,
              timeFieldName: '@timestamp',
            },
            { override: true }
          );
          await common.navigateToApp('lens');
          await elasticChart.setNewChartUiDebugFlag(true);
          // go to the time range covering the scenario data
          await lens.goToTimeRange(
            fromTimeForScenarios,
            moment
              .utc(toTimeForScenarios, TIME_PICKER_FORMAT)
              .add(2, 'hour')
              .format(TIME_PICKER_FORMAT) // consider also new documents
          );
        });

        after(async () => {
          for (const { index, create, mode: indexMode } of indexes) {
            if (create) {
              if (indexMode === 'tsdb' || indexMode === 'logsdb') {
                await dataStreams.deleteDataStream(index);
              } else {
                log.info(`deleting the index "${index}"...`);
                await es.indices.delete({
                  index,
                });
              }
            }
            // no need to clean the specific downsample index as everything linked to the stream
            // is cleaned up automatically
          }
        });

        beforeEach(async () => {
          await lens.switchDataPanelIndexPattern(dataViewName);
          await lens.removeLayer();
        });

        testingFn(indexes);
      });
    }
  }

  return { runTestsForEachScenario, fromTimeForScenarios, toTimeForScenarios };
}
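For orientation, a test file is expected to wire this runner to a scenario factory; a minimal sketch with illustrative index names (the real factories live in tsdb.ts and logsdb.ts below):

const getScenarios = (initialIndex: string) => [
  {
    name: 'stream with an additional TSDB stream',
    indexes: [{ index: initialIndex }, { index: 'extra_tsdb', create: true, mode: 'tsdb' }],
  },
];
const { runTestsForEachScenario } = setupScenarioRunner(getService, getPageObjects, getScenarios);
runTestsForEachScenario('data_stream', 'logsdb', (indexes) => {
  // it(...) blocks using the prepared data view go here
});
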
@@ -9,7 +9,7 @@ import type { MappingProperty } from '@elastic/elasticsearch/lib/api/types';
import type { FtrProviderContext } from '../ftr_provider_context';

/**
 * High level interface to operate with Elasticsearch data stream and TSDS.
 * High level interface to operate with Elasticsearch data stream and TSDS/LogsDB.
 */
export function DataStreamProvider({ getService, getPageObject }: FtrProviderContext) {
  const es = getService('es');

@@ -112,23 +112,45 @@ export function DataStreamProvider({ getService, getPageObject }: FtrProviderCon
  async function updateDataStreamTemplate(
    stream: string,
    mapping: Record<string, MappingProperty>,
    tsdb?: boolean
    mode?: 'tsdb' | 'logsdb'
  ) {
    await es.cluster.putComponentTemplate({
      name: `${stream}_mapping`,
      template: {
        settings: tsdb
          ? {
        settings: !mode
          ? { mode: undefined }
          : mode === 'logsdb'
          ? { mode: 'logsdb' }
          : {
              mode: 'time_series',
              routing_path: 'request',
            }
          : { mode: undefined },
        },
        mappings: {
          properties: mapping,
        },
      },
    });
    log.info(`Updating ${stream} index template${tsdb ? ' for TSDB' : ''}...`);
    // Uncomment only when needed
    // log.debug(`
    // PUT _component_template/${stream}_mappings
    // ${JSON.stringify({
    //   name: `${stream}_mapping`,
    //   template: {
    //     settings: !mode
    //       ? { mode: undefined }
    //       : mode === 'logsdb'
    //       ? { mode: 'logsdb' }
    //       : {
    //           mode: 'time_series',
    //           routing_path: 'request',
    //         },
    //     mappings: {
    //       properties: mapping,
    //     },
    //   },
    // }, null, 2)}
    // `);
    log.info(`Updating ${stream} index template${mode ? ` for ${mode.toUpperCase()}` : ''}...`);
    await es.indices.putIndexTemplate({
      name: `${stream}_index_template`,
      index_patterns: [stream],

@@ -138,71 +160,98 @@ export function DataStreamProvider({ getService, getPageObject }: FtrProviderCon
        description: `Template for ${stream} testing index`,
      },
    });
    // Uncomment only when needed
    // log.verbose(`
    // PUT _index_template/${stream}-index-template
    // ${JSON.stringify({
    //   name: `${stream}_index_template`,
    //   index_patterns: [stream],
    //   data_stream: {},
    //   composed_of: [`${stream}_mapping`],
    //   _meta: {
    //     description: `Template for ${stream} testing index`,
    //   },
    // }, null, 2)}
    // `);
  }

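To make the settings ternary above concrete, these are the three payloads it can produce, one per stream flavour (a sketch mirroring the commented request above; Elasticsearch reads the mode key as index.mode):

// settings: { mode: undefined }                               -> regular data stream
// settings: { mode: 'logsdb' }                                -> LogsDB stream
// settings: { mode: 'time_series', routing_path: 'request' }  -> TSDB/TSDS stream
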
  /**
   * "Upgrade" a given data stream into a time series data series (TSDB/TSDS)
   * "Upgrade" a given data stream into a TSDB or LogsDB data series
   * @param stream the data stream name
   * @param newMapping the new mapping already with time series metrics/dimensions configured
   */
  async function upgradeStreamToTSDB(stream: string, newMapping: Record<string, MappingProperty>) {
    // rollover to upgrade the index type to time_series
  async function upgradeStream(
    stream: string,
    newMapping: Record<string, MappingProperty>,
    mode: 'tsdb' | 'logsdb'
  ) {
    // rollover to upgrade the index type
    // uploading a new mapping for the stream index using the provided metric/dimension list
    log.info(`Updating ${stream} data stream component template with TSDB stuff...`);
    await updateDataStreamTemplate(stream, newMapping, true);
    log.info(`Updating ${stream} data stream component template with ${mode} stuff...`);
    await updateDataStreamTemplate(stream, newMapping, mode);

    log.info('Rolling over the backing index for TSDB');
    log.info(`Rolling over the backing index for ${mode}`);
    await es.indices.rollover({
      alias: stream,
    });
    // Uncomment only when needed
    // log.verbose(`POST ${stream}/_rollover`);
  }

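Worth spelling out why the rollover is needed: updating the templates does not touch the stream's current write index, so the rollover asks Elasticsearch to cut a new backing index, which is created from the updated template and thus picks up the new index.mode, while the older backing indices keep their previous mode. That is exactly what lets the mixed scenarios query data from "before" and "after" the upgrade in a single stream. As the commented log above suggests, the equivalent request is just:

// POST <stream>/_rollover
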
  /**
   * "Downgrade" a TSDB/TSDS data stream into a regular data stream
   * @param tsdbStream the TSDB/TSDS data stream to "downgrade"
   * "Downgrade" a TSDB/TSDS/LogsDB data stream into a regular data stream
   * @param stream the TSDB/TSDS/LogsDB data stream to "downgrade"
   * @param newMapping the mapping with the time series metrics/dimensions already removed
   */
  async function downgradeTSDBtoStream(
    tsdbStream: string,
    newMapping: Record<string, MappingProperty>
  async function downgradeStream(
    stream: string,
    newMapping: Record<string, MappingProperty>,
    mode: 'tsdb' | 'logsdb'
  ) {
    // strip out any time-series specific mapping
    for (const fieldMapping of Object.values(newMapping || {})) {
      if ('time_series_metric' in fieldMapping) {
        delete fieldMapping.time_series_metric;
      }
      if ('time_series_dimension' in fieldMapping) {
        delete fieldMapping.time_series_dimension;
    if (mode === 'tsdb') {
      // strip out any time-series specific mapping
      for (const fieldMapping of Object.values(newMapping || {})) {
        if ('time_series_metric' in fieldMapping) {
          delete fieldMapping.time_series_metric;
        }
        if ('time_series_dimension' in fieldMapping) {
          delete fieldMapping.time_series_dimension;
        }
      }
      log.info(`Updating ${stream} data stream component template with TSDB stuff...`);
      await updateDataStreamTemplate(stream, newMapping);
    }
    log.info(`Updating ${tsdbStream} data stream component template with TSDB stuff...`);
    await updateDataStreamTemplate(tsdbStream, newMapping, false);

    // rollover to downgrade the index type to regular stream
    log.info(`Rolling over the ${tsdbStream} data stream into a regular data stream...`);
    log.info(`Rolling over the ${stream} data stream into a regular data stream...`);
    await es.indices.rollover({
      alias: tsdbStream,
      alias: stream,
    });
    // Uncomment only when needed
    // log.debug(`POST ${stream}/_rollover`);
  }

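A hypothetical round trip through the two helpers, for orientation (the stream name is illustrative, and getDataMapping here stands in for the mapping builder from the test helpers):

await dataStreams.upgradeStream('my_stream', getDataMapping({ mode: 'logsdb' }), 'logsdb');
// ...index documents against the new LogsDB backing index...
await dataStreams.downgradeStream('my_stream', getDataMapping({ mode: 'logsdb' }), 'logsdb');
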
  /**
   * Takes care of the entire process to create a data stream
   * @param streamIndex name of the new data stream to create
   * @param mappings the mapping to associate with the data stream
   * @param tsdb when enabled it will configure the data stream as a TSDB/TSDS
   * @param mode when passed it will configure the data stream as a TSDB/TSDS or LogsDB
   */
  async function createDataStream(
    streamIndex: string,
    mappings: Record<string, MappingProperty>,
    tsdb: boolean = true
    mode: 'tsdb' | 'logsdb' | undefined
  ) {
    log.info(`Creating ${streamIndex} data stream component template...`);

    await updateDataStreamTemplate(streamIndex, mappings, tsdb);
    await updateDataStreamTemplate(streamIndex, mappings, mode);

    log.info(`Creating ${streamIndex} data stream index...`);
    await es.indices.createDataStream({
      name: streamIndex,
    });
    // Uncomment only when needed
    // log.debug(`PUT _data_stream/${streamIndex}`);
  }

  /**

@@ -212,21 +261,27 @@ export function DataStreamProvider({ getService, getPageObject }: FtrProviderCon
  async function deleteDataStream(streamIndex: string) {
    log.info(`Delete ${streamIndex} data stream index...`);
    await es.indices.deleteDataStream({ name: streamIndex });
    // Uncomment only when needed
    // log.debug(`DELETE _data_stream/${streamIndex}`);
    log.info(`Delete ${streamIndex} index template...`);
    await es.indices.deleteIndexTemplate({
      name: `${streamIndex}_index_template`,
    });
    // Uncomment only when needed
    // log.debug(`DELETE _index_template/${streamIndex}-index-template`);
    log.info(`Delete ${streamIndex} data stream component template...`);
    await es.cluster.deleteComponentTemplate({
      name: `${streamIndex}_mapping`,
    });
    // Uncomment only when needed
    // log.debug(`DELETE _component_template/${streamIndex}_mappings`);
  }

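One detail worth keeping in mind: the deletion order above is deliberate. Elasticsearch refuses to delete an index template while a data stream that matches it still exists, so the stream goes first, then the index template, then the component template it composes; a sketch of what reversing it would do (name illustrative):

// DELETE _index_template/my_stream_index_template  -> rejected while 'my_stream' exists
await es.indices.deleteDataStream({ name: 'my_stream' }); // must come first
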
  return {
    createDataStream,
    deleteDataStream,
    downsampleTSDBIndex,
    upgradeStreamToTSDB,
    downgradeTSDBtoStream,
    upgradeStream,
    downgradeStream,
  };
}
@@ -75,6 +75,7 @@ export default ({ getService, loadTestFile, getPageObjects }: FtrProviderContext

    loadTestFile(require.resolve('./smokescreen.ts'));
    loadTestFile(require.resolve('./tsdb.ts'));
    loadTestFile(require.resolve('./logsdb.ts'));
    loadTestFile(require.resolve('./vega_chart.ts'));
  });
};
@@ -0,0 +1,586 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import moment from 'moment';
import { FtrProviderContext } from '../../../../ftr_provider_context';
import {
  type ScenarioIndexes,
  getDataMapping,
  getDocsGenerator,
  setupScenarioRunner,
  TIME_PICKER_FORMAT,
} from './tsdb_logsdb_helpers';

export default function ({ getService, getPageObjects }: FtrProviderContext) {
  const { common, lens, discover, header } = getPageObjects([
    'common',
    'lens',
    'discover',
    'header',
  ]);
  const testSubjects = getService('testSubjects');
  const find = getService('find');
  const kibanaServer = getService('kibanaServer');
  const es = getService('es');
  const log = getService('log');
  const dataStreams = getService('dataStreams');
  const indexPatterns = getService('indexPatterns');
  const esArchiver = getService('esArchiver');
  const monacoEditor = getService('monacoEditor');
  const retry = getService('retry');

  const createDocs = getDocsGenerator(log, es, 'logsdb');

  describe('lens logsdb', function () {
    const logsdbIndex = 'kibana_sample_data_logslogsdb';
    const logsdbDataView = logsdbIndex;
    const logsdbEsArchive = 'test/functional/fixtures/es_archiver/kibana_sample_data_logs_logsdb';
    const fromTime = 'Apr 16, 2023 @ 00:00:00.000';
    const toTime = 'Jun 16, 2023 @ 00:00:00.000';

    before(async () => {
      log.info(`loading ${logsdbIndex} index...`);
      await esArchiver.loadIfNeeded(logsdbEsArchive);
      log.info(`creating a data view for "${logsdbDataView}"...`);
      await indexPatterns.create(
        {
          title: logsdbDataView,
          timeFieldName: '@timestamp',
        },
        { override: true }
      );
      log.info(`updating settings to use the "${logsdbDataView}" dataView...`);
      await kibanaServer.uiSettings.update({
        'dateFormat:tz': 'UTC',
        defaultIndex: '0ae0bc7a-e4ca-405c-ab67-f2b5913f2a51',
        'timepicker:timeDefaults': `{ "from": "${fromTime}", "to": "${toTime}" }`,
      });
    });

    after(async () => {
      await kibanaServer.savedObjects.cleanStandardList();
      await kibanaServer.uiSettings.replace({});
      await es.indices.delete({ index: [logsdbIndex] });
    });

    describe('smoke testing functions support', () => {
      before(async () => {
        await common.navigateToApp('lens');
        await lens.switchDataPanelIndexPattern(logsdbDataView);
        await lens.goToTimeRange();
      });

      afterEach(async () => {
        await lens.removeLayer();
      });

      // skip count for now as it's a special function and will
      // automatically change the unsupported field to Records when detected
      const allOperations = [
        'average',
        'max',
        'last_value',
        'median',
        'percentile',
        'percentile_rank',
        'standard_deviation',
        'sum',
        'unique_count',
        'min',
        'max',
        'counter_rate',
        'last_value',
      ];

      it(`should work with all operations`, async () => {
        // start from a count() over a date histogram
        await lens.configureDimension({
          dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
          operation: 'date_histogram',
          field: '@timestamp',
        });

        // count works on any logsdb field type
        await lens.configureDimension({
          dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
          operation: 'count',
          field: 'bytes',
          keepOpen: true,
        });

        // now check that operations won't show the incompatibility tooltip
        for (const operation of allOperations) {
          expect(
            testSubjects.exists(`lns-indexPatternDimension-${operation} incompatible`, {
              timeout: 500,
            })
          ).to.eql(false);
        }

        for (const operation of allOperations) {
          // try to change to the provided function and check all is ok
          await lens.selectOperation(operation);

          expect(
            await find.existsByCssSelector(
              '[data-test-subj="indexPattern-field-selection-row"] .euiFormErrorText'
            )
          ).to.be(false);
        }
        await lens.closeDimensionEditor();
      });

      describe('Scenarios with changing stream type', () => {
        const getScenarios = (
          initialIndex: string
        ): Array<{
          name: string;
          indexes: ScenarioIndexes[];
        }> => [
          {
            name: 'LogsDB stream with no additional stream/index',
            indexes: [{ index: initialIndex }],
          },
          {
            name: 'LogsDB stream with no additional stream/index and no host.name field',
            indexes: [
              {
                index: `${initialIndex}_no_host`,
                removeLogsDBFields: true,
                create: true,
                mode: 'logsdb',
              },
            ],
          },
          {
            name: 'LogsDB stream with an additional regular index',
            indexes: [{ index: initialIndex }, { index: 'regular_index', create: true }],
          },
          {
            name: 'LogsDB stream with an additional LogsDB stream',
            indexes: [
              { index: initialIndex },
              { index: 'logsdb_index_2', create: true, mode: 'logsdb' },
            ],
          },
          {
            name: 'LogsDB stream with an additional TSDB stream',
            indexes: [{ index: initialIndex }, { index: 'tsdb_index', create: true, mode: 'tsdb' }],
          },
          {
            name: 'LogsDB stream with an additional TSDB stream downsampled',
            indexes: [
              { index: initialIndex },
              { index: 'tsdb_index_downsampled', create: true, mode: 'tsdb', downsample: true },
            ],
          },
        ];

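As a concrete example of how these scenarios materialize, the runner joins the index names into the data view title, so the 'LogsDB stream with an additional TSDB stream' entry above ends up querying a data view like this (assuming initialIndex === 'data_stream'):

// dataViewName === 'data_stream,tsdb_index'
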
        const { runTestsForEachScenario, toTimeForScenarios, fromTimeForScenarios } =
          setupScenarioRunner(getService, getPageObjects, getScenarios);

        describe('Data-stream upgraded to LogsDB scenarios', () => {
          const streamIndex = 'data_stream';
          // a rollover cannot rename the stream, it only swaps the backing index underneath
          const streamConvertedToLogsDBIndex = streamIndex;

          before(async () => {
            log.info(`Creating "${streamIndex}" data stream...`);
            await dataStreams.createDataStream(
              streamIndex,
              getDataMapping({ mode: 'logsdb' }),
              undefined
            );

            // add some data to the stream
            await createDocs(streamIndex, { isStream: true }, fromTimeForScenarios);

            log.info(`Update settings for "${streamIndex}" dataView...`);
            await kibanaServer.uiSettings.update({
              'dateFormat:tz': 'UTC',
              'timepicker:timeDefaults': '{ "from": "now-1y", "to": "now" }',
            });
            log.info(`Upgrade "${streamIndex}" stream to LogsDB...`);

            const logsdbMapping = getDataMapping({ mode: 'logsdb' });
            await dataStreams.upgradeStream(streamIndex, logsdbMapping, 'logsdb');
            log.info(
              `Add more data to new "${streamConvertedToLogsDBIndex}" dataView (now with LogsDB backing index)...`
            );
            // add some more data when upgraded
            await createDocs(streamConvertedToLogsDBIndex, { isStream: true }, toTimeForScenarios);
          });

          after(async () => {
            await dataStreams.deleteDataStream(streamIndex);
          });

          runTestsForEachScenario(streamConvertedToLogsDBIndex, 'logsdb', (indexes) => {
            it(`should visualize a date histogram chart`, async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: '@timestamp',
              });

              // check that a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });

              await lens.waitForVisualization('xyVisChart');
              const data = await lens.getCurrentChartDebugState('xyVisChart');
              const bars = data?.bars![0].bars;

              log.info('Check counter data before the upgrade');
              // check there's some data before the upgrade
              expect(bars?.[0].y).to.be.above(0);
              log.info('Check counter data after the upgrade');
              // check there's some data after the upgrade
              expect(bars?.[bars.length - 1].y).to.be.above(0);
            });

            it(`should visualize a date histogram chart using a different date field`, async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });

              await lens.waitForVisualization('xyVisChart');
              const data = await lens.getCurrentChartDebugState('xyVisChart');
              const bars = data?.bars![0].bars;

              log.info('Check counter data before the upgrade');
              // check there's some data before the upgrade
              expect(bars?.[0].y).to.be.above(0);
              log.info('Check counter data after the upgrade');
              // check there's some data after the upgrade
              expect(bars?.[bars.length - 1].y).to.be.above(0);
            });

            it('should visualize an annotation layer from a logsDB stream', async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });
              await lens.createLayer('annotations');

              expect(
                (await find.allByCssSelector(`[data-test-subj^="lns-layerPanel-"]`)).length
              ).to.eql(2);
              expect(
                await (
                  await testSubjects.find('lnsXY_xAnnotationsPanel > lns-dimensionTrigger')
                ).getVisibleText()
              ).to.eql('Event');
              await testSubjects.click('lnsXY_xAnnotationsPanel > lns-dimensionTrigger');
              await testSubjects.click('lnsXY_annotation_query');
              await lens.configureQueryAnnotation({
                queryString: 'host.name: *',
                timeField: '@timestamp',
                textDecoration: { type: 'name' },
                extraFields: ['host.name', 'utc_time'],
              });
              await lens.closeDimensionEditor();

              await testSubjects.existOrFail('xyVisGroupedAnnotationIcon');
              await lens.removeLayer(1);
            });

            it('should visualize an annotation layer from a logsDB stream using another time field', async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });
              await lens.createLayer('annotations');

              expect(
                (await find.allByCssSelector(`[data-test-subj^="lns-layerPanel-"]`)).length
              ).to.eql(2);
              expect(
                await (
                  await testSubjects.find('lnsXY_xAnnotationsPanel > lns-dimensionTrigger')
                ).getVisibleText()
              ).to.eql('Event');
              await testSubjects.click('lnsXY_xAnnotationsPanel > lns-dimensionTrigger');
              await testSubjects.click('lnsXY_annotation_query');
              await lens.configureQueryAnnotation({
                queryString: 'host.name: *',
                timeField: 'utc_time',
                textDecoration: { type: 'name' },
                extraFields: ['host.name', '@timestamp'],
              });
              await lens.closeDimensionEditor();

              await testSubjects.existOrFail('xyVisGroupedAnnotationIcon');
              await lens.removeLayer(1);
            });

            it('should visualize correctly ES|QL queries based on a LogsDB stream', async () => {
              await common.navigateToApp('discover');
              await discover.selectTextBaseLang();
              await header.waitUntilLoadingHasFinished();
              await monacoEditor.setCodeEditorValue(
                `from ${indexes
                  .map(({ index }) => index)
                  .join(', ')} | stats averageB = avg(bytes) by extension`
              );
              await testSubjects.click('querySubmitButton');
              await header.waitUntilLoadingHasFinished();
              await testSubjects.click('unifiedHistogramEditFlyoutVisualization');

              await header.waitUntilLoadingHasFinished();

              await retry.waitFor('lens flyout', async () => {
                const dimensions = await testSubjects.findAll('lns-dimensionTrigger-textBased');
                return (
                  dimensions.length === 2 && (await dimensions[1].getVisibleText()) === 'averageB'
                );
              });

              // go back to Lens to not break the wrapping function
              await common.navigateToApp('lens');
            });
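For reference, with the 'LogsDB stream with an additional TSDB stream' scenario the editor value assembled above expands to an ES|QL query along these lines (index names as defined in getScenarios, assuming initialIndex === 'data_stream'):

// from data_stream, tsdb_index | stats averageB = avg(bytes) by extension
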
          });
        });

        describe('LogsDB downgraded to regular data stream scenarios', () => {
          const logsdbStream = 'logsdb_stream_dowgradable';
          // a rollover cannot rename the stream, it only swaps the backing index underneath
          const logsdbConvertedToStream = logsdbStream;

          before(async () => {
            log.info(`Creating "${logsdbStream}" data stream...`);
            await dataStreams.createDataStream(
              logsdbStream,
              getDataMapping({ mode: 'logsdb' }),
              'logsdb'
            );

            // add some data to the stream
            await createDocs(logsdbStream, { isStream: true }, fromTimeForScenarios);

            log.info(`Update settings for "${logsdbStream}" dataView...`);
            await kibanaServer.uiSettings.update({
              'dateFormat:tz': 'UTC',
              'timepicker:timeDefaults': '{ "from": "now-1y", "to": "now" }',
            });
            log.info(
              `Downgrade "${logsdbStream}" stream into regular stream "${logsdbConvertedToStream}"...`
            );

            await dataStreams.downgradeStream(
              logsdbStream,
              getDataMapping({ mode: 'logsdb' }),
              'logsdb'
            );
            log.info(
              `Add more data to new "${logsdbConvertedToStream}" dataView (no longer LogsDB)...`
            );
            // add some more data once downgraded
            await createDocs(logsdbConvertedToStream, { isStream: true }, toTimeForScenarios);
          });

          after(async () => {
            await dataStreams.deleteDataStream(logsdbConvertedToStream);
          });

          runTestsForEachScenario(logsdbConvertedToStream, 'logsdb', (indexes) => {
            it(`should visualize a date histogram chart`, async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: '@timestamp',
              });

              // check that a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });

              await lens.waitForVisualization('xyVisChart');
              const data = await lens.getCurrentChartDebugState('xyVisChart');
              const bars = data?.bars![0].bars;

              log.info('Check counter data before the upgrade');
              // check there's some data before the downgrade
              expect(bars?.[0].y).to.be.above(0);
              log.info('Check counter data after the upgrade');
              // check there's some data after the downgrade
              expect(bars?.[bars.length - 1].y).to.be.above(0);
            });

            it(`should visualize a date histogram chart using a different date field`, async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });

              await lens.waitForVisualization('xyVisChart');
              const data = await lens.getCurrentChartDebugState('xyVisChart');
              const bars = data?.bars![0].bars;

              log.info('Check counter data before the upgrade');
              // check there's some data before the downgrade
              expect(bars?.[0].y).to.be.above(0);
              log.info('Check counter data after the upgrade');
              // check there's some data after the downgrade
              expect(bars?.[bars.length - 1].y).to.be.above(0);
            });

            it('should visualize an annotation layer from a logsDB stream', async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });
              await lens.createLayer('annotations');

              expect(
                (await find.allByCssSelector(`[data-test-subj^="lns-layerPanel-"]`)).length
              ).to.eql(2);
              expect(
                await (
                  await testSubjects.find('lnsXY_xAnnotationsPanel > lns-dimensionTrigger')
                ).getVisibleText()
              ).to.eql('Event');
              await testSubjects.click('lnsXY_xAnnotationsPanel > lns-dimensionTrigger');
              await testSubjects.click('lnsXY_annotation_query');
              await lens.configureQueryAnnotation({
                queryString: 'host.name: *',
                timeField: '@timestamp',
                textDecoration: { type: 'name' },
                extraFields: ['host.name', 'utc_time'],
              });
              await lens.closeDimensionEditor();

              await testSubjects.existOrFail('xyVisGroupedAnnotationIcon');
              await lens.removeLayer(1);
            });

            it('should visualize an annotation layer from a logsDB stream using another time field', async () => {
              await lens.configureDimension({
                dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
                operation: 'date_histogram',
                field: 'utc_time',
              });

              // check a basic agg on a field works
              await lens.configureDimension({
                dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
                operation: 'min',
                field: `bytes`,
              });
              await lens.createLayer('annotations');

              expect(
                (await find.allByCssSelector(`[data-test-subj^="lns-layerPanel-"]`)).length
              ).to.eql(2);
              expect(
                await (
                  await testSubjects.find('lnsXY_xAnnotationsPanel > lns-dimensionTrigger')
                ).getVisibleText()
              ).to.eql('Event');
              await testSubjects.click('lnsXY_xAnnotationsPanel > lns-dimensionTrigger');
              await testSubjects.click('lnsXY_annotation_query');
              await lens.configureQueryAnnotation({
                queryString: 'host.name: *',
                timeField: 'utc_time',
                textDecoration: { type: 'name' },
                extraFields: ['host.name', '@timestamp'],
              });
              await lens.closeDimensionEditor();

              await testSubjects.existOrFail('xyVisGroupedAnnotationIcon');
              await lens.removeLayer(1);
            });

            it('should visualize correctly ES|QL queries based on a LogsDB stream', async () => {
              await common.navigateToApp('discover');
              await discover.selectTextBaseLang();

              // Use the lens page object here also for discover: both use the same timePicker object
              await lens.goToTimeRange(
                fromTimeForScenarios,
                moment
                  .utc(toTimeForScenarios, TIME_PICKER_FORMAT)
                  .add(2, 'hour')
                  .format(TIME_PICKER_FORMAT)
              );

              await header.waitUntilLoadingHasFinished();
              await monacoEditor.setCodeEditorValue(
                `from ${indexes
                  .map(({ index }) => index)
                  .join(', ')} | stats averageB = avg(bytes) by extension`
              );
              await testSubjects.click('querySubmitButton');
              await header.waitUntilLoadingHasFinished();
              await testSubjects.click('unifiedHistogramEditFlyoutVisualization');

              await header.waitUntilLoadingHasFinished();

              await retry.waitFor('lens flyout', async () => {
                const dimensions = await testSubjects.findAll('lns-dimensionTrigger-textBased');
                return (
                  dimensions.length === 2 && (await dimensions[1].getVisibleText()) === 'averageB'
                );
              });

              // go back to Lens to not break the wrapping function
              await common.navigateToApp('lens');
            });
          });
        });
      });
    });
  });
}
@@ -8,239 +8,20 @@
import expect from '@kbn/expect';
import { partition } from 'lodash';
import moment from 'moment';
import { MappingProperty } from '@elastic/elasticsearch/lib/api/types';
import { FtrProviderContext } from '../../../../ftr_provider_context';

const TEST_DOC_COUNT = 100;
const TIME_PICKER_FORMAT = 'MMM D, YYYY [@] HH:mm:ss.SSS';
const timeSeriesMetrics: Record<string, 'gauge' | 'counter'> = {
  bytes_gauge: 'gauge',
  bytes_counter: 'counter',
};
const timeSeriesDimensions = ['request', 'url'];

type TestDoc = Record<string, string | string[] | number | null | Record<string, unknown>>;

const testDocTemplate: TestDoc = {
  agent: 'Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1',
  bytes: 6219,
  clientip: '223.87.60.27',
  extension: 'deb',
  geo: {
    srcdest: 'US:US',
    src: 'US',
    dest: 'US',
    coordinates: { lat: 39.41042861, lon: -88.8454325 },
  },
  host: 'artifacts.elastic.co',
  index: 'kibana_sample_data_logs',
  ip: '223.87.60.27',
  machine: { ram: 8589934592, os: 'win 8' },
  memory: null,
  message:
    '223.87.60.27 - - [2018-07-22T00:39:02.912Z] "GET /elasticsearch/elasticsearch-6.3.2.deb_1 HTTP/1.1" 200 6219 "-" "Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1"',
  phpmemory: null,
  referer: 'http://twitter.com/success/wendy-lawrence',
  request: '/elasticsearch/elasticsearch-6.3.2.deb',
  response: 200,
  tags: ['success', 'info'],
  '@timestamp': '2018-07-22T00:39:02.912Z',
  url: 'https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.3.2.deb_1',
  utc_time: '2018-07-22T00:39:02.912Z',
  event: { dataset: 'sample_web_logs' },
  bytes_gauge: 0,
  bytes_counter: 0,
};

function getDataMapping(
  { tsdb, removeTSDBFields }: { tsdb: boolean; removeTSDBFields?: boolean } = {
    tsdb: false,
  }
): Record<string, MappingProperty> {
  const dataStreamMapping: Record<string, MappingProperty> = {
    '@timestamp': {
      type: 'date',
    },
    agent: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    bytes: {
      type: 'long',
    },
    bytes_counter: {
      type: 'long',
    },
    bytes_gauge: {
      type: 'long',
    },
    clientip: {
      type: 'ip',
    },
    event: {
      properties: {
        dataset: {
          type: 'keyword',
        },
      },
    },
    extension: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    geo: {
      properties: {
        coordinates: {
          type: 'geo_point',
        },
        dest: {
          type: 'keyword',
        },
        src: {
          type: 'keyword',
        },
        srcdest: {
          type: 'keyword',
        },
      },
    },
    host: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    index: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    ip: {
      type: 'ip',
    },
    machine: {
      properties: {
        os: {
          fields: {
            keyword: {
              ignore_above: 256,
              type: 'keyword',
            },
          },
          type: 'text',
        },
        ram: {
          type: 'long',
        },
      },
    },
    memory: {
      type: 'double',
    },
    message: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    phpmemory: {
      type: 'long',
    },
    referer: {
      type: 'keyword',
    },
    request: {
      type: 'keyword',
    },
    response: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    tags: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    timestamp: {
      path: '@timestamp',
      type: 'alias',
    },
    url: {
      type: 'keyword',
    },
    utc_time: {
      type: 'date',
    },
  };

  if (tsdb) {
    // augment the current mapping
    for (const [fieldName, fieldMapping] of Object.entries(dataStreamMapping || {})) {
      if (
        timeSeriesMetrics[fieldName] &&
        (fieldMapping.type === 'double' || fieldMapping.type === 'long')
      ) {
        fieldMapping.time_series_metric = timeSeriesMetrics[fieldName];
      }

      if (timeSeriesDimensions.includes(fieldName) && fieldMapping.type === 'keyword') {
        fieldMapping.time_series_dimension = true;
      }
    }
  } else if (removeTSDBFields) {
    for (const fieldName of Object.keys(timeSeriesMetrics)) {
      delete dataStreamMapping[fieldName];
    }
  }
  return dataStreamMapping;
}

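To make the tsdb branch above concrete, this is the shape it produces for one metric and one dimension field (a sketch of the augmented mapping, assuming tsdb: true):

// bytes_gauge: { type: 'long', time_series_metric: 'gauge' }
// request: { type: 'keyword', time_series_dimension: true }
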
function sumFirstNValues(n: number, bars: Array<{ y: number }>): number {
  const indexes = Array(n)
    .fill(1)
    .map((_, i) => i);
  let countSum = 0;
  for (const index of indexes) {
    if (bars[index]) {
      countSum += bars[index].y;
    }
  }
  return countSum;
}
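A quick illustration of the helper above, which the assertions use to aggregate the first few chart bars:

// sumFirstNValues(3, [{ y: 1 }, { y: 2 }, { y: 4 }, { y: 8 }]) === 7
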
import {
  type ScenarioIndexes,
  TEST_DOC_COUNT,
  TIME_PICKER_FORMAT,
  getDataMapping,
  getDocsGenerator,
  setupScenarioRunner,
  sumFirstNValues,
} from './tsdb_logsdb_helpers';

export default function ({ getService, getPageObjects }: FtrProviderContext) {
  const PageObjects = getPageObjects([
  const { common, lens, dashboard, svlCommonPage } = getPageObjects([
    'common',
    'timePicker',
    'lens',
    'dashboard',
    'svlCommonPage',

@@ -251,71 +32,11 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
  const es = getService('es');
  const log = getService('log');
  const dataStreams = getService('dataStreams');
  const elasticChart = getService('elasticChart');
  const indexPatterns = getService('indexPatterns');
  const esArchiver = getService('esArchiver');
  const comboBox = getService('comboBox');

  const createDocs = async (
    esIndex: string,
    { isStream, removeTSDBFields }: { isStream: boolean; removeTSDBFields?: boolean },
    startTime: string
  ) => {
    log.info(
      `Adding ${TEST_DOC_COUNT} to ${esIndex} with starting time from ${moment
        .utc(startTime, TIME_PICKER_FORMAT)
        .format(TIME_PICKER_FORMAT)} to ${moment
        .utc(startTime, TIME_PICKER_FORMAT)
        .add(2 * TEST_DOC_COUNT, 'seconds')
        .format(TIME_PICKER_FORMAT)}`
    );
    const docs = Array<TestDoc>(TEST_DOC_COUNT)
      .fill(testDocTemplate)
      .map((templateDoc, i) => {
        const timestamp = moment
          .utc(startTime, TIME_PICKER_FORMAT)
          .add(TEST_DOC_COUNT + i, 'seconds')
          .format();
        const doc: TestDoc = {
          ...templateDoc,
          '@timestamp': timestamp,
          utc_time: timestamp,
          bytes_gauge: Math.floor(Math.random() * 10000 * i),
          bytes_counter: 5000,
        };
        if (removeTSDBFields) {
          for (const field of Object.keys(timeSeriesMetrics)) {
            delete doc[field];
          }
        }
        return doc;
      });

    const result = await es.bulk(
      {
        index: esIndex,
        body: docs.map((d) => `{"${isStream ? 'create' : 'index'}": {}}\n${JSON.stringify(d)}\n`),
      },
      { meta: true }
    );

    const res = result.body;

    if (res.errors) {
      const resultsWithErrors = res.items
        .filter(({ index }) => index?.error)
        .map(({ index }) => index?.error);
      for (const error of resultsWithErrors) {
        log.error(`Error: ${JSON.stringify(error)}`);
      }
      const [indexExists, dataStreamExists] = await Promise.all([
        es.indices.exists({ index: esIndex }),
        es.indices.getDataStream({ name: esIndex }),
      ]);
      log.debug(`Index exists: ${indexExists} - Data stream exists: ${dataStreamExists}`);
    }
    log.info(`Indexed ${res.items.length} test data docs.`);
  };
  const createDocs = getDocsGenerator(log, es, 'tsdb');

  describe('lens tsdb', function () {
    const tsdbIndex = 'kibana_sample_data_logstsdb';

@@ -325,7 +46,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
    const toTime = 'Jun 16, 2023 @ 00:00:00.000';

    before(async () => {
      await PageObjects.svlCommonPage.loginAsAdmin();
      await svlCommonPage.loginAsAdmin();
      log.info(`loading ${tsdbIndex} index...`);
      await esArchiver.loadIfNeeded(tsdbEsArchive);
      log.info(`creating a data view for "${tsdbDataView}"...`);

@@ -375,48 +96,48 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {

    describe('for regular metric', () => {
      it('defaults to median for non-rolled up metric', async () => {
        await PageObjects.common.navigateToApp('lens');
        await PageObjects.lens.switchDataPanelIndexPattern(tsdbDataView);
        await PageObjects.lens.waitForField('bytes_gauge');
        await PageObjects.lens.dragFieldToWorkspace('bytes_gauge', 'xyVisChart');
        expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_yDimensionPanel')).to.eql(
        await common.navigateToApp('lens');
        await lens.switchDataPanelIndexPattern(tsdbDataView);
        await lens.waitForField('bytes_gauge');
        await lens.dragFieldToWorkspace('bytes_gauge', 'xyVisChart');
        expect(await lens.getDimensionTriggerText('lnsXY_yDimensionPanel')).to.eql(
          'Median of bytes_gauge'
        );
      });

      it('does not show a warning', async () => {
        await PageObjects.lens.openDimensionEditor('lnsXY_yDimensionPanel');
        await lens.openDimensionEditor('lnsXY_yDimensionPanel');
        await testSubjects.missingOrFail('median-partial-warning');
        await PageObjects.lens.assertNoEditorWarning();
        await PageObjects.lens.closeDimensionEditor();
        await lens.assertNoEditorWarning();
        await lens.closeDimensionEditor();
      });
    });

    describe('for rolled up metric (downsampled)', () => {
      it('defaults to average for rolled up metric', async () => {
        await PageObjects.lens.switchDataPanelIndexPattern(downsampleDataView.dataView);
        await PageObjects.lens.removeLayer();
        await PageObjects.lens.waitForField('bytes_gauge');
        await PageObjects.lens.dragFieldToWorkspace('bytes_gauge', 'xyVisChart');
        expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_yDimensionPanel')).to.eql(
        await lens.switchDataPanelIndexPattern(downsampleDataView.dataView);
        await lens.removeLayer();
        await lens.waitForField('bytes_gauge');
        await lens.dragFieldToWorkspace('bytes_gauge', 'xyVisChart');
        expect(await lens.getDimensionTriggerText('lnsXY_yDimensionPanel')).to.eql(
          'Average of bytes_gauge'
        );
      });
      it('shows warnings in editor when using median', async () => {
        await PageObjects.lens.openDimensionEditor('lnsXY_yDimensionPanel');
        await lens.openDimensionEditor('lnsXY_yDimensionPanel');
        await testSubjects.existOrFail('median-partial-warning');
        await testSubjects.click('lns-indexPatternDimension-median');
        await PageObjects.lens.waitForVisualization('xyVisChart');
        await PageObjects.lens.assertMessageListContains(
        await lens.waitForVisualization('xyVisChart');
        await lens.assertMessageListContains(
          'Median of bytes_gauge uses a function that is unsupported by rolled up data. Select a different function or change the time range.',
          'warning'
        );
      });
      it('shows warnings in dashboards as well', async () => {
        await PageObjects.lens.save('New', false, false, false, 'new');
        await lens.save('New', false, false, false, 'new');

        await PageObjects.dashboard.waitForRenderComplete();
        await PageObjects.lens.assertMessageListContains(
        await dashboard.waitForRenderComplete();
        await lens.assertMessageListContains(
          'Median of bytes_gauge uses a function that is unsupported by rolled up data. Select a different function or change the time range.',
          'warning'
        );

@@ -426,13 +147,13 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {

    describe('time series special field types support', () => {
      before(async () => {
        await PageObjects.common.navigateToApp('lens');
        await PageObjects.lens.switchDataPanelIndexPattern(tsdbDataView);
        await PageObjects.lens.goToTimeRange();
        await common.navigateToApp('lens');
        await lens.switchDataPanelIndexPattern(tsdbDataView);
        await lens.goToTimeRange();
      });

      afterEach(async () => {
        await PageObjects.lens.removeLayer();
        await lens.removeLayer();
      });

      // skip count for now as it's a special function and will

@@ -467,14 +188,14 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
        if (supportedOperations.length) {
          it(`should allow operations when supported by ${fieldType} field type`, async () => {
            // Counter rate requires a date histogram dimension configured to work
            await PageObjects.lens.configureDimension({
            await lens.configureDimension({
              dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
              operation: 'date_histogram',
              field: '@timestamp',
            });

            // minimum supports all tsdb field types
            await PageObjects.lens.configureDimension({
            await lens.configureDimension({
              dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
              operation: 'min',
              field: `bytes_${fieldType}`,

@@ -492,7 +213,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {

            for (const supportedOp of supportedOperations) {
              // try to change to the provided function and check all is ok
              await PageObjects.lens.selectOperation(supportedOp.name);
              await lens.selectOperation(supportedOp.name);

              expect(
                await find.existsByCssSelector(

@@ -501,22 +222,22 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
              ).to.be(false);

              // return in a clean state before checking the next operation
              await PageObjects.lens.selectOperation('min');
              await lens.selectOperation('min');
            }
            await PageObjects.lens.closeDimensionEditor();
            await lens.closeDimensionEditor();
          });
        }
        if (unsupportedOperatons.length) {
          it(`should notify the incompatibility of unsupported operations for the ${fieldType} field type`, async () => {
            // Counter rate requires a date histogram dimension configured to work
            await PageObjects.lens.configureDimension({
            await lens.configureDimension({
              dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
              operation: 'date_histogram',
              field: '@timestamp',
            });

            // minimum supports all tsdb field types
            await PageObjects.lens.configureDimension({
            await lens.configureDimension({
              dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
              operation: 'min',
              field: `bytes_${fieldType}`,

@@ -537,7 +258,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {

            for (const unsupportedOp of unsupportedOperatons) {
              // try to change to the provided function and check if it's in an incompatibility state
              await PageObjects.lens.selectOperation(unsupportedOp.name, true);
              await lens.selectOperation(unsupportedOp.name, true);

              const fieldSelectErrorEl = await find.byCssSelector(
                '[data-test-subj="indexPattern-field-selection-row"] .euiFormErrorText'

@@ -548,28 +269,28 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
              );

              // return in a clean state before checking the next operation
              await PageObjects.lens.selectOperation('min');
              await lens.selectOperation('min');
            }
            await PageObjects.lens.closeDimensionEditor();
            await lens.closeDimensionEditor();
          });
        }
      }

      describe('show time series dimension groups within breakdown', () => {
        it('should show the time series dimension group on field picker when configuring a breakdown', async () => {
          await PageObjects.lens.configureDimension({
          await lens.configureDimension({
            dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
            operation: 'date_histogram',
            field: '@timestamp',
          });

          await PageObjects.lens.configureDimension({
          await lens.configureDimension({
            dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
            operation: 'min',
            field: 'bytes_counter',
          });

          await PageObjects.lens.configureDimension({
          await lens.configureDimension({
            dimension: 'lnsXY_splitDimensionPanel > lns-empty-dimension',
            operation: 'terms',
            keepOpen: true,

@@ -577,46 +298,34 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {

          const list = await comboBox.getOptionsList('indexPattern-dimension-field');
          expect(list).to.contain('Time series dimensions');
          await PageObjects.lens.closeDimensionEditor();
          await lens.closeDimensionEditor();
        });

        it("should not show the time series dimension group on field picker if it's not a breakdown", async () => {
          await PageObjects.lens.configureDimension({
          await lens.configureDimension({
            dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
            operation: 'min',
            field: 'bytes_counter',
          });

          await PageObjects.lens.configureDimension({
          await lens.configureDimension({
            dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
            operation: 'date_histogram',
            keepOpen: true,
          });
          const list = await comboBox.getOptionsList('indexPattern-dimension-field');
          expect(list).to.not.contain('Time series dimensions');
          await PageObjects.lens.closeDimensionEditor();
          await lens.closeDimensionEditor();
        });
      });
    });

    describe('Scenarios with changing stream type', () => {
      const now = moment().utc();
      const fromMoment = now.clone().subtract(1, 'hour');
      const toMoment = now.clone();
      const fromTimeForScenarios = fromMoment.format(TIME_PICKER_FORMAT);
      const toTimeForScenarios = toMoment.format(TIME_PICKER_FORMAT);

      const getScenarios = (
        initialIndex: string
      ): Array<{
        name: string;
        indexes: Array<{
          index: string;
          create?: boolean;
          downsample?: boolean;
          tsdb?: boolean;
          removeTSDBFields?: boolean;
        }>;
        indexes: ScenarioIndexes[];
      }> => [
        {
          name: 'Dataview with no additional stream/index',

@@ -633,7 +342,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
          name: 'Dataview with an additional downsampled TSDB stream',
          indexes: [
            { index: initialIndex },
            { index: 'tsdb_index_2', create: true, tsdb: true, downsample: true },
            { index: 'tsdb_index_2', create: true, mode: 'tsdb', downsample: true },
          ],
        },
        {

@@ -641,112 +350,17 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
          indexes: [
            { index: initialIndex },
            { index: 'regular_index', create: true, removeTSDBFields: true },
            { index: 'tsdb_index_2', create: true, tsdb: true, downsample: true },
            { index: 'tsdb_index_2', create: true, mode: 'tsdb', downsample: true },
          ],
        },
        {
          name: 'Dataview with an additional TSDB stream',
          indexes: [{ index: initialIndex }, { index: 'tsdb_index_2', create: true, tsdb: true }],
          indexes: [{ index: initialIndex }, { index: 'tsdb_index_2', create: true, mode: 'tsdb' }],
        },
      ];

      function runTestsForEachScenario(
        initialIndex: string,
        testingFn: (
          indexes: Array<{
            index: string;
            create?: boolean;
            downsample?: boolean;
            tsdb?: boolean;
            removeTSDBFields?: boolean;
          }>
        ) => void
      ): void {
        for (const { name, indexes } of getScenarios(initialIndex)) {
          describe(name, () => {
            let dataViewName: string;
            let downsampledTargetIndex: string = '';

            before(async () => {
              for (const { index, create, downsample, tsdb, removeTSDBFields } of indexes) {
                if (create) {
                  if (tsdb) {
                    await dataStreams.createDataStream(
                      index,
                      getDataMapping({ tsdb, removeTSDBFields }),
                      tsdb
                    );
                  } else {
                    log.info(`creating a index "${index}" with mapping...`);
                    await es.indices.create({
                      index,
                      mappings: {
                        properties: getDataMapping({ tsdb: Boolean(tsdb), removeTSDBFields }),
                      },
                    });
                  }
                  // add data to the newly created index
                  await createDocs(
                    index,
                    { isStream: Boolean(tsdb), removeTSDBFields },
                    fromTimeForScenarios
                  );
                }
                if (downsample) {
                  downsampledTargetIndex = await dataStreams.downsampleTSDBIndex(index, {
                    isStream: Boolean(tsdb),
                  });
                }
              }
              dataViewName = `${indexes.map(({ index }) => index).join(',')}${
                downsampledTargetIndex ? `,${downsampledTargetIndex}` : ''
              }`;
              log.info(`creating a data view for "${dataViewName}"...`);
              await indexPatterns.create(
                {
                  title: dataViewName,
                  timeFieldName: '@timestamp',
                },
                { override: true }
              );
              await PageObjects.common.navigateToApp('lens');
              await elasticChart.setNewChartUiDebugFlag(true);
              // go to the time range covering the scenario data
              await PageObjects.lens.goToTimeRange(
                fromTimeForScenarios,
                moment
                  .utc(toTimeForScenarios, TIME_PICKER_FORMAT)
                  .add(2, 'hour')
                  .format(TIME_PICKER_FORMAT) // consider also new documents
              );
            });

            after(async () => {
              for (const { index, create, tsdb } of indexes) {
                if (create) {
                  if (tsdb) {
                    await dataStreams.deleteDataStream(index);
                  } else {
                    log.info(`deleting the index "${index}"...`);
                    await es.indices.delete({
                      index,
                    });
                  }
                }
                // no need to clean the specific downsample index as everything linked to the stream
                // is cleaned up automatically
}
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
await PageObjects.lens.switchDataPanelIndexPattern(dataViewName);
|
||||
await PageObjects.lens.removeLayer();
|
||||
});
|
||||
|
||||
testingFn(indexes);
|
||||
});
|
||||
}
|
||||
}
|
||||
const { runTestsForEachScenario, toTimeForScenarios, fromTimeForScenarios } =
|
||||
setupScenarioRunner(getService, getPageObjects, getScenarios);
|
||||
|
||||
describe('Data-stream upgraded to TSDB scenarios', () => {
|
||||
const streamIndex = 'data_stream';
|
||||
|
@ -755,7 +369,11 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
|
||||
before(async () => {
|
||||
log.info(`Creating "${streamIndex}" data stream...`);
|
||||
await dataStreams.createDataStream(streamIndex, getDataMapping(), false);
|
||||
await dataStreams.createDataStream(
|
||||
streamIndex,
|
||||
getDataMapping({ mode: 'tsdb' }),
|
||||
undefined
|
||||
);
|
||||
|
||||
// add some data to the stream
|
||||
await createDocs(streamIndex, { isStream: true }, fromTimeForScenarios);
|
||||
|
@@ -767,8 +385,8 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
      });
      log.info(`Upgrade "${streamIndex}" stream to TSDB...`);

-     const tsdbMapping = getDataMapping({ tsdb: true });
-     await dataStreams.upgradeStreamToTSDB(streamIndex, tsdbMapping);
+     const tsdbMapping = getDataMapping({ mode: 'tsdb' });
+     await dataStreams.upgradeStream(streamIndex, tsdbMapping, 'tsdb');
      log.info(
        `Add more data to new "${streamConvertedToTsdbIndex}" dataView (now with TSDB backing index)...`
      );
@@ -780,15 +398,15 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
      await dataStreams.deleteDataStream(streamIndex);
    });

-   runTestsForEachScenario(streamConvertedToTsdbIndex, (indexes) => {
+   runTestsForEachScenario(streamConvertedToTsdbIndex, 'tsdb', (indexes) => {
      it('should detect the data stream has now been upgraded to TSDB', async () => {
-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
          operation: 'date_histogram',
          field: '@timestamp',
        });

-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
          operation: 'min',
          field: `bytes_counter`,
@@ -800,53 +418,53 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
            timeout: 500,
          })
        ).to.eql(false);
-       await PageObjects.lens.closeDimensionEditor();
+       await lens.closeDimensionEditor();
      });

      it(`should visualize a date histogram chart for counter field`, async () => {
-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
          operation: 'date_histogram',
          field: '@timestamp',
        });

        // check the counter field works
-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
          operation: 'min',
          field: `bytes_counter`,
        });
        // and also that the count of documents should be "indexes.length" times overall
-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
          operation: 'count',
        });

-       await PageObjects.lens.waitForVisualization('xyVisChart');
-       const data = await PageObjects.lens.getCurrentChartDebugState('xyVisChart');
-       const counterBars = data.bars![0].bars;
-       const countBars = data.bars![1].bars;
+       await lens.waitForVisualization('xyVisChart');
+       const data = await lens.getCurrentChartDebugState('xyVisChart');
+       const counterBars = data?.bars![0].bars;
+       const countBars = data?.bars![1].bars;

        log.info('Check counter data before the upgrade');
        // check there's some data before the upgrade
-       expect(counterBars[0].y).to.eql(5000);
+       expect(counterBars?.[0].y).to.eql(5000);
        log.info('Check counter data after the upgrade');
        // check there's some data after the upgrade
-       expect(counterBars[counterBars.length - 1].y).to.eql(5000);
+       expect(counterBars?.[counterBars.length - 1].y).to.eql(5000);

        // due to the flaky nature of the exact check here, we're going to relax it:
        // as long as there's data before and after, it is ok
        log.info('Check count before the upgrade');
-       const columnsToCheck = countBars.length / 2;
+       const columnsToCheck = countBars ? countBars.length / 2 : 0;
        // Before the upgrade the count is N times the indexes
        expect(sumFirstNValues(columnsToCheck, countBars)).to.be.greaterThan(
          indexes.length * TEST_DOC_COUNT - 1
        );
        log.info('Check count after the upgrade');
        // later there are only documents for the upgraded stream
-       expect(sumFirstNValues(columnsToCheck, [...countBars].reverse())).to.be.greaterThan(
-         TEST_DOC_COUNT - 1
-       );
+       expect(
+         sumFirstNValues(columnsToCheck, [...(countBars ?? [])].reverse())
+       ).to.be.greaterThan(TEST_DOC_COUNT - 1);
      });
    });
  });
@@ -858,7 +476,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {

    before(async () => {
      log.info(`Creating "${tsdbStream}" data stream...`);
-     await dataStreams.createDataStream(tsdbStream, getDataMapping({ tsdb: true }), true);
+     await dataStreams.createDataStream(tsdbStream, getDataMapping({ mode: 'tsdb' }), 'tsdb');

      // add some data to the stream
      await createDocs(tsdbStream, { isStream: true }, fromTimeForScenarios);
@@ -872,7 +490,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
        `Downgrade "${tsdbStream}" stream into regular stream "${tsdbConvertedToStream}"...`
      );

-     await dataStreams.downgradeTSDBtoStream(tsdbStream, getDataMapping({ tsdb: true }));
+     await dataStreams.downgradeStream(tsdbStream, getDataMapping({ mode: 'tsdb' }), 'tsdb');
      log.info(`Add more data to new "${tsdbConvertedToStream}" dataView (no longer TSDB)...`);
      // add some more data once downgraded
      await createDocs(tsdbConvertedToStream, { isStream: true }, toTimeForScenarios);
@@ -882,15 +500,15 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
      await dataStreams.deleteDataStream(tsdbConvertedToStream);
    });

-   runTestsForEachScenario(tsdbConvertedToStream, (indexes) => {
+   runTestsForEachScenario(tsdbConvertedToStream, 'tsdb', (indexes) => {
      it('should keep TSDB restrictions only if a tsdb stream is in the dataView mix', async () => {
-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
          operation: 'date_histogram',
          field: '@timestamp',
        });

-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
          operation: 'min',
          field: `bytes_counter`,
@@ -901,28 +519,28 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
          testSubjects.exists(`lns-indexPatternDimension-average incompatible`, {
            timeout: 500,
          })
-       ).to.eql(indexes.some(({ tsdb }) => tsdb));
-       await PageObjects.lens.closeDimensionEditor();
+       ).to.eql(indexes.some(({ mode }) => mode === 'tsdb'));
+       await lens.closeDimensionEditor();
      });

      it(`should visualize a date histogram chart for counter field`, async () => {
-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
          operation: 'date_histogram',
          field: '@timestamp',
        });
        // just check the data is shown
-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
          operation: 'count',
        });

+       await lens.waitForVisualization('xyVisChart');
+       const data = await lens.getCurrentChartDebugState('xyVisChart');
+       const bars = data?.bars![0].bars;
+       const columnsToCheck = bars ? bars.length / 2 : 0;
        // due to the flaky nature of the exact check here, we're going to relax it:
        // as long as there's data before and after, it is ok
-       await PageObjects.lens.waitForVisualization('xyVisChart');
-       const data = await PageObjects.lens.getCurrentChartDebugState('xyVisChart');
-       const bars = data.bars![0].bars;
-       const columnsToCheck = bars.length / 2;
        log.info('Check count before the downgrade');
        // Before the downgrade the count is N times the indexes
        expect(sumFirstNValues(columnsToCheck, bars)).to.be.greaterThan(
@@ -930,14 +548,14 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
        );
        log.info('Check count after the downgrade');
        // later there are only documents for the downgraded stream
-       expect(sumFirstNValues(columnsToCheck, [...bars].reverse())).to.be.greaterThan(
+       expect(sumFirstNValues(columnsToCheck, [...(bars ?? [])].reverse())).to.be.greaterThan(
          TEST_DOC_COUNT - 1
        );
      });

      it('should visualize data when moving the time window around the downgrade moment', async () => {
        // check before the downgrade
-       await PageObjects.lens.goToTimeRange(
+       await lens.goToTimeRange(
          moment
            .utc(fromTimeForScenarios, TIME_PICKER_FORMAT)
            .subtract(1, 'hour')
@@ -948,23 +566,23 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
            .format(TIME_PICKER_FORMAT) // consider only new documents
        );

-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_xDimensionPanel > lns-empty-dimension',
          operation: 'date_histogram',
          field: '@timestamp',
        });
-       await PageObjects.lens.configureDimension({
+       await lens.configureDimension({
          dimension: 'lnsXY_yDimensionPanel > lns-empty-dimension',
          operation: 'count',
        });

-       await PageObjects.lens.waitForVisualization('xyVisChart');
-       const dataBefore = await PageObjects.lens.getCurrentChartDebugState('xyVisChart');
-       const barsBefore = dataBefore.bars![0].bars;
-       expect(barsBefore.some(({ y }) => y)).to.eql(true);
+       await lens.waitForVisualization('xyVisChart');
+       const dataBefore = await lens.getCurrentChartDebugState('xyVisChart');
+       const barsBefore = dataBefore?.bars![0].bars;
+       expect(barsBefore?.some(({ y }) => y)).to.eql(true);

        // check after the downgrade
-       await PageObjects.lens.goToTimeRange(
+       await lens.goToTimeRange(
          moment
            .utc(toTimeForScenarios, TIME_PICKER_FORMAT)
            .add(1, 'second')
@@ -975,10 +593,10 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
            .format(TIME_PICKER_FORMAT) // consider also new documents
        );

-       await PageObjects.lens.waitForVisualization('xyVisChart');
-       const dataAfter = await PageObjects.lens.getCurrentChartDebugState('xyVisChart');
-       const barsAfter = dataAfter.bars![0].bars;
-       expect(barsAfter.some(({ y }) => y)).to.eql(true);
+       await lens.waitForVisualization('xyVisChart');
+       const dataAfter = await lens.getCurrentChartDebugState('xyVisChart');
+       const barsAfter = dataAfter?.bars![0].bars;
+       expect(barsAfter?.some(({ y }) => y)).to.eql(true);
      });
    });
  });
@@ -0,0 +1,480 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import expect from '@kbn/expect';
import { Client } from '@elastic/elasticsearch';
import { MappingProperty } from '@elastic/elasticsearch/lib/api/types';
import { ToolingLog } from '@kbn/tooling-log';
import moment from 'moment';
import { FtrProviderContext } from '../../../../ftr_provider_context';

export const TEST_DOC_COUNT = 100;
export const TIME_PICKER_FORMAT = 'MMM D, YYYY [@] HH:mm:ss.SSS';
export const timeSeriesMetrics: Record<string, 'gauge' | 'counter'> = {
  bytes_gauge: 'gauge',
  bytes_counter: 'counter',
};
export const timeSeriesDimensions = ['request', 'url'];
export const logsDBSpecialFields = ['host'];

export const sharedESArchive =
  'test/functional/fixtures/es_archiver/kibana_sample_data_logs_logsdb';
export const fromTime = 'Apr 16, 2023 @ 00:00:00.000';
export const toTime = 'Jun 16, 2023 @ 00:00:00.000';

export type TestDoc = Record<string, string | string[] | number | null | Record<string, unknown>>;

export function testDocTemplate(mode: 'tsdb' | 'logsdb'): TestDoc {
  return {
    agent: 'Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1',
    bytes: 6219,
    clientip: '223.87.60.27',
    extension: 'deb',
    geo: {
      srcdest: 'US:US',
      src: 'US',
      dest: 'US',
      coordinates: { lat: 39.41042861, lon: -88.8454325 },
    },
    host: mode === 'tsdb' ? 'artifacts.elastic.co' : { name: 'artifacts.elastic.co' },
    index: 'kibana_sample_data_logs',
    ip: '223.87.60.27',
    machine: { ram: 8589934592, os: 'win 8' },
    memory: null,
    message:
      '223.87.60.27 - - [2018-07-22T00:39:02.912Z] "GET /elasticsearch/elasticsearch-6.3.2.deb_1 HTTP/1.1" 200 6219 "-" "Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1"',
    phpmemory: null,
    referer: 'http://twitter.com/success/wendy-lawrence',
    request: '/elasticsearch/elasticsearch-6.3.2.deb',
    response: 200,
    tags: ['success', 'info'],
    '@timestamp': '2018-07-22T00:39:02.912Z',
    url: 'https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.3.2.deb_1',
    utc_time: '2018-07-22T00:39:02.912Z',
    event: { dataset: 'sample_web_logs' },
    bytes_gauge: 0,
    bytes_counter: 0,
  };
}
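// Note on the `host` field above: it deliberately changes shape with the mode. The TSDB
// mapping in getDataMapping below declares `host` as a text field (with a keyword
// sub-field), so the template emits a plain string; the LogsDB mapping declares a
// `host.name` keyword, so the template emits an object instead.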

export function getDataMapping({
  mode,
  removeTSDBFields,
  removeLogsDBFields,
}: {
  mode: 'tsdb' | 'logsdb';
  removeTSDBFields?: boolean;
  removeLogsDBFields?: boolean;
}): Record<string, MappingProperty> {
  const dataStreamMapping: Record<string, MappingProperty> = {
    '@timestamp': {
      type: 'date',
    },
    agent: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    bytes: {
      type: 'long',
    },
    bytes_counter: {
      type: 'long',
    },
    bytes_gauge: {
      type: 'long',
    },
    clientip: {
      type: 'ip',
    },
    event: {
      properties: {
        dataset: {
          type: 'keyword',
        },
      },
    },
    extension: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    geo: {
      properties: {
        coordinates: {
          type: 'geo_point',
        },
        dest: {
          type: 'keyword',
        },
        src: {
          type: 'keyword',
        },
        srcdest: {
          type: 'keyword',
        },
      },
    },
    host:
      mode === 'tsdb'
        ? {
            fields: {
              keyword: {
                ignore_above: 256,
                type: 'keyword',
              },
            },
            type: 'text',
          }
        : {
            properties: {
              name: {
                type: 'keyword',
              },
            },
          },
    index: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    ip: {
      type: 'ip',
    },
    machine: {
      properties: {
        os: {
          fields: {
            keyword: {
              ignore_above: 256,
              type: 'keyword',
            },
          },
          type: 'text',
        },
        ram: {
          type: 'long',
        },
      },
    },
    memory: {
      type: 'double',
    },
    message: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    phpmemory: {
      type: 'long',
    },
    referer: {
      type: 'keyword',
    },
    request: {
      type: 'keyword',
    },
    response: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    tags: {
      fields: {
        keyword: {
          ignore_above: 256,
          type: 'keyword',
        },
      },
      type: 'text',
    },
    timestamp: {
      path: '@timestamp',
      type: 'alias',
    },
    url: {
      type: 'keyword',
    },
    utc_time: {
      type: 'date',
    },
  };

  if (mode === 'tsdb') {
    // augment the current mapping
    for (const [fieldName, fieldMapping] of Object.entries(dataStreamMapping || {})) {
      if (
        timeSeriesMetrics[fieldName] &&
        (fieldMapping.type === 'double' || fieldMapping.type === 'long')
      ) {
        fieldMapping.time_series_metric = timeSeriesMetrics[fieldName];
      }

      if (timeSeriesDimensions.includes(fieldName) && fieldMapping.type === 'keyword') {
        fieldMapping.time_series_dimension = true;
      }
    }
  }
  if (removeTSDBFields) {
    for (const fieldName of Object.keys(timeSeriesMetrics)) {
      delete dataStreamMapping[fieldName];
    }
  }
  if (removeLogsDBFields) {
    for (const fieldName of logsDBSpecialFields) {
      delete dataStreamMapping[fieldName];
    }
  }
  return dataStreamMapping;
}
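// A quick sketch of what the two modes return (illustrative, derived from the code
// above): in 'tsdb' mode the metric and dimension fields carry time series metadata,
// while 'logsdb' mode leaves them as plain fields:
//   getDataMapping({ mode: 'tsdb' }).bytes_counter   // { type: 'long', time_series_metric: 'counter' }
//   getDataMapping({ mode: 'logsdb' }).bytes_counter // { type: 'long' }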

export function sumFirstNValues(n: number, bars: Array<{ y: number }> | undefined): number {
  const indexes = Array(n)
    .fill(1)
    .map((_, i) => i);
  let countSum = 0;
  for (const index of indexes) {
    if (bars?.[index]) {
      countSum += bars[index].y;
    }
  }
  return countSum;
}
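// e.g. sumFirstNValues(2, [{ y: 1 }, { y: 2 }, { y: 5 }]) === 3; missing entries are
// skipped, so an undefined or shorter-than-n array is summed safely instead of throwing.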

export const getDocsGenerator =
  (log: ToolingLog, es: Client, mode: 'tsdb' | 'logsdb') =>
  async (
    esIndex: string,
    {
      isStream,
      removeTSDBFields,
      removeLogsDBFields,
    }: { isStream: boolean; removeTSDBFields?: boolean; removeLogsDBFields?: boolean },
    startTime: string
  ) => {
    log.info(
      `Adding ${TEST_DOC_COUNT} docs to ${esIndex} with starting time from ${moment
        .utc(startTime, TIME_PICKER_FORMAT)
        .format(TIME_PICKER_FORMAT)} to ${moment
        .utc(startTime, TIME_PICKER_FORMAT)
        .add(2 * TEST_DOC_COUNT, 'seconds')
        .format(TIME_PICKER_FORMAT)}`
    );
    const docs = Array<TestDoc>(TEST_DOC_COUNT)
      .fill(testDocTemplate(mode))
      .map((templateDoc, i) => {
        const timestamp = moment
          .utc(startTime, TIME_PICKER_FORMAT)
          .add(TEST_DOC_COUNT + i, 'seconds')
          .format();
        const doc: TestDoc = {
          ...templateDoc,
          '@timestamp': timestamp,
          utc_time: timestamp,
          bytes_gauge: Math.floor(Math.random() * 10000 * i),
          bytes_counter: 5000,
        };
        if (removeTSDBFields) {
          for (const field of Object.keys(timeSeriesMetrics)) {
            delete doc[field];
          }
        }
        // do not remove the fields for logsdb - ignore the flag
        return doc;
      });

    // Bulk NDJSON body: each doc is an action line ("create" for data streams,
    // "index" for plain indices) followed by the document source
    const result = await es.bulk(
      {
        index: esIndex,
        body: docs.map((d) => `{"${isStream ? 'create' : 'index'}": {}}\n${JSON.stringify(d)}\n`),
      },
      { meta: true }
    );

    const res = result.body;

    if (res.errors) {
      const resultsWithErrors = res.items
        .filter(({ index }) => index?.error)
        .map(({ index }) => index?.error);
      for (const error of resultsWithErrors) {
        log.error(`Error: ${JSON.stringify(error)}`);
      }
      const [indexExists, dataStreamExists] = await Promise.all([
        es.indices.exists({ index: esIndex }),
        es.indices.getDataStream({ name: esIndex }),
      ]);
      log.debug(`Index exists: ${indexExists} - Data stream exists: ${dataStreamExists}`);
    }
    log.info(`Indexed ${res.items.length} test data docs.`);
  };
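// Minimal usage sketch (the index name is illustrative): bind a generator to a mode
// once, then reuse it to seed every index of a scenario.
//   const createDocs = getDocsGenerator(log, es, 'logsdb');
//   await createDocs('my_logsdb_stream', { isStream: true }, fromTimeForScenarios);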

export interface ScenarioIndexes {
  index: string;
  create?: boolean;
  downsample?: boolean;
  removeTSDBFields?: boolean;
  removeLogsDBFields?: boolean;
  mode?: 'tsdb' | 'logsdb';
}
type GetScenarioFn = (initialIndex: string) => Array<{
  name: string;
  indexes: ScenarioIndexes[];
}>;

export function setupScenarioRunner(
  getService: FtrProviderContext['getService'],
  getPageObjects: FtrProviderContext['getPageObjects'],
  getScenario: GetScenarioFn
) {
  const now = moment().utc();
  const fromMoment = now.clone().subtract(1, 'hour');
  const toMoment = now.clone();
  const fromTimeForScenarios = fromMoment.format(TIME_PICKER_FORMAT);
  const toTimeForScenarios = toMoment.format(TIME_PICKER_FORMAT);

  function runTestsForEachScenario(
    initialIndex: string,
    scenarioMode: 'tsdb' | 'logsdb',
    testingFn: (indexes: ScenarioIndexes[]) => void
  ): void {
    const { common, lens } = getPageObjects(['common', 'lens', 'dashboard']);
    const es = getService('es');
    const log = getService('log');
    const dataStreams = getService('dataStreams');
    const elasticChart = getService('elasticChart');
    const indexPatterns = getService('indexPatterns');
    const createDocs = getDocsGenerator(log, es, scenarioMode);

    for (const { name, indexes } of getScenario(initialIndex)) {
      describe(name, () => {
        let dataViewName: string;
        let downsampledTargetIndex: string = '';

        before(async () => {
          for (const {
            index,
            create,
            downsample,
            mode,
            removeTSDBFields,
            removeLogsDBFields,
          } of indexes) {
            // Validate the scenario config
            if (downsample && mode !== 'tsdb') {
              expect().fail('Cannot create a scenario with downsampled stream without tsdb');
            }
            // Kick off the creation
            const isStream = mode !== undefined;
            if (create) {
              if (isStream) {
                await dataStreams.createDataStream(
                  index,
                  getDataMapping({
                    mode,
                    removeTSDBFields: Boolean(removeTSDBFields || mode === 'logsdb'),
                    removeLogsDBFields,
                  }),
                  mode
                );
              } else {
                log.info(`creating an index "${index}" with mapping...`);
                await es.indices.create({
                  index,
                  mappings: {
                    properties: getDataMapping({
                      mode: mode === 'logsdb' ? 'logsdb' : 'tsdb', // use tsdb by default when a regular index is specified
                      removeTSDBFields,
                      removeLogsDBFields,
                    }),
                  },
                });
              }
              // add data to the newly created index
              await createDocs(
                index,
                { isStream, removeTSDBFields, removeLogsDBFields },
                fromTimeForScenarios
              );
            }
            if (downsample) {
              downsampledTargetIndex = await dataStreams.downsampleTSDBIndex(index, {
                isStream: mode === 'tsdb',
              });
            }
          }
          dataViewName = `${indexes.map(({ index }) => index).join(',')}${
            downsampledTargetIndex ? `,${downsampledTargetIndex}` : ''
          }`;
          log.info(`creating a data view for "${dataViewName}"...`);
          await indexPatterns.create(
            {
              title: dataViewName,
              timeFieldName: '@timestamp',
            },
            { override: true }
          );
          await common.navigateToApp('lens');
          await elasticChart.setNewChartUiDebugFlag(true);
          // go to the time range where the scenario data lives
          await lens.goToTimeRange(
            fromTimeForScenarios,
            moment
              .utc(toTimeForScenarios, TIME_PICKER_FORMAT)
              .add(2, 'hour')
              .format(TIME_PICKER_FORMAT) // consider also new documents
          );
        });

        after(async () => {
          for (const { index, create, mode: indexMode } of indexes) {
            if (create) {
              if (indexMode === 'tsdb' || indexMode === 'logsdb') {
                await dataStreams.deleteDataStream(index);
              } else {
                log.info(`deleting the index "${index}"...`);
                await es.indices.delete({
                  index,
                });
              }
            }
            // no need to clean the specific downsample index as everything linked to the stream
            // is cleaned up automatically
          }
        });

        beforeEach(async () => {
          await lens.switchDataPanelIndexPattern(dataViewName);
          await lens.removeLayer();
        });

        testingFn(indexes);
      });
    }
  }

  return { runTestsForEachScenario, fromTimeForScenarios, toTimeForScenarios };
}
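// How a test file is expected to consume this runner (a hypothetical sketch: the
// scenario names and the extra 'tsdb_index' are illustrative, not part of this commit):
//
//   const getScenarios: GetScenarioFn = (initialIndex) => [
//     { name: 'LogsDB stream', indexes: [{ index: initialIndex }] },
//     {
//       name: 'LogsDB stream + TSDB stream',
//       indexes: [
//         { index: initialIndex },
//         { index: 'tsdb_index', create: true, mode: 'tsdb' },
//       ],
//     },
//   ];
//   const { runTestsForEachScenario, fromTimeForScenarios, toTimeForScenarios } =
//     setupScenarioRunner(getService, getPageObjects, getScenarios);
//   runTestsForEachScenario(initialIndex, 'logsdb', (indexes) => {
//     // per-scenario `it(...)` blocks go here
//   });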