[Stack monitoring] logstash - read from correct aggregation (#122443) (#123149)

* add test-subj to pipeline viewer section

* pipeline viewer service

* clickPipeline

* pipeline viewer_mb test

* lint

* lint

* update component snapshot

* fix get_pipeline bucket selection

* align metricbeat pipeline data with internal monitoring

* collect filters

* fix pipeline viewer test

* clean logstash_pipelines_mb archive

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
(cherry picked from commit 6c6a2edee3)

Co-authored-by: Kevin Lacabane <klacabane@gmail.com>
Kibana Machine 2022-01-17 09:13:21 -05:00 committed by GitHub
parent 5a38f03606
commit aa23b8d094
11 changed files with 174 additions and 8 deletions

@@ -1,7 +1,9 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`StatementSection component renders heading text, correct icon type, and elements for StatementSection 1`] = `
<div>
<div
data-test-subj="pipelineViewerSection_Inputs"
>
<StatementListHeading
iconType="logstashInput"
title="Inputs"

@@ -17,7 +17,7 @@ export function StatementSection({ iconType, headingText, elements, onShowVertex
}
return (
<div>
<div data-test-subj={`pipelineViewerSection_${headingText}`}>
<StatementListHeading iconType={iconType} title={headingText} />
<EuiSpacer size="s" />
<StatementList elements={elements} onShowVertexDetails={onShowVertexDetails} />
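For context, a minimal sketch of how the interpolated data-test-subj above lines up with the snapshot and the functional page object added later in this commit. The headingText values are assumed from the sections the tests assert on, not taken from the component's callers:

// Hypothetical headingText values; the component interpolates each into its wrapper div,
// producing the test subjects the webdriver service looks up.
const SUBJ_PIPELINE_SECTION_PREFIX = 'pipelineViewerSection_';
for (const headingText of ['Inputs', 'Filters', 'Outputs']) {
  console.log(`${SUBJ_PIPELINE_SECTION_PREFIX}${headingText}`);
}
// -> pipelineViewerSection_Inputs, pipelineViewerSection_Filters, pipelineViewerSection_Outputs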

@@ -76,6 +76,17 @@ export function _enrichStateWithStatsAggregation(
statsAggregation: any,
timeseriesIntervalInSeconds: number
) {
// we could have data in both legacy and metricbeat collection, we pick the bucket most filled
const bucketCount = (aggregationKey: string) =>
get(
statsAggregation.aggregations,
`${aggregationKey}.scoped.total_processor_duration_stats.count`
);
const pipelineBucket =
bucketCount('pipelines_mb') > bucketCount('pipelines')
? statsAggregation.aggregations.pipelines_mb
: statsAggregation.aggregations.pipelines;
const logstashState = stateDocument.logstash_state || stateDocument.logstash?.node?.state;
const vertices = logstashState?.pipeline?.representation?.graph?.vertices ?? [];
@@ -85,14 +96,10 @@
vertex.stats = {};
});
const totalDurationStats =
statsAggregation.aggregations.pipelines.scoped.total_processor_duration_stats;
const totalDurationStats = pipelineBucket.scoped.total_processor_duration_stats;
const totalProcessorsDurationInMillis = totalDurationStats.max - totalDurationStats.min;
const verticesWithStatsBuckets =
statsAggregation.aggregations?.pipelines.scoped.vertices?.vertex_id.buckets ??
statsAggregation.aggregations?.pipelines_mb.scoped.vertices?.vertex_id.buckets ??
[];
const verticesWithStatsBuckets = pipelineBucket.scoped.vertices?.vertex_id.buckets ?? [];
verticesWithStatsBuckets.forEach((vertexStatsBucket: any) => {
// Each vertexStats bucket contains a list of stats for a single vertex within a single timeseries interval
const vertexId = vertexStatsBucket.key;
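To illustrate the bucket-selection heuristic above, here is a minimal standalone sketch run against an assumed aggregation response; the shape mirrors the paths used in the code, but the numbers are hypothetical:

import { get } from 'lodash';

// Assumed response: the legacy 'pipelines' bucket is empty while the metricbeat
// 'pipelines_mb' bucket holds the documents, so the heuristic should pick the latter.
const statsAggregation = {
  aggregations: {
    pipelines: { scoped: { total_processor_duration_stats: { count: 0, min: 0, max: 0 } } },
    pipelines_mb: { scoped: { total_processor_duration_stats: { count: 42, min: 10, max: 250 } } },
  },
};

const bucketCount = (aggregationKey: string) =>
  get(statsAggregation.aggregations, `${aggregationKey}.scoped.total_processor_duration_stats.count`);

const pipelineBucket =
  bucketCount('pipelines_mb') > bucketCount('pipelines')
    ? statsAggregation.aggregations.pipelines_mb
    : statsAggregation.aggregations.pipelines;

// Picks pipelines_mb here; falls back to the legacy bucket when it has more documents.
console.log(pipelineBucket.scoped.total_processor_duration_stats); // { count: 42, min: 10, max: 250 }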

@@ -41,6 +41,8 @@ export default function ({ loadTestFile }) {
loadTestFile(require.resolve('./logstash/nodes_mb'));
loadTestFile(require.resolve('./logstash/pipelines'));
loadTestFile(require.resolve('./logstash/pipelines_mb'));
loadTestFile(require.resolve('./logstash/pipeline_viewer'));
loadTestFile(require.resolve('./logstash/pipeline_viewer_mb'));
loadTestFile(require.resolve('./logstash/node_detail'));
loadTestFile(require.resolve('./logstash/node_detail_mb'));
loadTestFile(require.resolve('./beats/cluster'));

@@ -0,0 +1,48 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { getLifecycleMethods } from '../_get_lifecycle_methods';
export default function ({ getService, getPageObjects }) {
const overview = getService('monitoringClusterOverview');
const pipelinesList = getService('monitoringLogstashPipelines');
const pipelineViewer = getService('monitoringLogstashPipelineViewer');
describe('Logstash pipeline viewer', () => {
const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);
before(async () => {
await setup('x-pack/test/functional/es_archives/monitoring/logstash_pipelines', {
from: 'Jan 22, 2018 @ 09:10:00.000',
to: 'Jan 22, 2018 @ 09:41:00.000',
});
await overview.closeAlertsModal();
// go to nginx_logs pipeline view
await overview.clickLsPipelines();
expect(await pipelinesList.isOnListing()).to.be(true);
await pipelinesList.clickPipeline('nginx_logs');
expect(await pipelineViewer.isOnPipelineViewer()).to.be(true);
});
after(async () => {
await tearDown();
});
it('displays pipeline inputs, filters and outputs', async () => {
const { inputs, filters, outputs } = await pipelineViewer.getPipelineDefinition();
expect(inputs).to.eql([{ name: 'generator', metrics: ['mygen01', '62.5 e/s emitted'] }]);
expect(filters).to.eql([
{ name: 'sleep', metrics: ['1%', '94.86 ms/e', '62.5 e/s received'] },
]);
expect(outputs).to.eql([{ name: 'stdout', metrics: ['0%', '0 ms/e', '62.5 e/s received'] }]);
});
});
}

@@ -0,0 +1,49 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { getLifecycleMethods } from '../_get_lifecycle_methods';
export default function ({ getService, getPageObjects }) {
const overview = getService('monitoringClusterOverview');
const pipelinesList = getService('monitoringLogstashPipelines');
const pipelineViewer = getService('monitoringLogstashPipelineViewer');
describe('Logstash pipeline viewer mb', () => {
const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);
before(async () => {
await setup('x-pack/test/functional/es_archives/monitoring/logstash_pipelines_mb', {
from: 'Jan 22, 2018 @ 09:10:00.000',
to: 'Jan 22, 2018 @ 09:41:00.000',
useCreate: true,
});
await overview.closeAlertsModal();
// go to nginx_logs pipeline view
await overview.clickLsPipelines();
expect(await pipelinesList.isOnListing()).to.be(true);
await pipelinesList.clickPipeline('nginx_logs');
expect(await pipelineViewer.isOnPipelineViewer()).to.be(true);
});
after(async () => {
await tearDown();
});
it('displays pipeline inputs, filters and outputs', async () => {
const { inputs, filters, outputs } = await pipelineViewer.getPipelineDefinition();
expect(inputs).to.eql([{ name: 'generator', metrics: ['mygen01', '62.5 e/s emitted'] }]);
expect(filters).to.eql([
{ name: 'sleep', metrics: ['1%', '94.86 ms/e', '62.5 e/s received'] },
]);
expect(outputs).to.eql([{ name: 'stdout', metrics: ['0%', '0 ms/e', '62.5 e/s received'] }]);
});
});
}

@@ -31,6 +31,7 @@ import {
MonitoringLogstashNodesProvider,
MonitoringLogstashNodeDetailProvider,
MonitoringLogstashPipelinesProvider,
MonitoringLogstashPipelineViewerProvider,
MonitoringLogstashSummaryStatusProvider,
MonitoringKibanaOverviewProvider,
MonitoringKibanaInstancesProvider,
@@ -98,6 +99,7 @@ export const services = {
monitoringLogstashNodes: MonitoringLogstashNodesProvider,
monitoringLogstashNodeDetail: MonitoringLogstashNodeDetailProvider,
monitoringLogstashPipelines: MonitoringLogstashPipelinesProvider,
monitoringLogstashPipelineViewer: MonitoringLogstashPipelineViewerProvider,
monitoringLogstashSummaryStatus: MonitoringLogstashSummaryStatusProvider,
monitoringKibanaOverview: MonitoringKibanaOverviewProvider,
monitoringKibanaInstances: MonitoringKibanaInstancesProvider,

@@ -24,6 +24,7 @@ export { MonitoringLogstashOverviewProvider } from './logstash_overview';
export { MonitoringLogstashNodesProvider } from './logstash_nodes';
export { MonitoringLogstashNodeDetailProvider } from './logstash_node_detail';
export { MonitoringLogstashPipelinesProvider } from './logstash_pipelines';
export { MonitoringLogstashPipelineViewerProvider } from './logstash_pipeline_viewer';
export { MonitoringLogstashSummaryStatusProvider } from './logstash_summary_status';
export { MonitoringKibanaOverviewProvider } from './kibana_overview';
export { MonitoringKibanaInstancesProvider } from './kibana_instances';

@@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export function MonitoringLogstashPipelineViewerProvider({ getService }) {
const testSubjects = getService('testSubjects');
const retry = getService('retry');
const find = getService('find');
const PIPELINE_VIEWER_SELECTOR = '.monPipelineViewer';
const SUBJ_PIPELINE_SECTION_PREFIX = 'pipelineViewerSection_';
const PIPELINE_SECTION_ITEM_CLS = 'monPipelineViewer__listItem';
return new (class LogstashPipelineViewer {
isOnPipelineViewer() {
return retry.try(() => find.existsByCssSelector(PIPELINE_VIEWER_SELECTOR));
}
async getPipelineDefinition() {
const getSectionItems = async (section) => {
const items = await section.findAllByClassName(PIPELINE_SECTION_ITEM_CLS);
return Promise.all(
items.map(async (item) => {
const [name, ...metrics] = await item.getVisibleText().then((text) => text.split('\n'));
return { name, metrics };
})
);
};
const [inputs, filters, outputs] = await Promise.all([
testSubjects.find(SUBJ_PIPELINE_SECTION_PREFIX + 'Inputs').then(getSectionItems),
testSubjects.find(SUBJ_PIPELINE_SECTION_PREFIX + 'Filters').then(getSectionItems),
testSubjects.find(SUBJ_PIPELINE_SECTION_PREFIX + 'Outputs').then(getSectionItems),
]);
return { inputs, filters, outputs };
}
})();
}
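As a quick illustration, getSectionItems above turns an item's visible text into a { name, metrics } pair; the sample text below is assumed, mirroring the values asserted in the functional tests:

// Hypothetical visible text for a single pipeline statement (the generator input).
const visibleText = 'generator\nmygen01\n62.5 e/s emitted';
const [name, ...metrics] = visibleText.split('\n');
console.log({ name, metrics }); // { name: 'generator', metrics: ['mygen01', '62.5 e/s emitted'] }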

@@ -64,6 +64,18 @@ export function MonitoringLogstashPipelinesProvider({ getService, getPageObjects
}, []);
}
async clickPipeline(id) {
const anchors = await testSubjects.findAll(SUBJ_PIPELINES_IDS);
for (let i = 0; i < anchors.length; i++) {
const anchor = anchors[i];
if ((await anchor.getVisibleText()) === id) {
return anchor.click();
}
}
throw new Error(`pipeline with id ${id} not found`);
}
async clickIdCol() {
const headerCell = await testSubjects.find(SUBJ_TABLE_SORT_ID_COL);
const button = await headerCell.findByTagName('button');