mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 01:13:23 -04:00
[Stack Monitoring] Use UI time range filter in logstash pipeline details query (#150032)
## Summary closes #145226 This PR changes the Stack Monitoring Logstash pipeline-related queries to use the UI time range filter instead of the pipeline version's `firstSeen` and `lastSeen` attributes.  ### How to test - Set up your [local environment with Logstash](https://github.com/elastic/kibana/blob/main/x-pack/plugins/monitoring/dev_docs/how_to/local_setup.md#standalone-cluster) - In Stack Monitoring, navigate to "Standalone Cluster" and click on a Logstash Pipeline tile --------- Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent
9d33af3200
commit
eafbc47468
7 changed files with 89 additions and 20 deletions
|
@ -6,7 +6,7 @@
|
|||
*/
|
||||
|
||||
import * as rt from 'io-ts';
|
||||
import { clusterUuidRT, ccsRT } from '../shared';
|
||||
import { clusterUuidRT, ccsRT, timeRangeRT } from '../shared';
|
||||
|
||||
export const postLogstashPipelineRequestParamsRT = rt.type({
|
||||
clusterUuid: clusterUuidRT,
|
||||
|
@ -21,4 +21,15 @@ export const postLogstashPipelineRequestPayloadRT = rt.intersection([
|
|||
rt.partial({
|
||||
detailVertexId: rt.string,
|
||||
}),
|
||||
rt.type({
|
||||
timeRange: timeRangeRT,
|
||||
}),
|
||||
]);
|
||||
|
||||
export type PostLogstashPipelineRequestParams = rt.TypeOf<
|
||||
typeof postLogstashPipelineRequestParamsRT
|
||||
>;
|
||||
|
||||
export type PostLogstashPipelineRequestPayload = rt.TypeOf<
|
||||
typeof postLogstashPipelineRequestPayloadRT
|
||||
>;
|
||||
|
|
|
@ -10,6 +10,7 @@ import { find } from 'lodash';
|
|||
import moment from 'moment';
|
||||
import { useRouteMatch } from 'react-router-dom';
|
||||
import { useKibana, useUiSetting } from '@kbn/kibana-react-plugin/public';
|
||||
import { EuiSpacer } from '@elastic/eui';
|
||||
import { GlobalStateContext } from '../../contexts/global_state_context';
|
||||
import { ComponentProps } from '../../route_init';
|
||||
import { List } from '../../../components/logstash/pipeline_viewer/models/list';
|
||||
|
@ -56,6 +57,7 @@ export const LogStashPipelinePage: React.FC<ComponentProps> = ({ clusters }) =>
|
|||
});
|
||||
|
||||
const getPageData = useCallback(async () => {
|
||||
const bounds = services.data?.query.timefilter.timefilter.getBounds();
|
||||
const url = pipelineHash
|
||||
? `../api/monitoring/v1/clusters/${clusterUuid}/logstash/pipeline/${pipelineId}/${pipelineHash}`
|
||||
: `../api/monitoring/v1/clusters/${clusterUuid}/logstash/pipeline/${pipelineId}`;
|
||||
|
@ -65,6 +67,10 @@ export const LogStashPipelinePage: React.FC<ComponentProps> = ({ clusters }) =>
|
|||
body: JSON.stringify({
|
||||
ccs,
|
||||
detailVertexId: detailVertexId || undefined,
|
||||
timeRange: {
|
||||
min: bounds.min.toISOString(),
|
||||
max: bounds.max.toISOString(),
|
||||
},
|
||||
}),
|
||||
});
|
||||
const myData = response;
|
||||
|
@ -112,6 +118,7 @@ export const LogStashPipelinePage: React.FC<ComponentProps> = ({ clusters }) =>
|
|||
minIntervalSeconds,
|
||||
pipelineHash,
|
||||
pipelineId,
|
||||
services.data?.query.timefilter.timefilter,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
|
@ -168,16 +175,17 @@ export const LogStashPipelinePage: React.FC<ComponentProps> = ({ clusters }) =>
|
|||
pipelineHash={pipelineHash}
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
{pipelineState && (
|
||||
{pipelineState && (
|
||||
<div>
|
||||
<EuiSpacer size="s" />
|
||||
<PipelineViewer
|
||||
pipeline={List.fromPipeline(Pipeline.fromPipelineGraph(pipelineState.config.graph))}
|
||||
timeseriesTooltipXValueFormatter={timeseriesTooltipXValueFormatter}
|
||||
setDetailVertexId={onVertexChange}
|
||||
detailVertex={data.vertex ? vertexFactory(null, data.vertex) : null}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</LogstashTemplate>
|
||||
);
|
||||
};
|
||||
|
|
|
@ -6,6 +6,10 @@
|
|||
*/
|
||||
|
||||
import { get } from 'lodash';
|
||||
import {
|
||||
PostLogstashPipelineRequestParams,
|
||||
PostLogstashPipelineRequestPayload,
|
||||
} from '../../../common/http_api/logstash';
|
||||
import { PipelineNotFoundError } from '../errors';
|
||||
import { getPipelineStateDocument } from './get_pipeline_state_document';
|
||||
import { getPipelineStatsAggregation } from './get_pipeline_stats_aggregation';
|
||||
|
@ -120,7 +124,11 @@ export function _enrichStateWithStatsAggregation(
|
|||
}
|
||||
|
||||
export async function getPipeline(
|
||||
req: LegacyRequest,
|
||||
req: LegacyRequest<
|
||||
PostLogstashPipelineRequestParams,
|
||||
unknown,
|
||||
PostLogstashPipelineRequestPayload
|
||||
>,
|
||||
config: MonitoringConfig,
|
||||
clusterUuid: string,
|
||||
pipelineId: string,
|
||||
|
@ -129,8 +137,8 @@ export async function getPipeline(
|
|||
// Determine metrics' timeseries interval based on version's timespan
|
||||
const minIntervalSeconds = Math.max(config.ui.min_interval_seconds, 30);
|
||||
const timeseriesInterval = calculateTimeseriesInterval(
|
||||
Number(version.firstSeen),
|
||||
Number(version.lastSeen),
|
||||
req.payload.timeRange.min,
|
||||
req.payload.timeRange.max,
|
||||
Number(minIntervalSeconds)
|
||||
);
|
||||
|
||||
|
|
|
@ -5,6 +5,10 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
PostLogstashPipelineRequestParams,
|
||||
PostLogstashPipelineRequestPayload,
|
||||
} from '../../../common/http_api/logstash';
|
||||
import { LegacyRequest, PipelineVersion } from '../../types';
|
||||
import { getIndexPatterns, getLogstashDataset } from '../cluster/get_index_patterns';
|
||||
import { createQuery } from '../create_query';
|
||||
|
@ -163,7 +167,11 @@ export function getPipelineStatsAggregation({
|
|||
pipelineId,
|
||||
version,
|
||||
}: {
|
||||
req: LegacyRequest;
|
||||
req: LegacyRequest<
|
||||
PostLogstashPipelineRequestParams,
|
||||
unknown,
|
||||
PostLogstashPipelineRequestPayload
|
||||
>;
|
||||
timeseriesInterval: number;
|
||||
clusterUuid: string;
|
||||
pipelineId: string;
|
||||
|
@ -201,8 +209,8 @@ export function getPipelineStatsAggregation({
|
|||
},
|
||||
];
|
||||
|
||||
const start = version.lastSeen - timeseriesInterval * 1000;
|
||||
const end = version.lastSeen;
|
||||
const start = req.payload.timeRange.min - timeseriesInterval * 1000;
|
||||
const end = req.payload.timeRange.max;
|
||||
|
||||
const dataset = 'node_stats';
|
||||
const type = 'logstash_stats';
|
||||
|
|
|
@ -6,6 +6,10 @@
|
|||
*/
|
||||
|
||||
import { get } from 'lodash';
|
||||
import {
|
||||
PostLogstashPipelineRequestParams,
|
||||
PostLogstashPipelineRequestPayload,
|
||||
} from '../../../common/http_api/logstash';
|
||||
import { PipelineNotFoundError } from '../errors';
|
||||
import { getPipelineStateDocument } from './get_pipeline_state_document';
|
||||
import { getPipelineVertexStatsAggregation } from './get_pipeline_vertex_stats_aggregation';
|
||||
|
@ -135,7 +139,11 @@ export function _enrichVertexStateWithStatsAggregation(
|
|||
}
|
||||
|
||||
export async function getPipelineVertex(
|
||||
req: LegacyRequest,
|
||||
req: LegacyRequest<
|
||||
PostLogstashPipelineRequestParams,
|
||||
unknown,
|
||||
PostLogstashPipelineRequestPayload
|
||||
>,
|
||||
config: MonitoringConfig,
|
||||
clusterUuid: string,
|
||||
pipelineId: string,
|
||||
|
@ -145,8 +153,8 @@ export async function getPipelineVertex(
|
|||
// Determine metrics' timeseries interval based on version's timespan
|
||||
const minIntervalSeconds = Math.max(config.ui.min_interval_seconds, 30);
|
||||
const timeseriesInterval = calculateTimeseriesInterval(
|
||||
Number(version.firstSeen),
|
||||
Number(version.lastSeen),
|
||||
req.payload.timeRange.min,
|
||||
req.payload.timeRange.max,
|
||||
Number(minIntervalSeconds)
|
||||
);
|
||||
|
||||
|
|
|
@ -5,6 +5,10 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
PostLogstashPipelineRequestParams,
|
||||
PostLogstashPipelineRequestPayload,
|
||||
} from '../../../common/http_api/logstash';
|
||||
import { LegacyRequest, PipelineVersion } from '../../types';
|
||||
import { getIndexPatterns, getLogstashDataset } from '../cluster/get_index_patterns';
|
||||
import { createQuery } from '../create_query';
|
||||
|
@ -214,7 +218,11 @@ export function getPipelineVertexStatsAggregation({
|
|||
version,
|
||||
vertexId,
|
||||
}: {
|
||||
req: LegacyRequest;
|
||||
req: LegacyRequest<
|
||||
PostLogstashPipelineRequestParams,
|
||||
unknown,
|
||||
PostLogstashPipelineRequestPayload
|
||||
>;
|
||||
timeSeriesIntervalInSeconds: number;
|
||||
clusterUuid: string;
|
||||
pipelineId: string;
|
||||
|
@ -253,8 +261,8 @@ export function getPipelineVertexStatsAggregation({
|
|||
},
|
||||
];
|
||||
|
||||
const start = version.firstSeen;
|
||||
const end = version.lastSeen;
|
||||
const start = req.payload.timeRange.min;
|
||||
const end = req.payload.timeRange.max;
|
||||
|
||||
const dataset = 'node_stats';
|
||||
const type = 'logstash_stats';
|
||||
|
|
|
@ -12,6 +12,7 @@ export default function ({ getService, getPageObjects }) {
|
|||
const overview = getService('monitoringClusterOverview');
|
||||
const pipelinesList = getService('monitoringLogstashPipelines');
|
||||
const pipelineViewer = getService('monitoringLogstashPipelineViewer');
|
||||
const pageObjects = getPageObjects(['timePicker']);
|
||||
|
||||
describe('Logstash pipeline viewer', () => {
|
||||
const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);
|
||||
|
@ -38,11 +39,28 @@ export default function ({ getService, getPageObjects }) {
|
|||
it('displays pipelines inputs, filters and ouputs', async () => {
|
||||
const { inputs, filters, outputs } = await pipelineViewer.getPipelineDefinition();
|
||||
|
||||
expect(inputs).to.eql([{ name: 'generator', metrics: ['mygen01', '62.5 e/s emitted'] }]);
|
||||
expect(inputs).to.eql([{ name: 'generator', metrics: ['mygen01', '1.25k e/s emitted'] }]);
|
||||
expect(filters).to.eql([
|
||||
{ name: 'sleep', metrics: ['1%', '94.86 ms/e', '62.5 e/s received'] },
|
||||
{ name: 'sleep', metrics: ['1%', '96.44 ms/e', '1.25k e/s received'] },
|
||||
]);
|
||||
expect(outputs).to.eql([{ name: 'stdout', metrics: ['0%', '0 ms/e', '1.25k e/s received'] }]);
|
||||
});
|
||||
|
||||
it('Should change the pipeline data when date range changes', async () => {
|
||||
await pageObjects.timePicker.setAbsoluteRange(
|
||||
'Jan 22, 2018 @ 08:00:00.000',
|
||||
'Jan 22, 2018 @ 10:00:00.000'
|
||||
);
|
||||
|
||||
const { inputs, filters, outputs } = await pipelineViewer.getPipelineDefinition();
|
||||
|
||||
expect(inputs).to.eql([{ name: 'generator', metrics: ['mygen01', '643.75 e/s emitted'] }]);
|
||||
expect(filters).to.eql([
|
||||
{ name: 'sleep', metrics: ['1%', '96.37 ms/e', '643.75 e/s received'] },
|
||||
]);
|
||||
expect(outputs).to.eql([
|
||||
{ name: 'stdout', metrics: ['0%', '0 ms/e', '643.75 e/s received'] },
|
||||
]);
|
||||
expect(outputs).to.eql([{ name: 'stdout', metrics: ['0%', '0 ms/e', '62.5 e/s received'] }]);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue