Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 17:59:23 -04:00
* De-duplicate pipeline ids based on the ephemeral_id changing
* Add tests
parent 6d517e16e6
commit fca66a0a28
6 changed files with 1662 additions and 2 deletions
@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
 import moment from 'moment';
-import { get } from 'lodash';
+import { get, uniq } from 'lodash';
 import { createQuery } from '../create_query';
 import { LogstashMetric } from '../metrics';
@@ -74,5 +74,6 @@ export async function getLogstashPipelineIds(req, logstashIndexPattern, { cluste
 
   const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
   const response = await callWithRequest(req, 'search', params);
-  return get(response, 'aggregations.nested_context.composite_data.buckets', []).map(bucket => bucket.key);
+  const data = get(response, 'aggregations.nested_context.composite_data.buckets', []).map(bucket => bucket.key);
+  return uniq(data, item => item.id);
 }
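The fix keys de-duplication on the pipeline id alone: when a pipeline restarts, its ephemeral_id changes, and the composite aggregation presumably returns one bucket per (id, ephemeral_id) pair, so the same pipeline previously showed up more than once in the list. Below is a minimal sketch of the de-duplication step using hypothetical bucket data; note that the uniq(array, iteratee) form in the diff is lodash v3's API, which lodash v4 renamed to uniqBy:

const { uniqBy } = require('lodash');

// Hypothetical bucket keys from the composite aggregation: the same
// pipeline id appears twice because a restart changed its ephemeral_id.
const data = [
  { id: 'main', ephemeral_id: 'e-111' },
  { id: 'main', ephemeral_id: 'e-222' }, // same pipeline after a restart
  { id: 'ingest', ephemeral_id: 'e-333' },
];

// Keep the first bucket seen for each pipeline id.
const deduped = uniqBy(data, item => item.id);
console.log(deduped.map(item => item.id)); // [ 'main', 'ingest' ]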
File diff suppressed because one or more lines are too long
@@ -10,6 +10,7 @@ export default function ({ loadTestFile }) {
     loadTestFile(require.resolve('./nodes'));
     loadTestFile(require.resolve('./node_detail'));
     loadTestFile(require.resolve('./multicluster_pipelines'));
+    loadTestFile(require.resolve('./pipelines'));
   });
 }
 
@@ -0,0 +1,66 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import expect from '@kbn/expect';
+import pipelinesFixture from './fixtures/pipelines';
+
+export default function ({ getService }) {
+  const supertest = getService('supertest');
+  const esArchiver = getService('esArchiver');
+
+  describe('pipelines', () => {
+    const archive = 'monitoring/logstash/changing_pipelines';
+    const timeRange = {
+      min: '2019-11-04T15:40:44.855Z',
+      max: '2019-11-04T15:50:38.667Z'
+    };
+    const pagination = {
+      size: 10,
+      index: 0
+    };
+    const sort = {
+      field: 'id',
+      direction: 'asc'
+    };
+
+    before('load archive', () => {
+      return esArchiver.load(archive);
+    });
+
+    after('unload archive', () => {
+      return esArchiver.unload(archive);
+    });
+
+    it('should return paginated pipelines', async () => {
+      const { body } = await supertest
+        .post('/api/monitoring/v1/clusters/TUjQLdHNTh2SB9Wy0gOtWg/logstash/pipelines')
+        .set('kbn-xsrf', 'xxx')
+        .send({ timeRange, pagination, sort })
+        .expect(200);
+
+      expect(body).to.eql(pipelinesFixture);
+    });
+
+    it('should get one of each after enough pagination', async () => {
+      async function getIds(page) {
+        const { body } = await supertest
+          .post('/api/monitoring/v1/clusters/TUjQLdHNTh2SB9Wy0gOtWg/logstash/pipelines')
+          .set('kbn-xsrf', 'xxx')
+          .send({ timeRange, pagination: { ...pagination, index: page }, sort })
+          .expect(200);
+
+        return body.pipelines.map(pipeline => pipeline.id);
+      }
+
+      const ids = [
+        ...await getIds(0),
+        ...await getIds(1),
+        ...await getIds(2),
+      ];
+      expect(ids.length).to.be(26);
+    });
+  });
+}
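The pagination arithmetic in the second test: with a page size of 10, pages 0 through 2 can hold at most 30 entries, and the changing_pipelines archive evidently contains 26 distinct pipelines, so a correctly de-duplicated endpoint returns exactly 26 ids across the three pages, while duplicate (id, ephemeral_id) buckets would inflate that count. A stricter, hypothetical follow-up assertion (not part of this commit) could also verify the ids are distinct:

// Hypothetical addition inside the same test, after ids is built:
// assert the 26 collected ids are unique, not merely 26 in total.
const uniqueIds = new Set(ids);
expect(uniqueIds.size).to.be(ids.length);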
Binary file not shown.
File diff suppressed because it is too large