mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
[ML] Get buckets and get overall buckets api integration tests (#197226)
## Summary Part of: [#188459](https://github.com/elastic/kibana/issues/188459). API integration tests for: `POST /internal/ml/anomaly_detectors/:jobId/results/buckets` and `POST /internal/ml/anomaly_detectors/:jobId/results/overall_buckets`.
This commit is contained in:
parent
9c4e67c985
commit
4a7ce9062e
3 changed files with 248 additions and 0 deletions
|
@ -0,0 +1,110 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { DATAFEED_STATE, JOB_STATE } from '@kbn/ml-plugin/common';
|
||||
import expect from '@kbn/expect';
|
||||
import { USER } from '../../../../functional/services/ml/security_common';
|
||||
import { FtrProviderContext } from '../../../ftr_provider_context';
|
||||
import { getCommonRequestHeader } from '../../../../functional/services/ml/common_api';
|
||||
|
||||
export default ({ getService }: FtrProviderContext) => {
|
||||
const esArchiver = getService('esArchiver');
|
||||
const ml = getService('ml');
|
||||
const supertest = getService('supertestWithoutAuth');
|
||||
|
||||
const jobId = `fq_single_buckets`;
|
||||
|
||||
async function getBuckets({
|
||||
_jobId,
|
||||
timestamp,
|
||||
expectedStatusCode = 200,
|
||||
}: {
|
||||
_jobId: string;
|
||||
timestamp?: number;
|
||||
expectedStatusCode?: number;
|
||||
}) {
|
||||
const endpoint = `/internal/ml/anomaly_detectors/${_jobId}/results/buckets/${
|
||||
timestamp ? `${timestamp}` : ''
|
||||
}`;
|
||||
|
||||
const { body, status } = await supertest
|
||||
.post(endpoint)
|
||||
.auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER))
|
||||
.set(getCommonRequestHeader('1'))
|
||||
.send({});
|
||||
|
||||
ml.api.assertResponseStatusCode(expectedStatusCode, status, body);
|
||||
|
||||
return body;
|
||||
}
|
||||
|
||||
describe('POST anomaly_detectors results buckets', () => {
|
||||
before(async () => {
|
||||
await esArchiver.loadIfNeeded('x-pack/test/functional/es_archives/ml/farequote');
|
||||
await ml.testResources.setKibanaTimeZoneToUTC();
|
||||
|
||||
const jobConfig = ml.commonConfig.getADFqSingleMetricJobConfig(jobId);
|
||||
const datafeedConfig = ml.commonConfig.getADFqDatafeedConfig(jobId);
|
||||
|
||||
await ml.api.createAnomalyDetectionJob(jobConfig);
|
||||
|
||||
await ml.api.createDatafeed(datafeedConfig);
|
||||
|
||||
await ml.api.openAnomalyDetectionJob(jobId);
|
||||
await ml.api.startDatafeed(datafeedConfig.datafeed_id, {
|
||||
start: '0',
|
||||
end: String(Date.now()),
|
||||
});
|
||||
await ml.api.waitForDatafeedState(datafeedConfig.datafeed_id, DATAFEED_STATE.STOPPED);
|
||||
await ml.api.waitForJobState(jobId, JOB_STATE.CLOSED);
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await ml.api.cleanMlIndices();
|
||||
});
|
||||
|
||||
it('should get buckets with correct structure for a job', async () => {
|
||||
const result = await getBuckets({ _jobId: jobId });
|
||||
|
||||
expect(result.count).to.be.greaterThan(0);
|
||||
expect(result.buckets).not.to.be.empty();
|
||||
expect(result.buckets[0]).to.have.keys(
|
||||
'job_id',
|
||||
'timestamp',
|
||||
'anomaly_score',
|
||||
'bucket_span',
|
||||
'initial_anomaly_score',
|
||||
'event_count',
|
||||
'is_interim',
|
||||
'bucket_influencers',
|
||||
'processing_time_ms',
|
||||
'result_type'
|
||||
);
|
||||
});
|
||||
|
||||
it('should get a single bucket when timestamp is specified', async () => {
|
||||
const allBuckets = await getBuckets({ _jobId: jobId });
|
||||
const sampleTimestamp = allBuckets.buckets[0].timestamp;
|
||||
const result = await getBuckets({ _jobId: jobId, timestamp: sampleTimestamp });
|
||||
|
||||
expect(result.count).to.eql(1);
|
||||
expect(result.buckets).to.have.length(1);
|
||||
});
|
||||
|
||||
it('should fail with non-existent job', async () => {
|
||||
await getBuckets({ _jobId: 'non-existent-job', expectedStatusCode: 404 });
|
||||
});
|
||||
|
||||
it('should fail with non-existent timestamp', async () => {
|
||||
await getBuckets({
|
||||
_jobId: jobId,
|
||||
timestamp: 1,
|
||||
expectedStatusCode: 404,
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
|
@ -0,0 +1,136 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { DATAFEED_STATE, JOB_STATE } from '@kbn/ml-plugin/common';
|
||||
import expect from '@kbn/expect';
|
||||
import { USER } from '../../../../functional/services/ml/security_common';
|
||||
import { FtrProviderContext } from '../../../ftr_provider_context';
|
||||
import { getCommonRequestHeader } from '../../../../functional/services/ml/common_api';
|
||||
|
||||
export default ({ getService }: FtrProviderContext) => {
|
||||
const esArchiver = getService('esArchiver');
|
||||
const ml = getService('ml');
|
||||
const supertest = getService('supertestWithoutAuth');
|
||||
|
||||
const jobId1 = `fq_single_overall_buckets_1`;
|
||||
const jobId2 = `fq_single_overall_buckets_2`;
|
||||
|
||||
async function getOverallBuckets({
|
||||
jobId,
|
||||
topN = 1,
|
||||
bucketSpan = '1h',
|
||||
start = 0,
|
||||
end = Date.now(),
|
||||
overallScore,
|
||||
expectedStatusCode = 200,
|
||||
}: {
|
||||
jobId: string;
|
||||
bucketSpan?: string;
|
||||
topN?: number;
|
||||
start?: number;
|
||||
end?: number;
|
||||
overallScore?: number;
|
||||
expectedStatusCode?: number;
|
||||
}) {
|
||||
const endpoint = `/internal/ml/anomaly_detectors/${jobId}/results/overall_buckets`;
|
||||
|
||||
const { body, status } = await supertest
|
||||
.post(endpoint)
|
||||
.auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER))
|
||||
.set(getCommonRequestHeader('1'))
|
||||
.send({
|
||||
topN,
|
||||
bucketSpan,
|
||||
start,
|
||||
end,
|
||||
...(overallScore !== undefined && { overall_score: overallScore }),
|
||||
});
|
||||
|
||||
ml.api.assertResponseStatusCode(expectedStatusCode, status, body);
|
||||
|
||||
return body;
|
||||
}
|
||||
|
||||
describe('POST anomaly_detectors results overall_buckets', () => {
|
||||
before(async () => {
|
||||
await esArchiver.loadIfNeeded('x-pack/test/functional/es_archives/ml/farequote');
|
||||
await ml.testResources.setKibanaTimeZoneToUTC();
|
||||
|
||||
for (const jobId of [jobId1, jobId2]) {
|
||||
const jobConfig = ml.commonConfig.getADFqSingleMetricJobConfig(jobId);
|
||||
const datafeedConfig = ml.commonConfig.getADFqDatafeedConfig(jobId);
|
||||
|
||||
await ml.api.createAnomalyDetectionJob(jobConfig);
|
||||
|
||||
await ml.api.createDatafeed(datafeedConfig);
|
||||
|
||||
await ml.api.openAnomalyDetectionJob(jobId);
|
||||
await ml.api.startDatafeed(datafeedConfig.datafeed_id, {
|
||||
start: '0',
|
||||
end: String(Date.now()),
|
||||
});
|
||||
await ml.api.waitForDatafeedState(datafeedConfig.datafeed_id, DATAFEED_STATE.STOPPED);
|
||||
await ml.api.waitForJobState(jobId, JOB_STATE.CLOSED);
|
||||
}
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await ml.api.cleanMlIndices();
|
||||
});
|
||||
|
||||
it('should get overall buckets with correct structure for multiple jobs', async () => {
|
||||
const result = await getOverallBuckets({
|
||||
jobId: `${jobId1},${jobId2}`,
|
||||
});
|
||||
|
||||
expect(result.count).to.be.greaterThan(0);
|
||||
expect(result.overall_buckets).not.to.be.empty();
|
||||
expect(result.overall_buckets[0]).to.have.keys(
|
||||
'bucket_span',
|
||||
'is_interim',
|
||||
'jobs',
|
||||
'overall_score',
|
||||
'result_type',
|
||||
'timestamp'
|
||||
);
|
||||
expect(result.overall_buckets[0].jobs.length).to.equal(2);
|
||||
});
|
||||
|
||||
it('should respect the bucket_span parameter', async () => {
|
||||
const result1h = await getOverallBuckets({
|
||||
jobId: `${jobId1},${jobId2}`,
|
||||
bucketSpan: '1h',
|
||||
});
|
||||
const result2h = await getOverallBuckets({
|
||||
jobId: `${jobId1},${jobId2}`,
|
||||
bucketSpan: '2h',
|
||||
});
|
||||
|
||||
expect(result1h.overall_buckets[0].bucket_span).to.not.equal(
|
||||
result2h.overall_buckets[0].bucket_span
|
||||
);
|
||||
});
|
||||
|
||||
it('should filter results based on overall_score', async () => {
|
||||
const result = await getOverallBuckets({
|
||||
jobId: `${jobId1},${jobId2}`,
|
||||
overallScore: 5,
|
||||
});
|
||||
|
||||
for (const bucket of result.overall_buckets) {
|
||||
expect(bucket.overall_score).to.be.greaterThan(5);
|
||||
}
|
||||
});
|
||||
|
||||
it('should fail with non-existent job', async () => {
|
||||
await getOverallBuckets({
|
||||
jobId: 'non-existent-job',
|
||||
expectedStatusCode: 404,
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
|
@ -19,5 +19,7 @@ export default function ({ loadTestFile }: FtrProviderContext) {
|
|||
loadTestFile(require.resolve('./create_with_spaces'));
|
||||
loadTestFile(require.resolve('./forecast_with_spaces'));
|
||||
loadTestFile(require.resolve('./create_with_datafeed'));
|
||||
loadTestFile(require.resolve('./get_buckets'));
|
||||
loadTestFile(require.resolve('./get_overall_buckets'));
|
||||
});
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue