Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00.
[ML] Migrate server side Mocha tests to Jest. (#65651)
Migrates job validation related server side tests from Mocha to Jest.
parent 0d3ddbe9d0, commit 6a6b3edd7f
18 changed files with 298 additions and 222 deletions
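The migration is mostly mechanical: `@kbn/expect` assertions map one-to-one onto Jest matchers, as the hunks below show repeatedly. A minimal sketch of that mapping, collected in one place (the test names and values here are illustrative, not from the commit):

```ts
describe('assertion migration sketch', () => {
  it('maps @kbn/expect matchers onto Jest matchers', () => {
    const ids = ['success_bucket_span'];
    // expect(ids).to.eql([...])          ->  expect(ids).toStrictEqual([...])
    expect(ids).toStrictEqual(['success_bucket_span']);
    // expect(x).to.be(y) / to.equal(y)   ->  expect(x).toBe(y)
    expect(ids.includes('success_bucket_span')).toBe(true);
    // expect(fn).to.not.throwError(msg)  ->  expect(fn).not.toThrow(msg)
    expect(() => undefined).not.toThrow('Not initialized.');
  });
});
```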
@@ -4,8 +4,11 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import expect from '@kbn/expect';
import { estimateBucketSpanFactory } from '../bucket_span_estimator';
import { APICaller } from 'kibana/server';

import { ES_AGGREGATION } from '../../../common/constants/aggregation_types';

import { estimateBucketSpanFactory, BucketSpanEstimatorData } from './bucket_span_estimator';

// Mock callWithRequest with the ability to simulate returning different
// permission settings. On each call using `ml.privilegeCheck` we retrieve
@@ -14,7 +17,7 @@ import { estimateBucketSpanFactory } from '../bucket_span_estimator';
// sufficient permissions should be returned, the second time insufficient
// permissions.
const permissions = [false, true];
const callWithRequest = method => {
const callWithRequest: APICaller = (method: string) => {
  return new Promise(resolve => {
    if (method === 'ml.privilegeCheck') {
      resolve({
@@ -28,34 +31,19 @@ const callWithRequest = method => {
      return;
    }
    resolve({});
  });
  }) as Promise<any>;
};

const callWithInternalUser = () => {
const callWithInternalUser: APICaller = () => {
  return new Promise(resolve => {
    resolve({});
  });
  }) as Promise<any>;
};

// mock xpack_main plugin
function mockXpackMainPluginFactory(isEnabled = false, licenseType = 'platinum') {
  return {
    info: {
      isAvailable: () => true,
      feature: () => ({
        isEnabled: () => isEnabled,
      }),
      license: {
        getType: () => licenseType,
      },
    },
  };
}

// mock configuration to be passed to the estimator
const formConfig = {
  aggTypes: ['count'],
  duration: {},
const formConfig: BucketSpanEstimatorData = {
  aggTypes: [ES_AGGREGATION.COUNT],
  duration: { start: 0, end: 1 },
  fields: [null],
  index: '',
  query: {
@@ -64,13 +52,15 @@ const formConfig = {
      must_not: [],
    },
  },
  splitField: undefined,
  timeField: undefined,
};

describe('ML - BucketSpanEstimator', () => {
  it('call factory', () => {
    expect(function() {
      estimateBucketSpanFactory(callWithRequest, callWithInternalUser);
    }).to.not.throwError('Not initialized.');
      estimateBucketSpanFactory(callWithRequest, callWithInternalUser, false);
    }).not.toThrow('Not initialized.');
  });

  it('call factory and estimator with security disabled', done => {
@@ -78,44 +68,29 @@ describe('ML - BucketSpanEstimator', () => {
      const estimateBucketSpan = estimateBucketSpanFactory(
        callWithRequest,
        callWithInternalUser,
        mockXpackMainPluginFactory()
        true
      );

      estimateBucketSpan(formConfig).catch(catchData => {
        expect(catchData).to.be('Unable to retrieve cluster setting search.max_buckets');
        expect(catchData).toBe('Unable to retrieve cluster setting search.max_buckets');

        done();
      });
    }).to.not.throwError('Not initialized.');
    }).not.toThrow('Not initialized.');
  });

  it('call factory and estimator with security enabled and sufficient permissions.', done => {
  it('call factory and estimator with security enabled.', done => {
    expect(function() {
      const estimateBucketSpan = estimateBucketSpanFactory(
        callWithRequest,
        callWithInternalUser,
        mockXpackMainPluginFactory(true)
        false
      );
      estimateBucketSpan(formConfig).catch(catchData => {
        expect(catchData).to.be('Unable to retrieve cluster setting search.max_buckets');
        expect(catchData).toBe('Unable to retrieve cluster setting search.max_buckets');

        done();
      });
    }).to.not.throwError('Not initialized.');
  });

  it('call factory and estimator with security enabled and insufficient permissions.', done => {
    expect(function() {
      const estimateBucketSpan = estimateBucketSpanFactory(
        callWithRequest,
        callWithInternalUser,
        mockXpackMainPluginFactory(true)
      );

      estimateBucketSpan(formConfig).catch(catchData => {
        expect(catchData).to.be('Insufficient permissions to call bucket span estimation.');
        done();
      });
    }).to.not.throwError('Not initialized.');
    }).not.toThrow('Not initialized.');
  });
});
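For reference, the typed mock pattern used above, condensed: the mock function satisfies the overloaded `APICaller` signature by casting the returned promise. The resolved payload for `ml.privilegeCheck` is elided in the hunk, so `{}` stands in for it here; `callWithRequestMock` is an illustrative name.

```ts
import { APICaller } from 'kibana/server';

// Condensed restatement of the mock above; the real resolved payload for
// 'ml.privilegeCheck' is elided in the hunk, so {} stands in for it here.
const callWithRequestMock: APICaller = (method: string) => {
  return new Promise(resolve => {
    if (method === 'ml.privilegeCheck') {
      resolve({}); // would carry the simulated permission response
      return;
    }
    resolve({});
  }) as Promise<any>;
};
```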
@@ -6,14 +6,19 @@

import { APICaller } from 'kibana/server';
import { TypeOf } from '@kbn/config-schema';

import { DeepPartial } from '../../../common/types/common';

import { validateJobSchema } from '../../routes/schemas/job_validation_schema';

type ValidateJobPayload = TypeOf<typeof validateJobSchema>;
import { ValidationMessage } from './messages';

export type ValidateJobPayload = TypeOf<typeof validateJobSchema>;

export function validateJob(
  callAsCurrentUser: APICaller,
  payload: ValidateJobPayload,
  kbnVersion: string,
  callAsInternalUser: APICaller,
  isSecurityDisabled: boolean
): string[];
  payload?: DeepPartial<ValidateJobPayload>,
  kbnVersion?: string,
  callAsInternalUser?: APICaller,
  isSecurityDisabled?: boolean
): Promise<ValidationMessage[]>;
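The declaration now accepts `DeepPartial` payloads and resolves to typed messages, which is what lets the tests feed deliberately incomplete jobs without `as any` at every call site. A sketch of a call the loosened signature permits (`partialPayload` is an illustrative name; the shape mirrors payloads used in the tests below):

```ts
import { DeepPartial } from '../../../common/types/common';
import { ValidateJobPayload } from './job_validation';

// An incomplete job config that the loosened d.ts signature now accepts:
const partialPayload: DeepPartial<ValidateJobPayload> = {
  job: { analysis_config: { detectors: [] } },
};
```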
@@ -4,16 +4,24 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import expect from '@kbn/expect';
import { validateJob } from '../job_validation';
import { APICaller } from 'kibana/server';

import { validateJob } from './job_validation';

// mock callWithRequest
const callWithRequest = () => {
const callWithRequest: APICaller = (method: string) => {
  return new Promise(resolve => {
    if (method === 'fieldCaps') {
      resolve({ fields: [] });
      return;
    }
    resolve({});
  });
  }) as Promise<any>;
};

// Note: The tests cast `payload` as any
// so we can simulate possible runtime payloads
// that don't satisfy the TypeScript specs.
describe('ML - validateJob', () => {
  it('calling factory without payload throws an error', done => {
    validateJob(callWithRequest).then(
@@ -61,7 +69,7 @@ describe('ML - validateJob', () => {
    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);

      expect(ids).to.eql([
      expect(ids).toStrictEqual([
        'job_id_empty',
        'detectors_empty',
        'bucket_span_empty',
@@ -70,10 +78,14 @@ describe('ML - validateJob', () => {
    });
  });

  const jobIdTests = (testIds, messageId) => {
  const jobIdTests = (testIds: string[], messageId: string) => {
    const promises = testIds.map(id => {
      const payload = { job: { analysis_config: { detectors: [] } } };
      payload.job.job_id = id;
      const payload = {
        job: {
          analysis_config: { detectors: [] },
          job_id: id,
        },
      };
      return validateJob(callWithRequest, payload).catch(() => {
        new Error('Promise should not fail for jobIdTests.');
      });
@@ -81,19 +93,21 @@ describe('ML - validateJob', () => {

    return Promise.all(promises).then(testResults => {
      testResults.forEach(messages => {
        const ids = messages.map(m => m.id);
        expect(ids.includes(messageId)).to.equal(true);
        expect(Array.isArray(messages)).toBe(true);
        if (Array.isArray(messages)) {
          const ids = messages.map(m => m.id);
          expect(ids.includes(messageId)).toBe(true);
        }
      });
    });
  };

  const jobGroupIdTest = (testIds, messageId) => {
    const payload = { job: { analysis_config: { detectors: [] } } };
    payload.job.groups = testIds;
  const jobGroupIdTest = (testIds: string[], messageId: string) => {
    const payload = { job: { analysis_config: { detectors: [] }, groups: testIds } };

    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids.includes(messageId)).to.equal(true);
      expect(ids.includes(messageId)).toBe(true);
    });
  };

@@ -126,10 +140,9 @@ describe('ML - validateJob', () => {
    return jobGroupIdTest(validTestIds, 'job_group_id_valid');
  });

  const bucketSpanFormatTests = (testFormats, messageId) => {
  const bucketSpanFormatTests = (testFormats: string[], messageId: string) => {
    const promises = testFormats.map(format => {
      const payload = { job: { analysis_config: { detectors: [] } } };
      payload.job.analysis_config.bucket_span = format;
      const payload = { job: { analysis_config: { bucket_span: format, detectors: [] } } };
      return validateJob(callWithRequest, payload).catch(() => {
        new Error('Promise should not fail for bucketSpanFormatTests.');
      });
@@ -137,8 +150,11 @@ describe('ML - validateJob', () => {

    return Promise.all(promises).then(testResults => {
      testResults.forEach(messages => {
        const ids = messages.map(m => m.id);
        expect(ids.includes(messageId)).to.equal(true);
        expect(Array.isArray(messages)).toBe(true);
        if (Array.isArray(messages)) {
          const ids = messages.map(m => m.id);
          expect(ids.includes(messageId)).toBe(true);
        }
      });
    });
  };
@@ -152,7 +168,7 @@ describe('ML - validateJob', () => {
  });

  it('at least one detector function is empty', () => {
    const payload = { job: { analysis_config: { detectors: [] } } };
    const payload = { job: { analysis_config: { detectors: [] as Array<{ function?: string }> } } };
    payload.job.analysis_config.detectors.push({
      function: 'count',
    });
@@ -165,19 +181,19 @@ describe('ML - validateJob', () => {

    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids.includes('detectors_function_empty')).to.equal(true);
      expect(ids.includes('detectors_function_empty')).toBe(true);
    });
  });

  it('detector function is not empty', () => {
    const payload = { job: { analysis_config: { detectors: [] } } };
    const payload = { job: { analysis_config: { detectors: [] as Array<{ function?: string }> } } };
    payload.job.analysis_config.detectors.push({
      function: 'count',
    });

    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids.includes('detectors_function_not_empty')).to.equal(true);
      expect(ids.includes('detectors_function_not_empty')).toBe(true);
    });
  });

@@ -189,7 +205,7 @@ describe('ML - validateJob', () => {

    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids.includes('index_fields_invalid')).to.equal(true);
      expect(ids.includes('index_fields_invalid')).toBe(true);
    });
  });

@@ -201,11 +217,11 @@ describe('ML - validateJob', () => {

    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids.includes('index_fields_valid')).to.equal(true);
      expect(ids.includes('index_fields_valid')).toBe(true);
    });
  });

  const getBasicPayload = () => ({
  const getBasicPayload = (): any => ({
    job: {
      job_id: 'test',
      analysis_config: {
@@ -214,7 +230,7 @@ describe('ML - validateJob', () => {
          {
            function: 'count',
          },
        ],
        ] as Array<{ function: string; by_field_name?: string; partition_field_name?: string }>,
        influencers: [],
      },
      data_description: { time_field: '@timestamp' },
@@ -224,7 +240,7 @@ describe('ML - validateJob', () => {
  });

  it('throws an error because job.analysis_config.influencers is not an Array', done => {
    const payload = getBasicPayload();
    const payload = getBasicPayload() as any;
    delete payload.job.analysis_config.influencers;

    validateJob(callWithRequest, payload).then(
@@ -237,11 +253,11 @@ describe('ML - validateJob', () => {
  });

  it('detect duplicate detectors', () => {
    const payload = getBasicPayload();
    const payload = getBasicPayload() as any;
    payload.job.analysis_config.detectors.push({ function: 'count' });
    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql([
      expect(ids).toStrictEqual([
        'job_id_valid',
        'detectors_function_not_empty',
        'detectors_duplicates',
@@ -253,7 +269,7 @@ describe('ML - validateJob', () => {
  });

  it('dedupe duplicate messages', () => {
    const payload = getBasicPayload();
    const payload = getBasicPayload() as any;
    // in this test setup, the following configuration passes
    // the duplicate detectors check, but would return the same
    // 'field_not_aggregatable' message for both detectors.
@@ -264,7 +280,7 @@ describe('ML - validateJob', () => {
    ];
    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql([
      expect(ids).toStrictEqual([
        'job_id_valid',
        'detectors_function_not_empty',
        'index_fields_valid',
@@ -278,7 +294,7 @@ describe('ML - validateJob', () => {
    const payload = getBasicPayload();
    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql([
      expect(ids).toStrictEqual([
        'job_id_valid',
        'detectors_function_not_empty',
        'index_fields_valid',
@@ -288,7 +304,7 @@ describe('ML - validateJob', () => {
  });

  it('categorization job using mlcategory passes aggregatable field check', () => {
    const payload = {
    const payload: any = {
      job: {
        job_id: 'categorization_test',
        analysis_config: {
@@ -310,7 +326,7 @@ describe('ML - validateJob', () => {

    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql([
      expect(ids).toStrictEqual([
        'job_id_valid',
        'detectors_function_not_empty',
        'index_fields_valid',
@@ -322,7 +338,7 @@ describe('ML - validateJob', () => {
  });

  it('non-existent field reported as non aggregatable', () => {
    const payload = {
    const payload: any = {
      job: {
        job_id: 'categorization_test',
        analysis_config: {
@@ -343,7 +359,7 @@ describe('ML - validateJob', () => {

    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql([
      expect(ids).toStrictEqual([
        'job_id_valid',
        'detectors_function_not_empty',
        'index_fields_valid',
@@ -354,7 +370,7 @@ describe('ML - validateJob', () => {
  });

  it('script field not reported as non aggregatable', () => {
    const payload = {
    const payload: any = {
      job: {
        job_id: 'categorization_test',
        analysis_config: {
@@ -385,7 +401,7 @@ describe('ML - validateJob', () => {

    return validateJob(callWithRequest, payload).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql([
      expect(ids).toStrictEqual([
        'job_id_valid',
        'detectors_function_not_empty',
        'index_fields_valid',
@@ -399,19 +415,19 @@ describe('ML - validateJob', () => {
  // the following two tests validate the correct template rendering of
  // urls in messages with {{version}} in them to be replaced with the
  // specified version. (defaulting to 'current')
  const docsTestPayload = getBasicPayload();
  const docsTestPayload = getBasicPayload() as any;
  docsTestPayload.job.analysis_config.detectors = [{ function: 'count', by_field_name: 'airline' }];
  it('creates a docs url pointing to the current docs version', () => {
    return validateJob(callWithRequest, docsTestPayload).then(messages => {
      const message = messages[messages.findIndex(m => m.id === 'field_not_aggregatable')];
      expect(message.url.search('/current/')).not.to.be(-1);
      expect(message.url.search('/current/')).not.toBe(-1);
    });
  });

  it('creates a docs url pointing to the master docs version', () => {
    return validateJob(callWithRequest, docsTestPayload, 'master').then(messages => {
      const message = messages[messages.findIndex(m => m.id === 'field_not_aggregatable')];
      expect(message.url.search('/master/')).not.to.be(-1);
      expect(message.url.search('/master/')).not.toBe(-1);
    });
  });
});
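One non-obvious Jest-era change above: because the loosened `validateJob` declaration no longer guarantees an array result, the `jobIdTests` and `bucketSpanFormatTests` helpers assert and narrow with `Array.isArray` before mapping. The shape, isolated (`expectMessageId` is an illustrative name):

```ts
// Assert-then-narrow: toBe(true) fails the test if messages is not an array,
// and the if-guard narrows the type so the map call stays checked.
function expectMessageId(messages: unknown, messageId: string) {
  expect(Array.isArray(messages)).toBe(true);
  if (Array.isArray(messages)) {
    const ids = messages.map(m => m.id);
    expect(ids.includes(messageId)).toBe(true);
  }
}
```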
x-pack/plugins/ml/server/models/job_validation/messages.d.ts (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

export interface ValidationMessage {
  id: string;
  url: string;
}
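The new `messages.d.ts` gives the tests a minimal shape for validation results; the bucket span tests below type their callbacks with it. A one-liner showing the intended use (`idsOf` is an illustrative helper, not from the commit):

```ts
import { ValidationMessage } from './messages';

// Typing the resolved messages keeps the id-mapping in the tests checked:
const idsOf = (messages: ValidationMessage[]): string[] => messages.map(m => m.id);
```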
@@ -4,22 +4,24 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import expect from '@kbn/expect';
import { validateBucketSpan } from '../validate_bucket_span';
import { SKIP_BUCKET_SPAN_ESTIMATION } from '../../../../common/constants/validation';
import { SKIP_BUCKET_SPAN_ESTIMATION } from '../../../common/constants/validation';

import { ValidationMessage } from './messages';
// @ts-ignore
import { validateBucketSpan } from './validate_bucket_span';

// farequote2017 snapshot mock search response
// it returns a mock for the response of PolledDataChecker's search request
// to get an aggregation of non_empty_buckets with an interval of 1m.
// this allows us to test bucket span estimation.
import mockFareQuoteSearchResponse from './mock_farequote_search_response';
import mockFareQuoteSearchResponse from './__mocks__/mock_farequote_search_response.json';

// it_ops_app_logs 2017 snapshot mock search response
// sparse data with a low number of buckets
import mockItSearchResponse from './mock_it_search_response';
import mockItSearchResponse from './__mocks__/mock_it_search_response.json';

// mock callWithRequestFactory
const callWithRequestFactory = mockSearchResponse => {
const callWithRequestFactory = (mockSearchResponse: any) => {
  return () => {
    return new Promise(resolve => {
      resolve(mockSearchResponse);
@@ -86,17 +88,17 @@ describe('ML - validateBucketSpan', () => {
    };

    return validateBucketSpan(callWithRequestFactory(mockFareQuoteSearchResponse), job).then(
      messages => {
      (messages: ValidationMessage[]) => {
        const ids = messages.map(m => m.id);
        expect(ids).to.eql([]);
        expect(ids).toStrictEqual([]);
      }
    );
  });

  const getJobConfig = bucketSpan => ({
  const getJobConfig = (bucketSpan: string) => ({
    analysis_config: {
      bucket_span: bucketSpan,
      detectors: [],
      detectors: [] as Array<{ function?: string }>,
      influencers: [],
    },
    data_description: { time_field: '@timestamp' },
@@ -111,9 +113,9 @@ describe('ML - validateBucketSpan', () => {
      callWithRequestFactory(mockFareQuoteSearchResponse),
      job,
      duration
    ).then(messages => {
    ).then((messages: ValidationMessage[]) => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['success_bucket_span']);
      expect(ids).toStrictEqual(['success_bucket_span']);
    });
  });

@@ -125,9 +127,9 @@ describe('ML - validateBucketSpan', () => {
      callWithRequestFactory(mockFareQuoteSearchResponse),
      job,
      duration
    ).then(messages => {
    ).then((messages: ValidationMessage[]) => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['bucket_span_high']);
      expect(ids).toStrictEqual(['bucket_span_high']);
    });
  });

@@ -135,14 +137,18 @@ describe('ML - validateBucketSpan', () => {
    return;
  }

  const testBucketSpan = (bucketSpan, mockSearchResponse, test) => {
  const testBucketSpan = (
    bucketSpan: string,
    mockSearchResponse: any,
    test: (ids: string[]) => void
  ) => {
    const job = getJobConfig(bucketSpan);
    job.analysis_config.detectors.push({
      function: 'count',
    });

    return validateBucketSpan(callWithRequestFactory(mockSearchResponse), job, {}).then(
      messages => {
      (messages: ValidationMessage[]) => {
        const ids = messages.map(m => m.id);
        test(ids);
      }
@@ -151,13 +157,13 @@ describe('ML - validateBucketSpan', () => {

  it('farequote count detector, bucket span estimation matches 15m', () => {
    return testBucketSpan('15m', mockFareQuoteSearchResponse, ids => {
      expect(ids).to.eql(['success_bucket_span']);
      expect(ids).toStrictEqual(['success_bucket_span']);
    });
  });

  it('farequote count detector, bucket span estimation does not match 1m', () => {
    return testBucketSpan('1m', mockFareQuoteSearchResponse, ids => {
      expect(ids).to.eql(['bucket_span_estimation_mismatch']);
      expect(ids).toStrictEqual(['bucket_span_estimation_mismatch']);
    });
  });

@@ -167,7 +173,7 @@ describe('ML - validateBucketSpan', () => {
  // should result in a lower bucket span estimation.
  it('it_ops_app_logs count detector, bucket span estimation matches 6h', () => {
    return testBucketSpan('6h', mockItSearchResponse, ids => {
      expect(ids).to.eql(['success_bucket_span']);
      expect(ids).toStrictEqual(['success_bucket_span']);
    });
  });
});
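The fixtures moved from JS modules into plain JSON under `__mocks__/` and are imported directly. A sketch of the resulting pattern; note that importing `.json` from TypeScript assumes `resolveJsonModule` is enabled in the relevant tsconfig, which is an assumption here and not shown in the diff:

```ts
// Assumes `resolveJsonModule` in the tsconfig; the fixture path is the one
// added in this commit.
import mockFareQuoteSearchResponse from './__mocks__/mock_farequote_search_response.json';

const callWithRequestFactory = (mockSearchResponse: any) => {
  return () => {
    return new Promise(resolve => {
      resolve(mockSearchResponse);
    });
  };
};
```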
@@ -7,4 +7,7 @@
import { APICaller } from 'kibana/server';
import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';

export function validateCardinality(callAsCurrentUser: APICaller, job: CombinedJob): any[];
export function validateCardinality(
  callAsCurrentUser: APICaller,
  job?: CombinedJob
): Promise<any[]>;
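With the `.d.ts` now requiring `CombinedJob`, the tests that deliberately pass malformed jobs use a double cast through `unknown` rather than `any`. The idiom, isolated (`brokenJob` is an illustrative name):

```ts
import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';

// A deliberately incomplete job: casting through `unknown` is the explicit
// way to defeat the CombinedJob type for negative tests.
const brokenJob = ({ analysis_config: {}, datafeed_config: {} } as unknown) as CombinedJob;
```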
@@ -5,11 +5,15 @@
 */

import _ from 'lodash';
import expect from '@kbn/expect';
import { validateCardinality } from '../validate_cardinality';

import mockFareQuoteCardinality from './mock_farequote_cardinality';
import mockFieldCaps from './mock_field_caps';
import { APICaller } from 'kibana/server';

import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';

import mockFareQuoteCardinality from './__mocks__/mock_farequote_cardinality.json';
import mockFieldCaps from './__mocks__/mock_field_caps.json';

import { validateCardinality } from './validate_cardinality';

const mockResponses = {
  search: mockFareQuoteCardinality,
@@ -17,8 +21,8 @@ const mockResponses = {
};

// mock callWithRequestFactory
const callWithRequestFactory = (responses, fail = false) => {
  return requestName => {
const callWithRequestFactory = (responses: Record<string, any>, fail = false): APICaller => {
  return (requestName: string) => {
    return new Promise((resolve, reject) => {
      const response = responses[requestName];
      if (fail) {
@@ -26,7 +30,7 @@ const callWithRequestFactory = (responses, fail = false) => {
      } else {
        resolve(response);
      }
    });
    }) as Promise<any>;
  };
};

@@ -39,21 +43,23 @@ describe('ML - validateCardinality', () => {
  });

  it('called with non-valid job argument #1, missing analysis_config', done => {
    validateCardinality(callWithRequestFactory(mockResponses), {}).then(
    validateCardinality(callWithRequestFactory(mockResponses), {} as CombinedJob).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
  });

  it('called with non-valid job argument #2, missing datafeed_config', done => {
    validateCardinality(callWithRequestFactory(mockResponses), { analysis_config: {} }).then(
    validateCardinality(callWithRequestFactory(mockResponses), {
      analysis_config: {},
    } as CombinedJob).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
  });

  it('called with non-valid job argument #3, missing datafeed_config.indices', done => {
    const job = { analysis_config: {}, datafeed_config: {} };
    const job = { analysis_config: {}, datafeed_config: {} } as CombinedJob;
    validateCardinality(callWithRequestFactory(mockResponses), job).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
@@ -61,7 +67,10 @@ describe('ML - validateCardinality', () => {
  });

  it('called with non-valid job argument #4, missing data_description', done => {
    const job = { analysis_config: {}, datafeed_config: { indices: [] } };
    const job = ({
      analysis_config: {},
      datafeed_config: { indices: [] },
    } as unknown) as CombinedJob;
    validateCardinality(callWithRequestFactory(mockResponses), job).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
@@ -69,7 +78,11 @@ describe('ML - validateCardinality', () => {
  });

  it('called with non-valid job argument #5, missing data_description.time_field', done => {
    const job = { analysis_config: {}, data_description: {}, datafeed_config: { indices: [] } };
    const job = ({
      analysis_config: {},
      data_description: {},
      datafeed_config: { indices: [] },
    } as unknown) as CombinedJob;
    validateCardinality(callWithRequestFactory(mockResponses), job).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
@@ -77,11 +90,11 @@ describe('ML - validateCardinality', () => {
  });

  it('called with non-valid job argument #6, missing analysis_config.influencers', done => {
    const job = {
    const job = ({
      analysis_config: {},
      datafeed_config: { indices: [] },
      data_description: { time_field: '@timestamp' },
    };
    } as unknown) as CombinedJob;
    validateCardinality(callWithRequestFactory(mockResponses), job).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
@@ -89,21 +102,21 @@ describe('ML - validateCardinality', () => {
  });

  it('minimum job configuration to pass cardinality check code', () => {
    const job = {
    const job = ({
      analysis_config: { detectors: [], influencers: [] },
      data_description: { time_field: '@timestamp' },
      datafeed_config: {
        indices: [],
      },
    };
    } as unknown) as CombinedJob;

    return validateCardinality(callWithRequestFactory(mockResponses), job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql([]);
      expect(ids).toStrictEqual([]);
    });
  });

  const getJobConfig = fieldName => ({
  const getJobConfig = (fieldName: string) => ({
    analysis_config: {
      detectors: [
        {
@@ -119,11 +132,18 @@ describe('ML - validateCardinality', () => {
    },
  });

  const testCardinality = (fieldName, cardinality, test) => {
  const testCardinality = (
    fieldName: string,
    cardinality: number,
    test: (ids: string[]) => void
  ) => {
    const job = getJobConfig(fieldName);
    const mockCardinality = _.cloneDeep(mockResponses);
    mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
    return validateCardinality(callWithRequestFactory(mockCardinality), job, {}).then(messages => {
    return validateCardinality(
      callWithRequestFactory(mockCardinality),
      (job as unknown) as CombinedJob
    ).then(messages => {
      const ids = messages.map(m => m.id);
      test(ids);
    });
@@ -132,26 +152,34 @@ describe('ML - validateCardinality', () => {
  it(`field '_source' not aggregatable`, () => {
    const job = getJobConfig('partition_field_name');
    job.analysis_config.detectors[0].partition_field_name = '_source';
    return validateCardinality(callWithRequestFactory(mockResponses), job).then(messages => {
    return validateCardinality(
      callWithRequestFactory(mockResponses),
      (job as unknown) as CombinedJob
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['field_not_aggregatable']);
      expect(ids).toStrictEqual(['field_not_aggregatable']);
    });
  });

  it(`field 'airline' aggregatable`, () => {
    const job = getJobConfig('partition_field_name');
    return validateCardinality(callWithRequestFactory(mockResponses), job).then(messages => {
    return validateCardinality(
      callWithRequestFactory(mockResponses),
      (job as unknown) as CombinedJob
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['success_cardinality']);
      expect(ids).toStrictEqual(['success_cardinality']);
    });
  });

  it('field not aggregatable', () => {
    const job = getJobConfig('partition_field_name');
    return validateCardinality(callWithRequestFactory({}), job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['field_not_aggregatable']);
    });
    return validateCardinality(callWithRequestFactory({}), (job as unknown) as CombinedJob).then(
      messages => {
        const ids = messages.map(m => m.id);
        expect(ids).toStrictEqual(['field_not_aggregatable']);
      }
    );
  });

  it('fields not aggregatable', () => {
@@ -160,107 +188,110 @@ describe('ML - validateCardinality', () => {
      function: 'count',
      partition_field_name: 'airline',
    });
    return validateCardinality(callWithRequestFactory({}, true), job).then(messages => {
    return validateCardinality(
      callWithRequestFactory({}, true),
      (job as unknown) as CombinedJob
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['fields_not_aggregatable']);
      expect(ids).toStrictEqual(['fields_not_aggregatable']);
    });
  });

  it('valid partition field cardinality', () => {
    return testCardinality('partition_field_name', 50, ids => {
      expect(ids).to.eql(['success_cardinality']);
      expect(ids).toStrictEqual(['success_cardinality']);
    });
  });

  it('too high partition field cardinality', () => {
    return testCardinality('partition_field_name', 1001, ids => {
      expect(ids).to.eql(['cardinality_partition_field']);
      expect(ids).toStrictEqual(['cardinality_partition_field']);
    });
  });

  it('valid by field cardinality', () => {
    return testCardinality('by_field_name', 50, ids => {
      expect(ids).to.eql(['success_cardinality']);
      expect(ids).toStrictEqual(['success_cardinality']);
    });
  });

  it('too high by field cardinality', () => {
    return testCardinality('by_field_name', 1001, ids => {
      expect(ids).to.eql(['cardinality_by_field']);
      expect(ids).toStrictEqual(['cardinality_by_field']);
    });
  });

  it('valid over field cardinality', () => {
    return testCardinality('over_field_name', 50, ids => {
      expect(ids).to.eql(['success_cardinality']);
      expect(ids).toStrictEqual(['success_cardinality']);
    });
  });

  it('too low over field cardinality', () => {
    return testCardinality('over_field_name', 9, ids => {
      expect(ids).to.eql(['cardinality_over_field_low']);
      expect(ids).toStrictEqual(['cardinality_over_field_low']);
    });
  });

  it('too high over field cardinality', () => {
    return testCardinality('over_field_name', 1000001, ids => {
      expect(ids).to.eql(['cardinality_over_field_high']);
      expect(ids).toStrictEqual(['cardinality_over_field_high']);
    });
  });

  const cardinality = 10000;
  it(`disabled model_plot, over field cardinality of ${cardinality} doesn't trigger a warning`, () => {
    const job = getJobConfig('over_field_name');
    const job = (getJobConfig('over_field_name') as unknown) as CombinedJob;
    job.model_plot_config = { enabled: false };
    const mockCardinality = _.cloneDeep(mockResponses);
    mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
    return validateCardinality(callWithRequestFactory(mockCardinality), job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['success_cardinality']);
      expect(ids).toStrictEqual(['success_cardinality']);
    });
  });

  it(`enabled model_plot, over field cardinality of ${cardinality} triggers a model plot warning`, () => {
    const job = getJobConfig('over_field_name');
    const job = (getJobConfig('over_field_name') as unknown) as CombinedJob;
    job.model_plot_config = { enabled: true };
    const mockCardinality = _.cloneDeep(mockResponses);
    mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
    return validateCardinality(callWithRequestFactory(mockCardinality), job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['cardinality_model_plot_high']);
      expect(ids).toStrictEqual(['cardinality_model_plot_high']);
    });
  });

  it(`disabled model_plot, by field cardinality of ${cardinality} triggers a field cardinality warning`, () => {
    const job = getJobConfig('by_field_name');
    const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
    job.model_plot_config = { enabled: false };
    const mockCardinality = _.cloneDeep(mockResponses);
    mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
    return validateCardinality(callWithRequestFactory(mockCardinality), job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['cardinality_by_field']);
      expect(ids).toStrictEqual(['cardinality_by_field']);
    });
  });

  it(`enabled model_plot, by field cardinality of ${cardinality} triggers a model plot warning and field cardinality warning`, () => {
    const job = getJobConfig('by_field_name');
    const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
    job.model_plot_config = { enabled: true };
    const mockCardinality = _.cloneDeep(mockResponses);
    mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
    return validateCardinality(callWithRequestFactory(mockCardinality), job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['cardinality_model_plot_high', 'cardinality_by_field']);
      expect(ids).toStrictEqual(['cardinality_model_plot_high', 'cardinality_by_field']);
    });
  });

  it(`enabled model_plot with terms, by field cardinality of ${cardinality} triggers just field cardinality warning`, () => {
    const job = getJobConfig('by_field_name');
    const job = (getJobConfig('by_field_name') as unknown) as CombinedJob;
    job.model_plot_config = { enabled: true, terms: 'AAL,AAB' };
    const mockCardinality = _.cloneDeep(mockResponses);
    mockCardinality.search.aggregations.airline_cardinality.value = cardinality;
    return validateCardinality(callWithRequestFactory(mockCardinality), job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['cardinality_by_field']);
      expect(ids).toStrictEqual(['cardinality_by_field']);
    });
  });
});
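The cardinality helper mutates a shared JSON fixture, so it deep-clones first; worth calling out since a JSON module import is a singleton across the test file. A self-contained sketch of the pattern (the inline `mockResponses` stands in for the real fixture):

```ts
import _ from 'lodash';

// Stand-in for './__mocks__/mock_farequote_cardinality.json'.
const mockResponses = {
  search: { aggregations: { airline_cardinality: { value: 50 } } },
};

// Clone before mutating so one test's cardinality override cannot leak
// into the next test that reads the same imported fixture.
const mockCardinality = _.cloneDeep(mockResponses);
mockCardinality.search.aggregations.airline_cardinality.value = 1001;
```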
@@ -4,19 +4,25 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import expect from '@kbn/expect';
import { validateInfluencers } from '../validate_influencers';
import { APICaller } from 'kibana/server';

import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';

import { validateInfluencers } from './validate_influencers';

describe('ML - validateInfluencers', () => {
  it('called without arguments throws an error', done => {
    validateInfluencers().then(
    validateInfluencers(
      (undefined as unknown) as APICaller,
      (undefined as unknown) as CombinedJob
    ).then(
      () => done(new Error('Promise should not resolve for this test without job argument.')),
      () => done()
    );
  });

  it('called with non-valid job argument #1, missing analysis_config', done => {
    validateInfluencers(undefined, {}).then(
    validateInfluencers((undefined as unknown) as APICaller, ({} as unknown) as CombinedJob).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
@@ -28,7 +34,7 @@ describe('ML - validateInfluencers', () => {
      datafeed_config: { indices: [] },
      data_description: { time_field: '@timestamp' },
    };
    validateInfluencers(undefined, job).then(
    validateInfluencers((undefined as unknown) as APICaller, (job as unknown) as CombinedJob).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
@@ -40,25 +46,29 @@ describe('ML - validateInfluencers', () => {
      datafeed_config: { indices: [] },
      data_description: { time_field: '@timestamp' },
    };
    validateInfluencers(undefined, job).then(
    validateInfluencers((undefined as unknown) as APICaller, (job as unknown) as CombinedJob).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
  });

  const getJobConfig = (influencers = [], detectors = []) => ({
    analysis_config: { detectors, influencers },
    data_description: { time_field: '@timestamp' },
    datafeed_config: {
      indices: [],
    },
  });
  const getJobConfig: (
    influencers?: string[],
    detectors?: CombinedJob['analysis_config']['detectors']
  ) => CombinedJob = (influencers = [], detectors = []) =>
    (({
      analysis_config: { detectors, influencers },
      data_description: { time_field: '@timestamp' },
      datafeed_config: {
        indices: [],
      },
    } as unknown) as CombinedJob);

  it('success_influencer', () => {
    const job = getJobConfig(['airline']);
    return validateInfluencers(undefined, job).then(messages => {
    return validateInfluencers((undefined as unknown) as APICaller, job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['success_influencers']);
      expect(ids).toStrictEqual(['success_influencers']);
    });
  });

@@ -69,31 +79,30 @@ describe('ML - validateInfluencers', () => {
        {
          detector_description: 'count',
          function: 'count',
          rules: [],
          detector_index: 0,
        },
      ]
    );

    return validateInfluencers(undefined, job).then(messages => {
    return validateInfluencers((undefined as unknown) as APICaller, job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql([]);
      expect(ids).toStrictEqual([]);
    });
  });

  it('influencer_low', () => {
    const job = getJobConfig();
    return validateInfluencers(undefined, job).then(messages => {
    return validateInfluencers((undefined as unknown) as APICaller, job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['influencer_low']);
      expect(ids).toStrictEqual(['influencer_low']);
    });
  });

  it('influencer_high', () => {
    const job = getJobConfig(['i1', 'i2', 'i3', 'i4']);
    return validateInfluencers(undefined, job).then(messages => {
    return validateInfluencers((undefined as unknown) as APICaller, job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['influencer_high']);
      expect(ids).toStrictEqual(['influencer_high']);
    });
  });

@@ -105,14 +114,13 @@ describe('ML - validateInfluencers', () => {
          detector_description: 'count',
          function: 'count',
          partition_field_name: 'airline',
          rules: [],
          detector_index: 0,
        },
      ]
    );
    return validateInfluencers(undefined, job).then(messages => {
    return validateInfluencers((undefined as unknown) as APICaller, job).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['influencer_low_suggestion']);
      expect(ids).toStrictEqual(['influencer_low_suggestion']);
    });
  });

@@ -124,27 +132,24 @@ describe('ML - validateInfluencers', () => {
          detector_description: 'count',
          function: 'count',
          partition_field_name: 'partition_field',
          rules: [],
          detector_index: 0,
        },
        {
          detector_description: 'count',
          function: 'count',
          by_field_name: 'by_field',
          rules: [],
          detector_index: 0,
        },
        {
          detector_description: 'count',
          function: 'count',
          over_field_name: 'over_field',
          rules: [],
          detector_index: 0,
        },
      ]
    );
    return validateInfluencers(undefined, job).then(messages => {
      expect(messages).to.eql([
    return validateInfluencers((undefined as unknown) as APICaller, job).then(messages => {
      expect(messages).toStrictEqual([
        {
          id: 'influencer_low_suggestions',
          influencerSuggestion: '["partition_field","by_field","over_field"]',
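`validateInfluencers` is now typed, so the negative tests can no longer simply omit arguments; they pass `undefined` through an explicit `unknown` cast instead. The idiom, isolated (`noCaller` is an illustrative name):

```ts
import { APICaller } from 'kibana/server';

// Explicitly pass 'no caller' without loosening the function signature:
const noCaller = (undefined as unknown) as APICaller;
```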
@@ -4,19 +4,23 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import { APICaller } from 'kibana/server';

import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';

import { validateJobObject } from './validate_job_object';

const INFLUENCER_LOW_THRESHOLD = 0;
const INFLUENCER_HIGH_THRESHOLD = 4;
const DETECTOR_FIELD_NAMES_THRESHOLD = 1;

export async function validateInfluencers(callWithRequest, job) {
export async function validateInfluencers(callWithRequest: APICaller, job: CombinedJob) {
  validateJobObject(job);

  const messages = [];
  const influencers = job.analysis_config.influencers;

  const detectorFieldNames = [];
  const detectorFieldNames: string[] = [];
  job.analysis_config.detectors.forEach(d => {
    if (d.by_field_name) {
      detectorFieldNames.push(d.by_field_name);
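The only typing change needed in the implementation itself is annotating the empty array; without it, `detectorFieldNames` would be inferred too loosely for the later pushes to stay checked. A two-line illustration:

```ts
const detectorFieldNames: string[] = [];
detectorFieldNames.push('airline'); // checked: only strings may be pushed now
```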
@@ -5,28 +5,32 @@
 */

import _ from 'lodash';
import expect from '@kbn/expect';
import { isValidTimeField, validateTimeRange } from '../validate_time_range';

import mockTimeField from './mock_time_field';
import mockTimeFieldNested from './mock_time_field_nested';
import mockTimeRange from './mock_time_range';
import { APICaller } from 'kibana/server';

import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';

import { isValidTimeField, validateTimeRange } from './validate_time_range';

import mockTimeField from './__mocks__/mock_time_field.json';
import mockTimeFieldNested from './__mocks__/mock_time_field_nested.json';
import mockTimeRange from './__mocks__/mock_time_range.json';

const mockSearchResponse = {
  fieldCaps: mockTimeField,
  search: mockTimeRange,
};

const callWithRequestFactory = resp => {
  return path => {
const callWithRequestFactory = (resp: any): APICaller => {
  return (path: string) => {
    return new Promise(resolve => {
      resolve(resp[path]);
    });
    }) as Promise<any>;
  };
};

function getMinimalValidJob() {
  return {
  return ({
    analysis_config: {
      bucket_span: '15m',
      detectors: [],
@@ -36,12 +40,15 @@ function getMinimalValidJob() {
    datafeed_config: {
      indices: [],
    },
  };
  } as unknown) as CombinedJob;
}

describe('ML - isValidTimeField', () => {
  it('called without job config argument triggers Promise rejection', done => {
    isValidTimeField(callWithRequestFactory(mockSearchResponse)).then(
    isValidTimeField(
      callWithRequestFactory(mockSearchResponse),
      (undefined as unknown) as CombinedJob
    ).then(
      () => done(new Error('Promise should not resolve for this test without job argument.')),
      () => done()
    );
@@ -50,7 +57,7 @@ describe('ML - isValidTimeField', () => {
  it('time_field `@timestamp`', done => {
    isValidTimeField(callWithRequestFactory(mockSearchResponse), getMinimalValidJob()).then(
      valid => {
        expect(valid).to.be(true);
        expect(valid).toBe(true);
        done();
      },
      () => done(new Error('isValidTimeField Promise failed for time_field `@timestamp`.'))
@@ -71,7 +78,7 @@ describe('ML - isValidTimeField', () => {
      mockJobConfigNestedDate
    ).then(
      valid => {
        expect(valid).to.be(true);
        expect(valid).toBe(true);
        done();
      },
      () => done(new Error('isValidTimeField Promise failed for time_field `metadata.timestamp`.'))
@@ -81,14 +88,19 @@ describe('ML - isValidTimeField', () => {

describe('ML - validateTimeRange', () => {
  it('called without arguments', done => {
    validateTimeRange(callWithRequestFactory(mockSearchResponse)).then(
    validateTimeRange(
      callWithRequestFactory(mockSearchResponse),
      (undefined as unknown) as CombinedJob
    ).then(
      () => done(new Error('Promise should not resolve for this test without job argument.')),
      () => done()
    );
  });

  it('called with non-valid job argument #2, missing datafeed_config', done => {
    validateTimeRange(callWithRequestFactory(mockSearchResponse), { analysis_config: {} }).then(
    validateTimeRange(callWithRequestFactory(mockSearchResponse), ({
      analysis_config: {},
    } as unknown) as CombinedJob).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
@@ -96,7 +108,10 @@ describe('ML - validateTimeRange', () => {

  it('called with non-valid job argument #3, missing datafeed_config.indices', done => {
    const job = { analysis_config: {}, datafeed_config: {} };
    validateTimeRange(callWithRequestFactory(mockSearchResponse), job).then(
    validateTimeRange(
      callWithRequestFactory(mockSearchResponse),
      (job as unknown) as CombinedJob
    ).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
@@ -104,7 +119,10 @@ describe('ML - validateTimeRange', () => {

  it('called with non-valid job argument #4, missing data_description', done => {
    const job = { analysis_config: {}, datafeed_config: { indices: [] } };
    validateTimeRange(callWithRequestFactory(mockSearchResponse), job).then(
    validateTimeRange(
      callWithRequestFactory(mockSearchResponse),
      (job as unknown) as CombinedJob
    ).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
@@ -112,7 +130,10 @@ describe('ML - validateTimeRange', () => {

  it('called with non-valid job argument #5, missing data_description.time_field', done => {
    const job = { analysis_config: {}, data_description: {}, datafeed_config: { indices: [] } };
    validateTimeRange(callWithRequestFactory(mockSearchResponse), job).then(
    validateTimeRange(
      callWithRequestFactory(mockSearchResponse),
      (job as unknown) as CombinedJob
    ).then(
      () => done(new Error('Promise should not resolve for this test without valid job argument.')),
      () => done()
    );
@@ -128,7 +149,7 @@ describe('ML - validateTimeRange', () => {
      duration
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['time_field_invalid']);
      expect(ids).toStrictEqual(['time_field_invalid']);
    });
  });

@@ -142,7 +163,7 @@ describe('ML - validateTimeRange', () => {
      duration
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['time_range_short']);
      expect(ids).toStrictEqual(['time_range_short']);
    });
  });

@@ -154,7 +175,7 @@ describe('ML - validateTimeRange', () => {
      duration
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['time_range_short']);
      expect(ids).toStrictEqual(['time_range_short']);
    });
  });

@@ -166,7 +187,7 @@ describe('ML - validateTimeRange', () => {
      duration
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['time_range_short']);
      expect(ids).toStrictEqual(['time_range_short']);
    });
  });

@@ -178,7 +199,7 @@ describe('ML - validateTimeRange', () => {
      duration
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['success_time_range']);
      expect(ids).toStrictEqual(['success_time_range']);
    });
  });

@@ -190,7 +211,7 @@ describe('ML - validateTimeRange', () => {
      duration
    ).then(messages => {
      const ids = messages.map(m => m.id);
      expect(ids).to.eql(['time_range_before_epoch']);
      expect(ids).toStrictEqual(['time_range_before_epoch']);
    });
  });
});
@@ -37,9 +37,9 @@ export async function isValidTimeField(callAsCurrentUser: APICaller, job: Combin
    fields: [timeField],
  });

  let fieldType = fieldCaps.fields[timeField]?.date?.type;
  let fieldType = fieldCaps?.fields[timeField]?.date?.type;
  if (fieldType === undefined) {
    fieldType = fieldCaps.fields[timeField]?.date_nanos?.type;
    fieldType = fieldCaps?.fields[timeField]?.date_nanos?.type;
  }
  return fieldType === ES_FIELD_TYPES.DATE || fieldType === ES_FIELD_TYPES.DATE_NANOS;
}
@@ -47,7 +47,7 @@ export async function isValidTimeField(callAsCurrentUser: APICaller, job: Combin
export async function validateTimeRange(
  callAsCurrentUser: APICaller,
  job: CombinedJob,
  timeRange: TimeRange | undefined
  timeRange?: TimeRange
) {
  const messages: ValidateTimeRangeMessage[] = [];

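Two small runtime-safe loosenings close out the diff: the added `fieldCaps?.` lets the whole lookup short-circuit when a mock returns no field caps at all, and `timeRange?: TimeRange` lets tests omit the argument entirely. A sketch of the lookup under an assumed response shape (`FieldCapsLike` and `dateTypeOf` are illustrative, not from the commit):

```ts
interface FieldCapsLike {
  fields: Record<string, { date?: { type: string }; date_nanos?: { type: string } }>;
}

// With the leading `?.` the chain yields undefined instead of throwing
// when fieldCaps itself is missing (e.g. an empty mocked response).
function dateTypeOf(fieldCaps: FieldCapsLike | undefined, timeField: string) {
  return (
    fieldCaps?.fields[timeField]?.date?.type ??
    fieldCaps?.fields[timeField]?.date_nanos?.type
  );
}
```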