[ML] Add option for per-partition categorization to categorization job wizard (#75061)
This commit is contained in: parent b944dd3c96, commit 3469e164f4
33 changed files with 1230 additions and 34 deletions
@@ -22,3 +22,5 @@ export enum ANOMALY_THRESHOLD {
}

export const PARTITION_FIELDS = ['partition_field', 'over_field', 'by_field'] as const;
export const JOB_ID = 'job_id';
export const PARTITION_FIELD_VALUE = 'partition_field_value';
@@ -43,6 +43,34 @@ export const getMessages = once(() => {
const createJobsDocsUrl = `https://www.elastic.co/guide/en/machine-learning/{{version}}/create-jobs.html`;

return {
categorizer_detector_missing_per_partition_field: {
status: VALIDATION_STATUS.ERROR,
text: i18n.translate(
'xpack.ml.models.jobValidation.messages.categorizerMissingPerPartitionFieldMessage',
{
defaultMessage:
'Partition field must be set for detectors that reference "mlcategory" when per-partition categorization is enabled.',
}
),
url:
'https://www.elastic.co/guide/en/machine-learning/{{version}}/ml-configuring-categories.html',
},
categorizer_varying_per_partition_fields: {
status: VALIDATION_STATUS.ERROR,
text: i18n.translate(
'xpack.ml.models.jobValidation.messages.categorizerVaryingPerPartitionFieldNamesMessage',
{
defaultMessage:
'Detectors with keyword "mlcategory" cannot have different partition_field_name when per-partition categorization is enabled. Found [{fields}].',
values: {
fields: '"{{fields}}"',
},
}
),
url:
'https://www.elastic.co/guide/en/machine-learning/{{version}}/ml-configuring-categories.html',
},
field_not_aggregatable: {
status: VALIDATION_STATUS.ERROR,
text: i18n.translate('xpack.ml.models.jobValidation.messages.fieldNotAggregatableMessage', {
@@ -57,3 +57,20 @@ export interface AnomaliesTableRecord {
}

export type PartitionFieldsType = typeof PARTITION_FIELDS[number];

export interface AnomalyCategorizerStatsDoc {
[key: string]: any;
job_id: string;
result_type: 'categorizer_stats';
partition_field_name?: string;
partition_field_value?: string;
categorized_doc_count: number;
total_category_count: number;
frequent_category_count: number;
rare_category_count: number;
dead_category_count: number;
failed_category_count: number;
categorization_status: 'ok' | 'warn';
log_time: number;
timestamp: number;
}
@@ -93,6 +93,6 @@ export interface CustomRule {
}

export interface PerPartitionCategorization {
enabled: boolean;
enabled?: boolean;
stop_on_warn?: boolean;
}
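For illustration only (not part of the diff), a job's analysis_config using the new options might look like the sketch below; the shape follows the PerPartitionCategorization interface above, while the concrete field values are hypothetical.

const exampleAnalysisConfig = {
  categorization_field_name: 'message',
  per_partition_categorization: { enabled: true, stop_on_warn: true },
  detectors: [
    { function: 'count', by_field_name: 'mlcategory', partition_field_name: 'event.dataset' },
  ],
};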
9 x-pack/plugins/ml/common/types/results.ts (new file)
@@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

export interface GetStoppedPartitionResult {
jobs: string[] | Record<string, string[]>;
}
@@ -23,6 +23,7 @@ import { EntityField } from './anomaly_utils';
import { MlServerLimits } from '../types/ml_server_info';
import { JobValidationMessage, JobValidationMessageId } from '../constants/messages';
import { ES_AGGREGATION, ML_JOB_AGGREGATION } from '../constants/aggregation_types';
import { MLCATEGORY } from '../constants/field_types';

export interface ValidationResults {
valid: boolean;

@@ -86,9 +87,9 @@ export function isSourceDataChartableForDetector(job: CombinedJob, detectorIndex
// whereas the 'function_description' field holds an ML-built display hint for function e.g. 'count'.
isSourceDataChartable =
mlFunctionToESAggregation(functionName) !== null &&
dtr.by_field_name !== 'mlcategory' &&
dtr.partition_field_name !== 'mlcategory' &&
dtr.over_field_name !== 'mlcategory';
dtr.by_field_name !== MLCATEGORY &&
dtr.partition_field_name !== MLCATEGORY &&
dtr.over_field_name !== MLCATEGORY;

// If the datafeed uses script fields, we can only plot the time series if
// model plot is enabled. Without model plot it will be very difficult or impossible
@@ -380,16 +381,25 @@ export function basicJobValidation(
valid = false;
}
}

let categorizerDetectorMissingPartitionField = false;
if (job.analysis_config.detectors.length === 0) {
messages.push({ id: 'detectors_empty' });
valid = false;
} else {
let v = true;

each(job.analysis_config.detectors, (d) => {
if (isEmpty(d.function)) {
v = false;
}
// if detector has an ml category, check if the partition_field is missing
const needToHavePartitionFieldName =
job.analysis_config.per_partition_categorization?.enabled === true &&
(d.by_field_name === MLCATEGORY || d.over_field_name === MLCATEGORY);

if (needToHavePartitionFieldName && d.partition_field_name === undefined) {
categorizerDetectorMissingPartitionField = true;
}
});
if (v) {
messages.push({ id: 'detectors_function_not_empty' });
@@ -397,10 +407,46 @@ export function basicJobValidation(
messages.push({ id: 'detectors_function_empty' });
valid = false;
}
if (categorizerDetectorMissingPartitionField) {
messages.push({ id: 'categorizer_detector_missing_per_partition_field' });
valid = false;
}
}

// check for duplicate detectors
if (job.analysis_config.detectors.length >= 2) {
// check if the detectors with mlcategory might have different per_partition_field values
// if per_partition_categorization is enabled
if (job.analysis_config.per_partition_categorization !== undefined) {
if (
job.analysis_config.per_partition_categorization.enabled ||
(job.analysis_config.per_partition_categorization.stop_on_warn &&
Array.isArray(job.analysis_config.detectors) &&
job.analysis_config.detectors.length >= 2)
) {
const categorizationDetectors = job.analysis_config.detectors.filter(
(d) =>
d.by_field_name === MLCATEGORY ||
d.over_field_name === MLCATEGORY ||
d.partition_field_name === MLCATEGORY
);
const uniqPartitions = [
...new Set(
categorizationDetectors
.map((d) => d.partition_field_name)
.filter((name) => name !== undefined)
),
];
if (uniqPartitions.length > 1) {
valid = false;
messages.push({
id: 'categorizer_varying_per_partition_fields',
fields: uniqPartitions.join(', '),
});
}
}
}

// check for duplicate detectors
// create an array of objects with a subset of the attributes
// where we want to make sure they are not be the same across detectors
const compareSubSet = job.analysis_config.detectors.map((d) =>
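As a hedged illustration of the check above (not part of the diff), two "mlcategory" detectors naming different partition fields would yield a uniqPartitions array of length 2 and push the corresponding validation message; the detector values here are hypothetical.

const exampleDetectors = [
  { function: 'count', by_field_name: 'mlcategory', partition_field_name: 'event.dataset' },
  { function: 'rare', by_field_name: 'mlcategory', partition_field_name: 'agent' },
];
// uniqPartitions would be ['event.dataset', 'agent'], so the validation sets valid = false and pushes
// { id: 'categorizer_varying_per_partition_fields', fields: 'event.dataset, agent' }.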
@@ -205,7 +205,7 @@ export class Explorer extends React.Component {
updateLanguage = (language) => this.setState({ language });

render() {
const { showCharts, severity } = this.props;
const { showCharts, severity, stoppedPartitions } = this.props;

const {
annotations,
@@ -298,6 +298,23 @@
<div className={mainColumnClasses}>
<EuiSpacer size="m" />

{stoppedPartitions && (
<EuiCallOut
size={'s'}
title={
<FormattedMessage
id="xpack.ml.explorer.stoppedPartitionsExistCallout"
defaultMessage="There may be fewer results than there could have been because stop_on_warn is turned on. Both categorization and subsequent anomaly detection have stopped for some partitions in {jobsWithStoppedPartitions, plural, one {job} other {jobs}} [{stoppedPartitions}] where the categorization status has changed to warn."
values={{
jobsWithStoppedPartitions: stoppedPartitions.length,
stoppedPartitions: stoppedPartitions.join(', '),
}}
/>
}
/>
)}

<AnomalyTimeline
explorerState={this.props.explorerState}
setSelectedCells={this.props.setSelectedCells}
@@ -41,6 +41,7 @@ export class CategorizationJobCreator extends JobCreator {
ML_JOB_AGGREGATION.COUNT;
private _categorizationAnalyzer: CategorizationAnalyzer = {};
private _defaultCategorizationAnalyzer: CategorizationAnalyzer;
private _partitionFieldName: string | null = null;

constructor(
indexPattern: IndexPattern,

@@ -75,6 +76,11 @@ export class CategorizationJobCreator extends JobCreator {
private _createDetector(agg: Aggregation, field: Field) {
const dtr: Detector = createBasicDetector(agg, field);
dtr.by_field_name = mlCategory.id;

// API requires if per_partition_categorization is enabled, add partition field to the detector
if (this.perPartitionCategorization && this.categorizationPerPartitionField !== null) {
dtr.partition_field_name = this.categorizationPerPartitionField;
}
this._addDetector(dtr, agg, mlCategory);
}
@@ -173,4 +179,29 @@ export class CategorizationJobCreator extends JobCreator {
this.bucketSpan = bs;
}
}

public get categorizationPerPartitionField() {
return this._partitionFieldName;
}

public set categorizationPerPartitionField(fieldName: string | null) {
if (fieldName === null) {
this._detectors.forEach((detector) => {
delete detector.partition_field_name;
});
if (this._partitionFieldName !== null) this.removeInfluencer(this._partitionFieldName);
this._partitionFieldName = null;
} else {
if (this._partitionFieldName !== fieldName) {
// remove the previous field from list of influencers
// and add the new one
if (this._partitionFieldName !== null) this.removeInfluencer(this._partitionFieldName);
this.addInfluencer(fieldName);
this._partitionFieldName = fieldName;
this._detectors.forEach((detector) => {
detector.partition_field_name = fieldName;
});
}
}
}
}
@@ -622,6 +622,36 @@ export class JobCreator {
return JSON.stringify(this._datafeed_config, null, 2);
}

private _initPerPartitionCategorization() {
if (this._job_config.analysis_config.per_partition_categorization === undefined) {
this._job_config.analysis_config.per_partition_categorization = {};
}
if (this._job_config.analysis_config.per_partition_categorization?.enabled === undefined) {
this._job_config.analysis_config.per_partition_categorization!.enabled = false;
}
if (this._job_config.analysis_config.per_partition_categorization?.stop_on_warn === undefined) {
this._job_config.analysis_config.per_partition_categorization!.stop_on_warn = false;
}
}

public get perPartitionCategorization() {
return this._job_config.analysis_config.per_partition_categorization?.enabled === true;
}

public set perPartitionCategorization(enabled: boolean) {
this._initPerPartitionCategorization();
this._job_config.analysis_config.per_partition_categorization!.enabled = enabled;
}

public get perPartitionStopOnWarn() {
return this._job_config.analysis_config.per_partition_categorization?.stop_on_warn === true;
}

public set perPartitionStopOnWarn(enabled: boolean) {
this._initPerPartitionCategorization();
this._job_config.analysis_config.per_partition_categorization!.stop_on_warn = enabled;
}

protected _overrideConfigs(job: Job, datafeed: Datafeed) {
this._job_config = job;
this._datafeed_config = datafeed;
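A minimal usage sketch (not part of the diff) of the new JobCreator accessors, assuming an existing jobCreator instance:

// Enabling the feature initializes per_partition_categorization if needed and sets enabled: true.
jobCreator.perPartitionCategorization = true;
// stop_on_warn is written through to analysis_config.per_partition_categorization.stop_on_warn.
jobCreator.perPartitionStopOnWarn = true;
// The getters simply report whether the corresponding flags are currently true.
const isEnabled = jobCreator.perPartitionCategorization; // true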
@@ -51,6 +51,8 @@ export interface BasicValidations {
queryDelay: Validation;
frequency: Validation;
scrollSize: Validation;
categorizerMissingPerPartition: Validation;
categorizerVaryingPerPartitionField: Validation;
}

export interface AdvancedValidations {

@@ -76,6 +78,8 @@ export class JobValidator {
queryDelay: { valid: true },
frequency: { valid: true },
scrollSize: { valid: true },
categorizerMissingPerPartition: { valid: true },
categorizerVaryingPerPartitionField: { valid: true },
};
private _advancedValidations: AdvancedValidations = {
categorizationFieldValid: { valid: true },

@@ -273,6 +277,14 @@ export class JobValidator {
this._advancedValidations.categorizationFieldValid.valid = valid;
}

public get categorizerMissingPerPartition() {
return this._basicValidations.categorizerMissingPerPartition;
}

public get categorizerVaryingPerPartitionField() {
return this._basicValidations.categorizerVaryingPerPartitionField;
}

/**
* Indicates if the Pick Fields step has a valid input
*/

@@ -283,6 +295,8 @@ export class JobValidator {
(this._jobCreator.type === JOB_TYPE.ADVANCED && this.modelMemoryLimit.valid)) &&
this.bucketSpan.valid &&
this.duplicateDetectors.valid &&
this.categorizerMissingPerPartition.valid &&
this.categorizerVaryingPerPartitionField.valid &&
!this.validating &&
(this._jobCreator.type !== JOB_TYPE.CATEGORIZATION ||
(this._jobCreator.type === JOB_TYPE.CATEGORIZATION && this.categorizationField))
@@ -130,6 +130,29 @@ export function populateValidationMessages(
basicValidations.duplicateDetectors.message = msg;
}

if (validationResults.contains('categorizer_detector_missing_per_partition_field')) {
basicValidations.categorizerMissingPerPartition.valid = false;
const msg = i18n.translate(
'xpack.ml.newJob.wizard.validateJob.categorizerMissingPerPartitionFieldMessage',
{
defaultMessage:
'Partition field must be set for detectors that reference "mlcategory" when per-partition categorization is enabled.',
}
);
basicValidations.categorizerMissingPerPartition.message = msg;
}
if (validationResults.contains('categorizer_varying_per_partition_fields')) {
basicValidations.categorizerVaryingPerPartitionField.valid = false;
const msg = i18n.translate(
'xpack.ml.newJob.wizard.validateJob.categorizerVaryingPerPartitionFieldNamesMessage',
{
defaultMessage:
'Detectors with keyword "mlcategory" cannot have different partition_field_name when per-partition categorization is enabled.',
}
);
basicValidations.categorizerVaryingPerPartitionField.message = msg;
}

if (validationResults.contains('bucket_span_empty')) {
basicValidations.bucketSpan.valid = false;
const msg = i18n.translate(
@@ -46,7 +46,15 @@ export const DetectorList: FC<Props> = ({ isActive, onEditJob, onDeleteJob }) =>
}, [jobCreatorUpdated]);

useEffect(() => {
setValidation(jobValidator.duplicateDetectors);
if (!jobValidator.duplicateDetectors.valid) {
setValidation(jobValidator.duplicateDetectors);
}
if (!jobValidator.categorizerVaryingPerPartitionField.valid) {
setValidation(jobValidator.categorizerVaryingPerPartitionField);
}
if (!jobValidator.categorizerMissingPerPartition.valid) {
setValidation(jobValidator.categorizerMissingPerPartition);
}
}, [jobValidatorUpdated]);

const Buttons: FC<{ index: number }> = ({ index }) => {

@@ -129,7 +137,7 @@ export const DetectorList: FC<Props> = ({ isActive, onEditJob, onDeleteJob }) =>
</EuiFlexItem>
))}
</EuiFlexGrid>
<DuplicateDetectorsWarning validation={validation} />
<DetectorsValidationWarning validation={validation} />
</Fragment>
);
};

@@ -159,7 +167,7 @@ const NoDetectorsWarning: FC<{ show: boolean }> = ({ show }) => {
);
};

const DuplicateDetectorsWarning: FC<{ validation: Validation }> = ({ validation }) => {
const DetectorsValidationWarning: FC<{ validation: Validation }> = ({ validation }) => {
if (validation.valid === true) {
return null;
}
@@ -4,13 +4,19 @@
* you may not use this file except in compliance with the Elastic License.
*/

import React, { Fragment, FC } from 'react';
import React, { Fragment, FC, useContext } from 'react';
import { EuiFlexGroup, EuiFlexItem } from '@elastic/eui';

import { SummaryCountField } from '../summary_count_field';
import { CategorizationField } from '../categorization_field';
import { CategorizationPerPartitionField } from '../categorization_partition_field';
import { JobCreatorContext } from '../../../job_creator_context';
import { isAdvancedJobCreator } from '../../../../../common/job_creator';

export const ExtraSettings: FC = () => {
const { jobCreator } = useContext(JobCreatorContext);
const showCategorizationPerPartitionField =
isAdvancedJobCreator(jobCreator) && jobCreator.categorizationFieldName !== null;
return (
<Fragment>
<EuiFlexGroup gutterSize="xl">

@@ -21,6 +27,7 @@ export const ExtraSettings: FC = () => {
<SummaryCountField />
</EuiFlexItem>
</EuiFlexGroup>
{showCategorizationPerPartitionField && <CategorizationPerPartitionField />}
</Fragment>
);
};
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/

import React, { FC, useContext } from 'react';
import React, { FC, useCallback, useContext, useMemo } from 'react';
import { EuiComboBox, EuiComboBoxOptionOption } from '@elastic/eui';

import { JobCreatorContext } from '../../../job_creator_context';

@@ -18,24 +18,25 @@ interface Props {
}

export const CategorizationFieldSelect: FC<Props> = ({ fields, changeHandler, selectedField }) => {
const { jobCreator } = useContext(JobCreatorContext);
const options: EuiComboBoxOptionOption[] = [
...createFieldOptions(fields, jobCreator.additionalFields),
];
const { jobCreator, jobCreatorUpdated } = useContext(JobCreatorContext);
const options: EuiComboBoxOptionOption[] = useMemo(
() => [...createFieldOptions(fields, jobCreator.additionalFields)],
[fields, jobCreatorUpdated]
);

const selection: EuiComboBoxOptionOption[] = [];
if (selectedField !== null) {
selection.push({ label: selectedField });
}

function onChange(selectedOptions: EuiComboBoxOptionOption[]) {
const option = selectedOptions[0];
if (typeof option !== 'undefined') {
changeHandler(option.label);
} else {
changeHandler(null);
const selection: EuiComboBoxOptionOption[] = useMemo(() => {
const selectedOptions: EuiComboBoxOptionOption[] = [];
if (selectedField !== null) {
selectedOptions.push({ label: selectedField });
}
}
return selectedOptions;
}, [selectedField]);

const onChange = useCallback(
(selectedOptions: EuiComboBoxOptionOption[]) =>
changeHandler((selectedOptions[0] && selectedOptions[0].label) ?? null),
[changeHandler]
);

return (
<EuiComboBox
@@ -0,0 +1,65 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import React, { FC, useContext, useEffect, useState } from 'react';
import { EuiFormRow } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import { JobCreatorContext } from '../../../job_creator_context';
import {
AdvancedJobCreator,
CategorizationJobCreator,
isCategorizationJobCreator,
} from '../../../../../common/job_creator';

import { Description } from './description';

import { CategorizationPerPartitionSwitch } from './categorization_per_partition_switch';
import { CategorizationPerPartitionStopOnWarnSwitch } from './categorization_stop_on_warn_switch';
import { CategorizationPerPartitionFieldDropdown } from './categorization_per_partition_dropdown';

export const CategorizationPerPartitionField: FC = () => {
const { jobCreator: jc, jobCreatorUpdated } = useContext(JobCreatorContext);
const jobCreator = jc as AdvancedJobCreator | CategorizationJobCreator;
const [enablePerPartitionCategorization, setEnablePerPartitionCategorization] = useState(false);
useEffect(() => {
setEnablePerPartitionCategorization(jobCreator.perPartitionCategorization);
}, [jobCreatorUpdated]);

return (
<Description>
<EuiFormRow
label={
<FormattedMessage
id="xpack.ml.newJob.wizard.extraStep.categorizationJob.perPartitionCategorizationLabel"
defaultMessage="Enable per-partition categorization"
/>
}
>
<CategorizationPerPartitionSwitch />
</EuiFormRow>

{enablePerPartitionCategorization && (
<>
<EuiFormRow
label={
<FormattedMessage
id="xpack.ml.newJob.wizard.extraStep.categorizationJob.stopOnWarnLabel"
defaultMessage="Stop on warn"
/>
}
>
<CategorizationPerPartitionStopOnWarnSwitch />
</EuiFormRow>
</>
)}
{isCategorizationJobCreator(jobCreator) && enablePerPartitionCategorization && (
<CategorizationPerPartitionFieldDropdown
setEnablePerPartitionCategorization={setEnablePerPartitionCategorization}
/>
)}
</Description>
);
};
@@ -0,0 +1,67 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import React, { Dispatch, SetStateAction, useContext, useEffect, useState, useMemo } from 'react';
import { EuiFormRow } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import { JobCreatorContext } from '../../../job_creator_context';
import { CategorizationJobCreator } from '../../../../../common/job_creator';
import { newJobCapsService } from '../../../../../../../services/new_job_capabilities_service';
import { CategorizationPerPartitionFieldSelect } from './categorization_per_partition_input';

export const CategorizationPerPartitionFieldDropdown = ({
setEnablePerPartitionCategorization,
}: {
setEnablePerPartitionCategorization: Dispatch<SetStateAction<boolean>>;
}) => {
const { jobCreator: jc, jobCreatorUpdate, jobCreatorUpdated } = useContext(JobCreatorContext);
const jobCreator = jc as CategorizationJobCreator;

const [categorizationPartitionFieldName, setCategorizationPartitionFieldName] = useState<
string | null
>(jobCreator.categorizationPerPartitionField);
const { categoryFields } = newJobCapsService;

const filteredCategories = useMemo(
() => categoryFields.filter((c) => c.id !== jobCreator.categorizationFieldName),
[categoryFields, jobCreatorUpdated]
);
useEffect(() => {
jobCreator.categorizationPerPartitionField = categorizationPartitionFieldName;
jobCreatorUpdate();
}, [categorizationPartitionFieldName]);

useEffect(() => {
// set the first item in category as partition field by default
// because API requires partition_field to be defined in each detector with mlcategory
// if per-partition categorization is enabled
if (
jobCreator.perPartitionCategorization &&
jobCreator.categorizationPerPartitionField === null &&
filteredCategories.length > 0
) {
jobCreator.categorizationPerPartitionField = filteredCategories[0].id;
}
setCategorizationPartitionFieldName(jobCreator.categorizationPerPartitionField);
setEnablePerPartitionCategorization(jobCreator.perPartitionCategorization);
}, [jobCreatorUpdated]);
return (
<EuiFormRow
label={
<FormattedMessage
id="xpack.ml.newJob.wizard.extraStep.categorizationJob.categorizationPerPartitionFieldLabel"
defaultMessage="Partition field"
/>
}
>
<CategorizationPerPartitionFieldSelect
fields={filteredCategories}
changeHandler={setCategorizationPartitionFieldName}
selectedField={categorizationPartitionFieldName || ''}
/>
</EuiFormRow>
);
};
@@ -0,0 +1,55 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import React, { FC, useCallback, useContext, useMemo } from 'react';
import { EuiComboBox, EuiComboBoxOptionOption } from '@elastic/eui';

import { JobCreatorContext } from '../../../job_creator_context';
import { Field } from '../../../../../../../../../common/types/fields';
import { createFieldOptions } from '../../../../../common/job_creator/util/general';

interface Props {
fields: Field[];
changeHandler(i: string | null): void;
selectedField: string | null;
}

export const CategorizationPerPartitionFieldSelect: FC<Props> = ({
fields,
changeHandler,
selectedField,
}) => {
const { jobCreator, jobCreatorUpdated } = useContext(JobCreatorContext);
const options: EuiComboBoxOptionOption[] = useMemo(
() => [...createFieldOptions(fields, jobCreator.additionalFields)],
[fields, jobCreatorUpdated]
);

const selection: EuiComboBoxOptionOption[] = useMemo(() => {
const selectedOptions: EuiComboBoxOptionOption[] = [];
if (selectedField !== null) {
selectedOptions.push({ label: selectedField });
}
return selectedOptions;
}, [selectedField]);

const onChange = useCallback(
(selectedOptions: EuiComboBoxOptionOption[]) =>
changeHandler((selectedOptions[0] && selectedOptions[0].label) ?? null),
[changeHandler]
);

return (
<EuiComboBox
singleSelection={{ asPlainText: true }}
options={options}
selectedOptions={selection}
onChange={onChange}
isClearable={true}
data-test-subj="mlJobWizardCategorizationPerPartitionFieldNameSelect"
/>
);
};
@@ -0,0 +1,54 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import React, { FC, useContext, useEffect, useCallback, useState } from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiSwitch } from '@elastic/eui';
import { JobCreatorContext } from '../../../job_creator_context';
import { AdvancedJobCreator, CategorizationJobCreator } from '../../../../../common/job_creator';

export const CategorizationPerPartitionSwitch: FC = () => {
const { jobCreator: jc, jobCreatorUpdate, jobCreatorUpdated } = useContext(JobCreatorContext);
const jobCreator = jc as AdvancedJobCreator | CategorizationJobCreator;
const [enablePerPartitionCategorization, setEnablePerPartitionCategorization] = useState(
jobCreator.perPartitionCategorization
);

const toggleEnablePerPartitionCategorization = useCallback(
() => setEnablePerPartitionCategorization(!enablePerPartitionCategorization),
[enablePerPartitionCategorization]
);

useEffect(() => {
setEnablePerPartitionCategorization(jobCreator.perPartitionCategorization);
}, [jobCreatorUpdated]);

useEffect(() => {
// also turn off stop on warn if per_partition_categorization is turned off
if (enablePerPartitionCategorization === false) {
jobCreator.perPartitionStopOnWarn = false;
}

jobCreator.perPartitionCategorization = enablePerPartitionCategorization;
jobCreatorUpdate();
}, [enablePerPartitionCategorization]);

return (
<EuiSwitch
name="categorizationPerPartitionSwitch"
disabled={false}
checked={enablePerPartitionCategorization}
onChange={toggleEnablePerPartitionCategorization}
data-test-subj="mlJobWizardSwitchCategorizationPerPartition"
label={
<FormattedMessage
id="xpack.ml.newJob.wizard.perPartitionCategorizationSwitchLabel"
defaultMessage="Enable per-partition categorization"
/>
}
/>
);
};
@@ -0,0 +1,44 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import React, { FC, useCallback, useContext, useEffect, useState } from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiSwitch } from '@elastic/eui';
import { JobCreatorContext } from '../../../job_creator_context';
import { AdvancedJobCreator, CategorizationJobCreator } from '../../../../../common/job_creator';

export const CategorizationPerPartitionStopOnWarnSwitch: FC = () => {
const { jobCreator: jc, jobCreatorUpdate, jobCreatorUpdated } = useContext(JobCreatorContext);
const jobCreator = jc as AdvancedJobCreator | CategorizationJobCreator;
const [stopOnWarn, setStopOnWarn] = useState(jobCreator.perPartitionStopOnWarn);

const toggleStopOnWarn = useCallback(() => setStopOnWarn(!stopOnWarn), [stopOnWarn]);

useEffect(() => {
jobCreator.perPartitionStopOnWarn = stopOnWarn;
jobCreatorUpdate();
}, [stopOnWarn]);

useEffect(() => {
setStopOnWarn(jobCreator.perPartitionStopOnWarn);
}, [jobCreatorUpdated]);

return (
<EuiSwitch
name="categorizationPerPartitionStopOnWarnSwitch"
disabled={false}
checked={stopOnWarn}
onChange={toggleStopOnWarn}
data-test-subj="mlJobWizardSwitchCategorizationPerPartitionStopOnWarn"
label={
<FormattedMessage
id="xpack.ml.newJob.wizard.perPartitionCategorizationtopOnWarnSwitchLabel"
defaultMessage="Stop on warn"
/>
}
/>
);
};
@@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import React, { memo, FC } from 'react';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiDescribedFormGroup } from '@elastic/eui';

interface Props {
children: React.ReactNode;
}
export const Description: FC<Props> = memo(({ children }) => {
const title = i18n.translate('xpack.ml.newJob.wizard.perPartitionCategorization.enable.title', {
defaultMessage: 'Enable per-partition categorization',
});
return (
<EuiDescribedFormGroup
title={<h3>{title}</h3>}
description={
<FormattedMessage
id="xpack.ml.newJob.wizard.perPartitionCategorization.enable.description"
defaultMessage="If per-partition categorization is enabled then categories are determined independently for each value of the partition field."
/>
}
>
<>{children}</>
</EuiDescribedFormGroup>
);
});
@@ -0,0 +1,6 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { CategorizationPerPartitionField } from './categorization_per_partition';
@@ -12,6 +12,8 @@ import { JobCreatorContext } from '../../../job_creator_context';
import { CategorizationJobCreator } from '../../../../../common/job_creator';
import { CategorizationField } from '../categorization_field';
import { CategorizationDetector } from '../categorization_detector';
import { CategorizationPerPartitionField } from '../categorization_partition_field';

import { FieldExamples } from './field_examples';
import { ExamplesValidCallout } from './examples_valid_callout';
import {

@@ -126,6 +128,8 @@ export const CategorizationDetectors: FC<Props> = ({ setIsValid }) => {
<FieldExamples fieldExamples={fieldExamples} />
</>
)}
<EuiHorizontalRule />
<CategorizationPerPartitionField />
</>
);
};
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/

import React, { FC, useEffect, useState } from 'react';
import React, { FC, useEffect, useState, useCallback } from 'react';
import useObservable from 'react-use/lib/useObservable';

import { i18n } from '@kbn/i18n';

@@ -32,6 +32,7 @@ import { useUrlState } from '../../util/url_state';
import { getBreadcrumbWithUrlForApp } from '../breadcrumbs';
import { useTimefilter } from '../../contexts/kibana';
import { isViewBySwimLaneData } from '../../explorer/swimlane_container';
import { JOB_ID } from '../../../../common/constants/anomalies';

export const explorerRouteFactory = (navigateToPath: NavigateToPath): MlRoute => ({
path: '/explorer',

@@ -70,6 +71,8 @@ const ExplorerUrlStateManager: FC<ExplorerUrlStateManagerProps> = ({ jobsWithTim
const [appState, setAppState] = useUrlState('_a');
const [globalState, setGlobalState] = useUrlState('_g');
const [lastRefresh, setLastRefresh] = useState(0);
const [stoppedPartitions, setStoppedPartitions] = useState<string[] | undefined>();

const timefilter = useTimefilter({ timeRangeSelector: true, autoRefreshSelector: true });

const { jobIds } = useJobSelection(jobsWithTimeRange);

@@ -109,9 +112,31 @@ const ExplorerUrlStateManager: FC<ExplorerUrlStateManagerProps> = ({ jobsWithTim
}
}, [globalState?.time?.from, globalState?.time?.to]);

const getJobsWithStoppedPartitions = useCallback(async (selectedJobIds: string[]) => {
try {
const fetchedStoppedPartitions = await ml.results.getCategoryStoppedPartitions(
selectedJobIds,
JOB_ID
);
if (
fetchedStoppedPartitions &&
Array.isArray(fetchedStoppedPartitions.jobs) &&
fetchedStoppedPartitions.jobs.length > 0
) {
setStoppedPartitions(fetchedStoppedPartitions.jobs);
} else {
setStoppedPartitions(undefined);
}
} catch (error) {
// eslint-disable-next-line no-console
console.error(error);
}
}, []);

useEffect(() => {
if (jobIds.length > 0) {
explorerService.updateJobSelection(jobIds);
getJobsWithStoppedPartitions(jobIds);
} else {
explorerService.clearJobs();
}

@@ -209,6 +234,7 @@ const ExplorerUrlStateManager: FC<ExplorerUrlStateManagerProps> = ({ jobsWithTim
setSelectedCells,
showCharts,
severity: tableSeverity.val,
stoppedPartitions,
}}
/>
</div>
@@ -5,11 +5,11 @@
*/

// Service for obtaining data for the ML Results dashboards.
import { GetStoppedPartitionResult } from '../../../../common/types/results';
import { HttpService } from '../http_service';

import { basePath } from './index';

import { JobId } from '../../../../common/types/anomaly_detection_jobs';
import { JOB_ID, PARTITION_FIELD_VALUE } from '../../../../common/constants/anomalies';
import { PartitionFieldsDefinition } from '../results_service/result_service_rx';

export const resultsApiProvider = (httpService: HttpService) => ({

@@ -114,4 +114,19 @@ export const resultsApiProvider = (httpService: HttpService) => ({
body,
});
},

getCategoryStoppedPartitions(
jobIds: string[],
fieldToBucket?: typeof JOB_ID | typeof PARTITION_FIELD_VALUE
) {
const body = JSON.stringify({
jobIds,
fieldToBucket,
});
return httpService.http<GetStoppedPartitionResult>({
path: `${basePath()}/results/category_stopped_partitions`,
method: 'POST',
body,
});
},
});
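A hedged usage sketch (not part of the diff) of the new client method on the object returned by resultsApiProvider, mirroring how the Explorer calls ml.results.getCategoryStoppedPartitions elsewhere in this commit; the job IDs are hypothetical.

// Bucketing by JOB_ID returns only the IDs of jobs that have stopped partitions.
const stopped = await mlResultsApi.getCategoryStoppedPartitions(['job-1', 'job-2'], JOB_ID);
// stopped.jobs is string[] here; with PARTITION_FIELD_VALUE (the server-side default)
// it would instead be a Record<string, string[]> keyed by job ID.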
@@ -7,7 +7,6 @@
import { i18n } from '@kbn/i18n';
import Boom from 'boom';
import { ILegacyScopedClusterClient } from 'kibana/server';

import { TypeOf } from '@kbn/config-schema';
import { fieldsServiceProvider } from '../fields_service';
import { renderTemplate } from '../../../common/util/string_utils';
@@ -10,11 +10,19 @@ import get from 'lodash/get';
import moment from 'moment';
import { SearchResponse } from 'elasticsearch';
import { ILegacyScopedClusterClient } from 'kibana/server';
import Boom from 'boom';
import { buildAnomalyTableItems } from './build_anomaly_table_items';
import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns';
import { ANOMALIES_TABLE_DEFAULT_QUERY_SIZE } from '../../../common/constants/search';
import { getPartitionFieldsValuesFactory } from './get_partition_fields_values';
import { AnomaliesTableRecord, AnomalyRecordDoc } from '../../../common/types/anomalies';
import {
AnomaliesTableRecord,
AnomalyCategorizerStatsDoc,
AnomalyRecordDoc,
} from '../../../common/types/anomalies';
import { JOB_ID, PARTITION_FIELD_VALUE } from '../../../common/constants/anomalies';
import { GetStoppedPartitionResult } from '../../../common/types/results';
import { MlJobsResponse } from '../job_service/jobs';

// Service for carrying out Elasticsearch queries to obtain data for the
// ML Results dashboards.
@@ -432,6 +440,154 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
return definition;
}

async function getCategorizerStats(jobId: string, partitionByValue?: string) {
const mustMatchClauses: Array<Record<'match', Record<string, string>>> = [
{
match: {
result_type: 'categorizer_stats',
},
},
];

if (typeof partitionByValue === 'string') {
mustMatchClauses.push({
match: {
partition_by_value: partitionByValue,
},
});
}
const results: SearchResponse<AnomalyCategorizerStatsDoc> = await callAsInternalUser('search', {
index: ML_RESULTS_INDEX_PATTERN,
body: {
query: {
bool: {
must: mustMatchClauses,
filter: [
{
term: {
job_id: jobId,
},
},
],
},
},
},
});
return results ? results.hits.hits.map((r) => r._source) : [];
}

async function getCategoryStoppedPartitions(
jobIds: string[],
fieldToBucket: typeof JOB_ID | typeof PARTITION_FIELD_VALUE = PARTITION_FIELD_VALUE
): Promise<GetStoppedPartitionResult> {
let finalResults: GetStoppedPartitionResult = {
jobs: {},
};
// first determine from job config if stop_on_warn is true
// if false return []
const jobConfigResponse: MlJobsResponse = await callAsInternalUser('ml.jobs', {
jobId: jobIds,
});

if (!jobConfigResponse || jobConfigResponse.jobs.length < 1) {
throw Boom.notFound(`Unable to find anomaly detector jobs ${jobIds.join(', ')}`);
}

const jobIdsWithStopOnWarnSet = jobConfigResponse.jobs
.filter(
(jobConfig) =>
jobConfig.analysis_config?.per_partition_categorization?.stop_on_warn === true
)
.map((j) => j.job_id);

let aggs: any;
if (fieldToBucket === JOB_ID) {
// if bucketing by job_id, then return list of job_ids with at least one stopped_partitions
aggs = {
unique_terms: {
terms: {
field: JOB_ID,
},
},
};
} else {
// if bucketing by partition field value, then return list of unique stopped_partitions for each job
aggs = {
jobs: {
terms: {
field: JOB_ID,
},
aggs: {
unique_stopped_partitions: {
terms: {
field: PARTITION_FIELD_VALUE,
},
},
},
},
};
}

if (jobIdsWithStopOnWarnSet.length > 0) {
// search for categorizer_stats documents for the current job where the categorization_status is warn
// Return all the partition_field_value values from the documents found
const mustMatchClauses: Array<Record<'match', Record<string, string>>> = [
{
match: {
result_type: 'categorizer_stats',
},
},
{
match: {
categorization_status: 'warn',
},
},
];
const results: SearchResponse<any> = await callAsInternalUser('search', {
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
query: {
bool: {
must: mustMatchClauses,
filter: [
{
terms: {
job_id: jobIdsWithStopOnWarnSet,
},
},
],
},
},
aggs,
},
});
if (fieldToBucket === JOB_ID) {
finalResults = {
jobs: results.aggregations?.unique_terms?.buckets.map(
(b: { key: string; doc_count: number }) => b.key
),
};
} else if (fieldToBucket === PARTITION_FIELD_VALUE) {
const jobs: Record<string, string[]> = jobIdsWithStopOnWarnSet.reduce(
(obj: Record<string, string[]>, jobId: string) => {
obj[jobId] = [];
return obj;
},
{}
);
results.aggregations.jobs.buckets.forEach(
(bucket: { key: string | number; unique_stopped_partitions: { buckets: any[] } }) => {
jobs[bucket.key] = bucket.unique_stopped_partitions.buckets.map((b) => b.key);
}
);
finalResults.jobs = jobs;
}
}

return finalResults;
}

return {
getAnomaliesTableData,
getCategoryDefinition,
@@ -439,5 +595,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
getLatestBucketTimestampByJob,
getMaxAnomalyScore,
getPartitionFieldsValues: getPartitionFieldsValuesFactory(mlClusterClient),
getCategorizerStats,
getCategoryStoppedPartitions,
};
}
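Illustrative response shapes for getCategoryStoppedPartitions (not part of the diff; the job and partition names are hypothetical):

// fieldToBucket === JOB_ID
//   { jobs: ['job-1', 'job-3'] }
// fieldToBucket === PARTITION_FIELD_VALUE (the default)
//   { jobs: { 'job-1': ['partition-a', 'partition-b'], 'job-2': [] } }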
@@ -20,7 +20,6 @@ import {
getModelSnapshotsSchema,
updateModelSnapshotSchema,
} from './schemas/anomaly_detectors_schema';

/**
* Routes for the anomaly detectors
*/
@@ -49,6 +49,8 @@
"GetCategoryExamples",
"GetPartitionFieldsValues",
"AnomalySearch",
"GetCategorizerStats",
"GetCategoryStoppedPartitions",

"Modules",
"DataRecognizer",
@@ -17,6 +17,11 @@ import {
} from './schemas/results_service_schema';
import { resultsServiceProvider } from '../models/results_service';
import { ML_RESULTS_INDEX_PATTERN } from '../../common/constants/index_patterns';
import { jobIdSchema } from './schemas/anomaly_detectors_schema';
import {
getCategorizerStatsSchema,
getCategorizerStoppedPartitionsSchema,
} from './schemas/results_service_schema';

function getAnomaliesTableData(legacyClient: ILegacyScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(legacyClient);

@@ -71,6 +76,19 @@ function getPartitionFieldsValues(legacyClient: ILegacyScopedClusterClient, payl
return rs.getPartitionFieldsValues(jobId, searchTerm, criteriaFields, earliestMs, latestMs);
}

function getCategorizerStats(legacyClient: ILegacyScopedClusterClient, params: any, query: any) {
const { jobId } = params;
const { partitionByValue } = query;
const rs = resultsServiceProvider(legacyClient);
return rs.getCategorizerStats(jobId, partitionByValue);
}

function getCategoryStoppedPartitions(legacyClient: ILegacyScopedClusterClient, payload: any) {
const { jobIds, fieldToBucket } = payload;
const rs = resultsServiceProvider(legacyClient);
return rs.getCategoryStoppedPartitions(jobIds, fieldToBucket);
}

/**
* Routes for results service
*/
@@ -265,4 +283,66 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
}
})
);

/**
* @apiGroup ResultsService
*
* @api {get} /api/ml/results/:jobId/categorizer_stats
* @apiName GetCategorizerStats
* @apiDescription Returns the categorizer stats for the specified job ID
* @apiSchema (params) jobIdSchema
* @apiSchema (query) getCategorizerStatsSchema
*/
router.get(
{
path: '/api/ml/results/{jobId}/categorizer_stats',
validate: {
params: jobIdSchema,
query: getCategorizerStatsSchema,
},
options: {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
try {
const resp = await getCategorizerStats(legacyClient, request.params, request.query);
return response.ok({
body: resp,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);

/**
* @apiGroup ResultsService
*
* @api {get} /api/ml/results/category_stopped_partitions
* @apiName GetCategoryStoppedPartitions
* @apiDescription Returns information on the partitions that have stopped being categorized due to the categorization status changing from ok to warn. Can return either the list of stopped partitions for each job, or just the list of job IDs.
* @apiSchema (body) getCategorizerStoppedPartitionsSchema
*/
router.post(
{
path: '/api/ml/results/category_stopped_partitions',
validate: {
body: getCategorizerStoppedPartitionsSchema,
},
options: {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
try {
const resp = await getCategoryStoppedPartitions(legacyClient, request.body);
return response.ok({
body: resp,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);
}
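A hedged example request (not part of the diff) for the new stopped-partitions endpoint; the body follows getCategorizerStoppedPartitionsSchema defined below, and the job IDs are hypothetical.

// POST /api/ml/results/category_stopped_partitions
const exampleBody = {
  jobIds: ['sample_logs_1', 'sample_logs_2'],
  fieldToBucket: 'job_id', // or 'partition_field_value', which the server treats as the default
};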
@@ -52,3 +52,26 @@ export const partitionFieldValuesSchema = schema.object({
earliestMs: schema.number(),
latestMs: schema.number(),
});

export const getCategorizerStatsSchema = schema.nullable(
schema.object({
/**
* Optional value to fetch the categorizer stats
* where results are filtered by partition_by_value = value
*/
partitionByValue: schema.maybe(schema.string()),
})
);

export const getCategorizerStoppedPartitionsSchema = schema.object({
/**
* List of jobIds to fetch the categorizer partitions for
*/
jobIds: schema.arrayOf(schema.string()),
/**
* Field to aggregate results by: 'job_id' or 'partition_field_value'
* If by job_id, will return list of jobIds with at least one partition that have stopped
* If by partition_field_value, it will return a list of categorizer stopped partitions for each job_id
*/
fieldToBucket: schema.maybe(schema.string()),
});
@@ -0,0 +1,148 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import expect from '@kbn/expect';
import { FtrProviderContext } from '../../../ftr_provider_context';
import { USER } from '../../../../functional/services/ml/security_common';
import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common';
import { Datafeed } from '../../../../../plugins/ml/common/types/anomaly_detection_jobs';
import { AnomalyCategorizerStatsDoc } from '../../../../../plugins/ml/common/types/anomalies';

export default ({ getService }: FtrProviderContext) => {
const esArchiver = getService('esArchiver');
const supertest = getService('supertestWithoutAuth');
const ml = getService('ml');

const jobId = `sample_logs_${Date.now()}`;
const PARTITION_FIELD_NAME = 'event.dataset';
const testJobConfig = {
job_id: jobId,
groups: ['sample_logs', 'bootstrap', 'categorization'],
description: "count by mlcategory (message) on 'sample logs' dataset with 15m bucket span",
analysis_config: {
bucket_span: '15m',
categorization_field_name: 'message',
per_partition_categorization: { enabled: true, stop_on_warn: true },
detectors: [
{
function: 'count',
by_field_name: 'mlcategory',
partition_field_name: PARTITION_FIELD_NAME,
},
],
influencers: ['mlcategory'],
},
analysis_limits: { model_memory_limit: '26MB' },
data_description: { time_field: '@timestamp', time_format: 'epoch_ms' },
model_plot_config: { enabled: false, annotations_enabled: true },
model_snapshot_retention_days: 10,
daily_model_snapshot_retention_after_days: 1,
allow_lazy_open: false,
};
const testDatafeedConfig: Datafeed = {
datafeed_id: `datafeed-${jobId}`,
indices: ['ft_module_sample_logs'],
job_id: jobId,
query: { bool: { must: [{ match_all: {} }] } },
};

describe('get categorizer_stats', function () {
before(async () => {
await esArchiver.loadIfNeeded('ml/module_sample_logs');
await ml.testResources.setKibanaTimeZoneToUTC();
await ml.api.createAndRunAnomalyDetectionLookbackJob(testJobConfig, testDatafeedConfig);
});

after(async () => {
await ml.testResources.deleteIndexPatternByTitle('ft_module_sample_logs');
await ml.api.cleanMlIndices();
});

it('should fetch all the categorizer stats for job id', async () => {
const { body } = await supertest
.get(`/api/ml/results/${jobId}/categorizer_stats`)
.auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER))
.set(COMMON_REQUEST_HEADERS)
.expect(200);

body.forEach((doc: AnomalyCategorizerStatsDoc) => {
expect(doc.job_id).to.eql(jobId);
expect(doc.result_type).to.eql('categorizer_stats');
expect(doc.partition_field_name).to.be(PARTITION_FIELD_NAME);
expect(doc.partition_field_value).to.not.be(undefined);
});
});

it('should fetch categorizer stats for job id for user with view permission', async () => {
const { body } = await supertest
.get(`/api/ml/results/${jobId}/categorizer_stats`)
.auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER))
.set(COMMON_REQUEST_HEADERS)
.expect(200);

body.forEach((doc: AnomalyCategorizerStatsDoc) => {
expect(doc.job_id).to.eql(jobId);
expect(doc.result_type).to.eql('categorizer_stats');
expect(doc.partition_field_name).to.be(PARTITION_FIELD_NAME);
expect(doc.partition_field_value).to.not.be(undefined);
});
});

it('should not fetch categorizer stats for job id for unauthorized user', async () => {
const { body } = await supertest
.get(`/api/ml/results/${jobId}/categorizer_stats`)
.auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED))
.set(COMMON_REQUEST_HEADERS)
.expect(404);

expect(body.error).to.be('Not Found');
expect(body.message).to.be('Not Found');
});

it('should fetch all the categorizer stats with per-partition value for job id', async () => {
const { body } = await supertest
.get(`/api/ml/results/${jobId}/categorizer_stats`)
.query({ partitionByValue: 'sample_web_logs' })
.auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER))
.set(COMMON_REQUEST_HEADERS)
.expect(200);
body.forEach((doc: AnomalyCategorizerStatsDoc) => {
expect(doc.job_id).to.eql(jobId);
expect(doc.result_type).to.eql('categorizer_stats');
expect(doc.partition_field_name).to.be(PARTITION_FIELD_NAME);
expect(doc.partition_field_value).to.be('sample_web_logs');
});
});

it('should fetch categorizer stats with per-partition value for user with view permission', async () => {
const { body } = await supertest
.get(`/api/ml/results/${jobId}/categorizer_stats`)
.query({ partitionByValue: 'sample_web_logs' })
.auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER))
.set(COMMON_REQUEST_HEADERS)
.expect(200);

body.forEach((doc: AnomalyCategorizerStatsDoc) => {
expect(doc.job_id).to.eql(jobId);
expect(doc.result_type).to.eql('categorizer_stats');
expect(doc.partition_field_name).to.be(PARTITION_FIELD_NAME);
expect(doc.partition_field_value).to.be('sample_web_logs');
});
});

it('should not fetch categorizer stats with per-partition value for unauthorized user', async () => {
const { body } = await supertest
.get(`/api/ml/results/${jobId}/categorizer_stats`)
.query({ partitionByValue: 'sample_web_logs' })
.auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED))
.set(COMMON_REQUEST_HEADERS)
.expect(404);

expect(body.error).to.be('Not Found');
expect(body.message).to.be('Not Found');
});
});
};
@ -0,0 +1,184 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import expect from '@kbn/expect';
import { Datafeed, Job } from '../../../../../plugins/ml/common/types/anomaly_detection_jobs';
import { FtrProviderContext } from '../../../ftr_provider_context';
import { USER } from '../../../../functional/services/ml/security_common';
import { COMMON_REQUEST_HEADERS } from '../../../../functional/services/ml/common';

export default ({ getService }: FtrProviderContext) => {
  const esArchiver = getService('esArchiver');
  const supertest = getService('supertestWithoutAuth');
  const ml = getService('ml');

  const testJobId = `sample_logs_${Date.now()}`;
  // non-aggregatable field to cause some partitions to change status to warn
  const PARTITION_FIELD_NAME = 'agent';

  interface TestConfig {
    testDescription: string;
    jobId: string;
    jobConfig: Job;
    datafeedConfig: Datafeed;
  }
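
  // Builds a categorization job and matching datafeed against the sample logs module data.
  // Per-partition categorization is enabled by default; `stopOnWarn` is varied per test case.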
  const setupTestConfigs = (
    jobId: string,
    stopOnWarn: boolean,
    enabledPerPartitionCat: boolean = true
  ): TestConfig => {
    const commonJobConfig = {
      groups: ['sample_logs', 'bootstrap', 'categorization'],
      description: "count by mlcategory (message) on 'sample logs' dataset with 15m bucket span",
      analysis_limits: { model_memory_limit: '26MB' },
      data_description: { time_field: '@timestamp', time_format: 'epoch_ms' },
      model_snapshot_retention_days: 10,
      daily_model_snapshot_retention_after_days: 1,
      allow_lazy_open: false,
    };
    const datafeedConfig: Datafeed = {
      datafeed_id: `datafeed-${jobId}`,
      indices: ['ft_module_sample_logs'],
      job_id: jobId,
      query: { bool: { must: [{ match_all: {} }] } },
    };

    return {
      testDescription: `stop_on_warn is ${stopOnWarn}`,
      jobId,
      jobConfig: {
        job_id: jobId,
        ...commonJobConfig,
        analysis_config: {
          bucket_span: '1m',
          categorization_field_name: 'message',
          per_partition_categorization: {
            enabled: enabledPerPartitionCat,
            stop_on_warn: stopOnWarn,
          },
          detectors: [
            {
              function: 'count',
              by_field_name: 'mlcategory',
              partition_field_name: PARTITION_FIELD_NAME,
            },
          ],
          influencers: ['mlcategory'],
        },
      },
      datafeedConfig,
    };
  };
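
  // Four jobs share the same configuration apart from stop_on_warn: the `_t`, `_viewer` and
  // `_unauthorized` jobs run with stop_on_warn: true and are expected to report stopped
  // partitions, while `_f` runs with stop_on_warn: false as the negative case.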
  const testSetUps: TestConfig[] = [
    setupTestConfigs(`${testJobId}_t`, true),
    setupTestConfigs(`${testJobId}_f`, false),
    setupTestConfigs(`${testJobId}_viewer`, true),
    setupTestConfigs(`${testJobId}_unauthorized`, true),
  ];

  const testJobIds = testSetUps.map((t) => t.jobId);

  describe('get stopped_partitions', function () {
    before(async () => {
      await esArchiver.loadIfNeeded('ml/module_sample_logs');
      await ml.testResources.setKibanaTimeZoneToUTC();
      for (const testData of testSetUps) {
        const { jobConfig, datafeedConfig } = testData;
        await ml.api.createAndRunAnomalyDetectionLookbackJob(jobConfig, datafeedConfig);
      }
    });

    after(async () => {
      await ml.testResources.deleteIndexPatternByTitle('ft_module_sample_logs');
      await ml.api.cleanMlIndices();
    });

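    // Each test below posts a selection of job ids to /api/ml/results/category_stopped_partitions
    // as a different user: power users and viewers get results, unauthorized users receive a 404.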
    it('should fetch all the stopped partitions correctly', async () => {
      const { jobId } = testSetUps[0];
      const { body } = await supertest
        .post(`/api/ml/results/category_stopped_partitions`)
        .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER))
        .send({ jobIds: [jobId] })
        .set(COMMON_REQUEST_HEADERS)
        .expect(200);
      expect(body.jobs).to.not.be(undefined);
      expect(body.jobs[jobId]).to.be.an('array');
      expect(body.jobs[jobId].length).to.be.greaterThan(0);
    });

    it('should not return jobId in response if stopped_on_warn is false', async () => {
      const { jobId } = testSetUps[1];
      const { body } = await supertest
        .post(`/api/ml/results/category_stopped_partitions`)
        .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER))
        .send({ jobIds: [jobId] })
        .set(COMMON_REQUEST_HEADERS)
        .expect(200);
      expect(body.jobs).to.not.be(undefined);
      expect(body.jobs).to.not.have.property(jobId);
    });

    it('should fetch stopped partitions for user with view permission', async () => {
      const { jobId } = testSetUps[2];
      const { body } = await supertest
        .post(`/api/ml/results/category_stopped_partitions`)
        .auth(USER.ML_VIEWER, ml.securityCommon.getPasswordForUser(USER.ML_VIEWER))
        .send({ jobIds: [jobId] })
        .set(COMMON_REQUEST_HEADERS)
        .expect(200);

      expect(body.jobs).to.not.be(undefined);
      expect(body.jobs[jobId]).to.be.an('array');
      expect(body.jobs[jobId].length).to.be.greaterThan(0);
    });

    it('should not fetch stopped partitions for unauthorized user', async () => {
      const { jobId } = testSetUps[3];

      const { body } = await supertest
        .post(`/api/ml/results/category_stopped_partitions`)
        .auth(USER.ML_UNAUTHORIZED, ml.securityCommon.getPasswordForUser(USER.ML_UNAUTHORIZED))
        .send({ jobIds: [jobId] })
        .set(COMMON_REQUEST_HEADERS)
        .expect(404);

      expect(body.error).to.be('Not Found');
      expect(body.message).to.be('Not Found');
    });

    it('should fetch stopped partitions for multiple job ids', async () => {
      const { body } = await supertest
        .post(`/api/ml/results/category_stopped_partitions`)
        .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER))
        .send({ jobIds: testJobIds })
        .set(COMMON_REQUEST_HEADERS)
        .expect(200);
      expect(body.jobs).to.not.be(undefined);
      expect(body.jobs).to.not.have.property(testSetUps[1].jobId);

      Object.keys(body.jobs).forEach((currentJobId: string) => {
        expect(testJobIds).to.contain(currentJobId);
        expect(body.jobs[currentJobId]).to.be.an('array');
        expect(body.jobs[currentJobId].length).to.be.greaterThan(0);
      });
    });

    it('should return array of jobIds with stopped_partitions for multiple job ids when bucketed by job_id', async () => {
      const { body } = await supertest
        .post(`/api/ml/results/category_stopped_partitions`)
        .auth(USER.ML_POWERUSER, ml.securityCommon.getPasswordForUser(USER.ML_POWERUSER))
        .send({ jobIds: testJobIds, fieldToBucket: 'job_id' })
        .set(COMMON_REQUEST_HEADERS)
        .expect(200);

      expect(body.jobs).to.not.be(undefined);
      body.jobs.forEach((currentJobId: string) => {
        expect(testJobIds).to.contain(currentJobId);
      });
    });
  });
};
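
As the last two tests show, `/api/ml/results/category_stopped_partitions` keys stopped partition values by job id by default, and returns a flat list of job ids when `fieldToBucket: 'job_id'` is sent. A minimal sketch of the two response shapes these assertions rely on (illustrative interface names, not the plugin's exported types):

// Default shape: for each job id, an array of stopped partition values.
interface StoppedPartitionsByJob {
  jobs: Record<string, string[]>;
}

// With `fieldToBucket: 'job_id'`: only the ids of jobs that have stopped partitions.
interface StoppedPartitionsJobIds {
  jobs: string[];
}
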
@ -8,5 +8,7 @@ import { FtrProviderContext } from '../../../ftr_provider_context';
export default function ({ loadTestFile }: FtrProviderContext) {
  describe('ResultsService', () => {
    loadTestFile(require.resolve('./get_anomalies_table_data'));
    loadTestFile(require.resolve('./get_categorizer_stats'));
    loadTestFile(require.resolve('./get_stopped_partitions'));
  });
}