mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
[Response Ops][Alerting] Generating framework alerts-as-data alert schema and type (#155626)
## Summary Auto-generation of alert schema and type that will be used for framework alerts as data. The function for auto-generation is run inside of a functional test because we want to access the alert definition that a rule type provides when it registers with the alerting framework. This definition tells us what rule type specific fields they expect as well as whether they use ECS. With this, we can build a complete `io-ts` schema and a type from that schema. This also updates the type returned by the `getSummarizedAlerts` function to use one of the auto-generated types. --------- Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com> Co-authored-by: Mike Côté <mikecote@users.noreply.github.com>
This commit is contained in:
parent
7db4f278a0
commit
1fa7174770
27 changed files with 3574 additions and 26 deletions
|
@ -7,3 +7,4 @@
|
|||
*/
|
||||
|
||||
export * from './src/field_maps';
|
||||
export * from './src/schemas';
|
||||
|
|
|
@ -0,0 +1,355 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { set } from '@kbn/safer-lodash-set';
|
||||
import { get } from 'lodash';
|
||||
import { FieldMap } from '../..';
|
||||
import { createLineWriter, LineWriter } from './lib/line_writer';
|
||||
|
||||
// Base directory for generated output files: the parent of this script's directory.
const PLUGIN_DIR = path.resolve(path.join(__dirname, '..'));
|
||||
|
||||
interface CreateSchemaFromFieldMapOpts {
|
||||
outputFile: string;
|
||||
fieldMap: FieldMap;
|
||||
schemaPrefix: string;
|
||||
useAlert?: boolean;
|
||||
useEcs?: boolean;
|
||||
useLegacyAlerts?: boolean;
|
||||
flattened?: boolean;
|
||||
}
|
||||
export const createSchemaFromFieldMap = ({
|
||||
outputFile,
|
||||
fieldMap,
|
||||
schemaPrefix,
|
||||
useAlert = false,
|
||||
useEcs = false,
|
||||
useLegacyAlerts = false,
|
||||
flattened = false,
|
||||
}: CreateSchemaFromFieldMapOpts) => {
|
||||
const lineWriters = {
|
||||
IMPORTS: createLineWriter(),
|
||||
REQUIRED_FIELDS: createLineWriter(),
|
||||
OPTIONAL_FIELDS: createLineWriter(),
|
||||
INCLUDED_SCHEMAS: createLineWriter(''),
|
||||
};
|
||||
|
||||
if (useAlert) {
|
||||
lineWriters.IMPORTS.addLine(`import { AlertSchema } from './alert_schema';`);
|
||||
lineWriters.INCLUDED_SCHEMAS.addLine(`, AlertSchema`);
|
||||
}
|
||||
|
||||
if (useEcs) {
|
||||
lineWriters.IMPORTS.addLine(`import { EcsSchema } from './ecs_schema';`);
|
||||
lineWriters.INCLUDED_SCHEMAS.addLine(`, EcsSchema`);
|
||||
}
|
||||
if (useLegacyAlerts) {
|
||||
lineWriters.IMPORTS.addLine(`import { LegacyAlertSchema } from './legacy_alert_schema';`);
|
||||
lineWriters.INCLUDED_SCHEMAS.addLine(`, LegacyAlertSchema`);
|
||||
}
|
||||
|
||||
generateSchemaFromFieldMap({ lineWriters, fieldMap, flattened });
|
||||
|
||||
const contents = getSchemaFileContents(lineWriters, schemaPrefix);
|
||||
|
||||
writeGeneratedFile(outputFile, `${contents}\n`);
|
||||
};
|
||||
|
||||
interface GenerateSchemaFromFieldMapOpts {
|
||||
lineWriters: Record<string, LineWriter>;
|
||||
fieldMap: FieldMap;
|
||||
flattened: boolean;
|
||||
}
|
||||
const generateSchemaFromFieldMap = ({
|
||||
lineWriters,
|
||||
fieldMap,
|
||||
flattened,
|
||||
}: GenerateSchemaFromFieldMapOpts) => {
|
||||
const requiredFieldMap = { properties: {} };
|
||||
const optionalFieldMap = { properties: {} };
|
||||
|
||||
const getKeyWithProperties = (key: string) => key.split('.').join('.properties.');
|
||||
|
||||
// Generate required properties
|
||||
Object.keys(fieldMap)
|
||||
.filter((key: string) => fieldMap[key].required === true)
|
||||
.map((key: string) =>
|
||||
set(requiredFieldMap.properties, getKeyWithProperties(key), fieldMap[key])
|
||||
);
|
||||
generateSchemaLines({
|
||||
lineWriter: lineWriters.REQUIRED_FIELDS,
|
||||
propertyKey: null,
|
||||
required: true,
|
||||
flattened,
|
||||
fieldMap: requiredFieldMap,
|
||||
});
|
||||
|
||||
// Generate optional properties
|
||||
Object.keys(fieldMap)
|
||||
.filter((key: string) => fieldMap[key].required !== true)
|
||||
.map((key: string) =>
|
||||
set(optionalFieldMap.properties, getKeyWithProperties(key), fieldMap[key])
|
||||
);
|
||||
generateSchemaLines({
|
||||
lineWriter: lineWriters.OPTIONAL_FIELDS,
|
||||
propertyKey: null,
|
||||
required: false,
|
||||
flattened,
|
||||
fieldMap: optionalFieldMap,
|
||||
});
|
||||
};
|
||||
// One node of the nested property tree built from the flat field map:
// each level maps a path segment to its child nodes.
interface FieldMapProperty {
  properties: Record<string, FieldMapProperty>;
}

interface GenerateSchemaLinesOpts {
  // Destination for the emitted schema source lines.
  lineWriter: LineWriter;
  // Name of the current field; null at the root of the tree.
  propertyKey: string | null;
  // When true, containers are emitted as rt.type({...}); otherwise rt.partial({...}).
  required: boolean;
  // When true, leaf keys are emitted as dotted paths instead of nested objects.
  flattened: boolean;
  fieldMap: {
    properties: Record<string, FieldMapProperty>;
  };
}
|
||||
|
||||
const getSchemaDefinition = (schemaPrefix: string, isArray: boolean): string => {
|
||||
if (isArray) {
|
||||
schemaPrefix = `${schemaPrefix}Array`;
|
||||
}
|
||||
return schemaPrefix;
|
||||
};
|
||||
|
||||
// Recursively emits io-ts schema source for one node of the field-map tree.
// Leaf nodes (those with a `type`) map to a schema* identifier; container
// nodes recurse into their `properties`, wrapped in rt.type (required) or
// rt.partial (optional). Unknown types/shapes abort the run via logError.
const generateSchemaLines = ({
  fieldMap,
  propertyKey,
  lineWriter,
  required,
  flattened,
}: GenerateSchemaLinesOpts) => {
  if (fieldMap == null) return;

  const type = get(fieldMap, 'type');
  const isArray = get(fieldMap, 'array', false);
  const isEnabled = get(fieldMap, 'enabled', true);

  // Keys containing '.' or '@' are not valid bare identifiers in the emitted
  // object literal, so quote them (e.g. '@timestamp', dotted flattened paths).
  let keyToWrite = propertyKey;
  if (propertyKey?.includes('.') || propertyKey?.includes('@')) {
    keyToWrite = `'${propertyKey}'`;
  }

  // Leaf field: map its ES field type to the matching schema* identifier.
  if (null != type) {
    switch (type) {
      case 'flattened':
        lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaUnknown', isArray)},`);
        break;
      case 'object':
      case 'nested':
        if (!isEnabled) {
          // Disabled objects are not indexed; accept anything.
          lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaUnknown', isArray)},`);
        } else if (isArray && null != fieldMap.properties) {
          // Array of objects: rt.array wrapping a type/partial of the children.
          lineWriter.addLineAndIndent(`${keyToWrite}: rt.array(`);
          if (required) {
            lineWriter.addLineAndIndent(`rt.type({`);
          } else {
            lineWriter.addLineAndIndent(`rt.partial({`);
          }
          for (const prop of Object.keys(fieldMap.properties).sort()) {
            generateSchemaLines({
              lineWriter,
              propertyKey: prop,
              required,
              fieldMap: fieldMap.properties[prop],
              flattened,
            });
          }
          lineWriter.dedentAndAddLine(`})`);
          lineWriter.dedentAndAddLine(`),`);
        }
        // NOTE(review): a non-array enabled object with a `type` but no
        // `properties` emits nothing here — presumably unreachable for valid
        // field maps; confirm against the FieldMap producers.
        break;
      // All string-like ES types decode as plain strings.
      case 'keyword':
      case 'ip':
      case 'constant_keyword':
      case 'match_only_text':
      case 'text':
      case 'version':
      case 'wildcard':
        lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaString', isArray)},`);
        break;
      case 'date':
        lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaDate', isArray)},`);
        break;
      case 'date_range':
        lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaDateRange', isArray)},`);
        break;
      case 'geo_point':
        lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaGeoPoint', isArray)},`);
        break;
      // long/scaled_float may exceed JS number precision, so accept string too.
      case 'long':
      case 'scaled_float':
        lineWriter.addLine(
          `${keyToWrite}: ${getSchemaDefinition('schemaStringOrNumber', isArray)},`
        );
        break;
      case 'float':
      case 'integer':
        lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaNumber', isArray)},`);
        break;
      case 'boolean':
        lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaBoolean', isArray)},`);
        break;
      case 'alias':
        // Aliases only appear in the non-flattened form; typed as unknown.
        if (!flattened) {
          lineWriter.addLine(`${keyToWrite}: ${getSchemaDefinition('schemaUnknown', isArray)},`);
        }
        break;
      default:
        // Unrecognized ES field type: abort so the generator can't emit a wrong schema.
        logError(`unknown type ${type}: ${JSON.stringify(fieldMap)}`);
        break;
    }

    return;
  }

  // Container node: it must have children, or the field map is malformed.
  if (null == get(fieldMap, 'properties')) {
    logError(`unknown properties ${propertyKey}: ${JSON.stringify(fieldMap)}`);
  }

  // Open the container: anonymous at the root, keyed otherwise. In flattened
  // mode intermediate containers emit no wrapper — children use dotted keys.
  if (null == propertyKey) {
    if (required) {
      lineWriter.addLineAndIndent(`rt.type({`);
    } else {
      lineWriter.addLineAndIndent(`rt.partial({`);
    }
  } else if (!flattened) {
    if (required) {
      lineWriter.addLineAndIndent(`${propertyKey}: rt.type({`);
    } else {
      lineWriter.addLineAndIndent(`${propertyKey}: rt.partial({`);
    }
  }

  // write the object properties (sorted for deterministic output)
  for (const prop of Object.keys(fieldMap.properties).sort()) {
    const key = propertyKey && flattened ? `${propertyKey}.${prop}` : prop;
    generateSchemaLines({
      lineWriter,
      propertyKey: key,
      required,
      flattened,
      fieldMap: fieldMap.properties[prop],
    });
  }

  // Close the wrapper opened above (skipped for flattened intermediates).
  if (null == propertyKey || !flattened) {
    lineWriter.dedentAndAddLine(`}),`);
  }
};
|
||||
|
||||
// Template for a generated schema file. The %%…%% placeholders are filled in
// by getSchemaFileContents: %%IMPORTS%%, %%REQUIRED_FIELDS%%,
// %%OPTIONAL_FIELDS%% and %%INCLUDED_SCHEMAS%% from the line writers, and
// %%schemaPrefix%% from the caller.
// Fix: the ISO date pattern must use digit classes (\d) — the unescaped form
// matched the literal character 'd' and rejected every real timestamp.
const SchemaFileTemplate = `
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */
// ---------------------------------- WARNING ----------------------------------
// this file was generated, and should not be edited by hand
// ---------------------------------- WARNING ----------------------------------
import * as rt from 'io-ts';
import { Either } from 'fp-ts/lib/Either';
%%IMPORTS%%
const ISO_DATE_PATTERN = /^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d{3}Z$/;
export const IsoDateString = new rt.Type<string, string, unknown>(
  'IsoDateString',
  rt.string.is,
  (input, context): Either<rt.Errors, string> => {
    if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
      return rt.success(input);
    } else {
      return rt.failure(input, context);
    }
  },
  rt.identity
);
export type IsoDateStringC = typeof IsoDateString;
export const schemaDate = IsoDateString;
export const schemaDateArray = rt.array(IsoDateString);
export const schemaDateRange = rt.partial({
  gte: schemaDate,
  lte: schemaDate,
});
export const schemaDateRangeArray = rt.array(schemaDateRange);
export const schemaUnknown = rt.unknown;
export const schemaUnknownArray = rt.array(rt.unknown);
export const schemaString = rt.string;
export const schemaStringArray = rt.array(schemaString);
export const schemaNumber = rt.number;
export const schemaNumberArray = rt.array(schemaNumber);
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
export const schemaBoolean = rt.boolean;
export const schemaBooleanArray = rt.array(schemaBoolean);
const schemaGeoPointCoords = rt.type({
  type: schemaString,
  coordinates: schemaNumberArray,
});
const schemaGeoPointString = schemaString;
const schemaGeoPointLatLon = rt.type({
  lat: schemaNumber,
  lon: schemaNumber,
});
const schemaGeoPointLocation = rt.type({
  location: schemaNumberArray,
});
const schemaGeoPointLocationString = rt.type({
  location: schemaString,
});
export const schemaGeoPoint = rt.union([
  schemaGeoPointCoords,
  schemaGeoPointString,
  schemaGeoPointLatLon,
  schemaGeoPointLocation,
  schemaGeoPointLocationString,
]);
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
// prettier-ignore
const %%schemaPrefix%%Required = %%REQUIRED_FIELDS%%;
const %%schemaPrefix%%Optional = %%OPTIONAL_FIELDS%%;

// prettier-ignore
export const %%schemaPrefix%%Schema = rt.intersection([%%schemaPrefix%%Required, %%schemaPrefix%%Optional%%INCLUDED_SCHEMAS%%]);
// prettier-ignore
export type %%schemaPrefix%% = rt.TypeOf<typeof %%schemaPrefix%%Schema>;

`.trim();
|
||||
|
||||
const getSchemaFileContents = (lineWriters: Record<string, LineWriter>, schemaPrefix: string) => {
|
||||
return Object.keys(lineWriters).reduce((currTemplate, key) => {
|
||||
const schemaLines = lineWriters[key].getContent().replace(/,$/, '');
|
||||
return currTemplate
|
||||
.replaceAll(`%%schemaPrefix%%`, schemaPrefix)
|
||||
.replace(`%%${key}%%`, schemaLines);
|
||||
}, SchemaFileTemplate);
|
||||
};
|
||||
|
||||
const writeGeneratedFile = (fileName: string, contents: string) => {
|
||||
const genFileName = path.join(PLUGIN_DIR, fileName);
|
||||
try {
|
||||
fs.writeFileSync(genFileName, contents);
|
||||
} catch (err) {
|
||||
logError(`error writing file: ${genFileName}: ${err.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
const logError = (message: string) => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`error: ${message}`);
|
||||
process.exit(1);
|
||||
};
|
|
@ -0,0 +1,124 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
// this file was generated, and should not be edited by hand
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
import * as rt from 'io-ts';
|
||||
import { Either } from 'fp-ts/lib/Either';
|
||||
|
||||
// Matches ISO 8601 timestamps with millisecond precision, e.g.
// 2023-04-24T09:48:58.000Z. Fix: digit classes must be \d — the unescaped
// form matched the literal character 'd' and rejected every real date.
const ISO_DATE_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/;
|
||||
export const IsoDateString = new rt.Type<string, string, unknown>(
|
||||
'IsoDateString',
|
||||
rt.string.is,
|
||||
(input, context): Either<rt.Errors, string> => {
|
||||
if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
|
||||
return rt.success(input);
|
||||
} else {
|
||||
return rt.failure(input, context);
|
||||
}
|
||||
},
|
||||
rt.identity
|
||||
);
|
||||
export type IsoDateStringC = typeof IsoDateString;
|
||||
export const schemaDate = IsoDateString;
|
||||
export const schemaDateArray = rt.array(IsoDateString);
|
||||
export const schemaDateRange = rt.partial({
|
||||
gte: schemaDate,
|
||||
lte: schemaDate,
|
||||
});
|
||||
export const schemaDateRangeArray = rt.array(schemaDateRange);
|
||||
export const schemaUnknown = rt.unknown;
|
||||
export const schemaUnknownArray = rt.array(rt.unknown);
|
||||
export const schemaString = rt.string;
|
||||
export const schemaStringArray = rt.array(schemaString);
|
||||
export const schemaNumber = rt.number;
|
||||
export const schemaNumberArray = rt.array(schemaNumber);
|
||||
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
|
||||
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
|
||||
export const schemaBoolean = rt.boolean;
|
||||
export const schemaBooleanArray = rt.array(schemaBoolean);
|
||||
const schemaGeoPointCoords = rt.type({
|
||||
type: schemaString,
|
||||
coordinates: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointString = schemaString;
|
||||
const schemaGeoPointLatLon = rt.type({
|
||||
lat: schemaNumber,
|
||||
lon: schemaNumber,
|
||||
});
|
||||
const schemaGeoPointLocation = rt.type({
|
||||
location: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointLocationString = rt.type({
|
||||
location: schemaString,
|
||||
});
|
||||
export const schemaGeoPoint = rt.union([
|
||||
schemaGeoPointCoords,
|
||||
schemaGeoPointString,
|
||||
schemaGeoPointLatLon,
|
||||
schemaGeoPointLocation,
|
||||
schemaGeoPointLocationString,
|
||||
]);
|
||||
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
|
||||
// prettier-ignore
|
||||
const AlertRequired = rt.type({
|
||||
'@timestamp': schemaDate,
|
||||
kibana: rt.type({
|
||||
alert: rt.type({
|
||||
instance: rt.type({
|
||||
id: schemaString,
|
||||
}),
|
||||
rule: rt.type({
|
||||
category: schemaString,
|
||||
consumer: schemaString,
|
||||
name: schemaString,
|
||||
producer: schemaString,
|
||||
revision: schemaStringOrNumber,
|
||||
rule_type_id: schemaString,
|
||||
uuid: schemaString,
|
||||
}),
|
||||
status: schemaString,
|
||||
uuid: schemaString,
|
||||
}),
|
||||
space_ids: schemaStringArray,
|
||||
}),
|
||||
});
|
||||
const AlertOptional = rt.partial({
|
||||
kibana: rt.partial({
|
||||
alert: rt.partial({
|
||||
action_group: schemaString,
|
||||
case_ids: schemaStringArray,
|
||||
duration: rt.partial({
|
||||
us: schemaStringOrNumber,
|
||||
}),
|
||||
end: schemaDate,
|
||||
flapping: schemaBoolean,
|
||||
flapping_history: schemaBooleanArray,
|
||||
last_detected: schemaDate,
|
||||
maintenance_window_ids: schemaStringArray,
|
||||
reason: schemaString,
|
||||
rule: rt.partial({
|
||||
execution: rt.partial({
|
||||
uuid: schemaString,
|
||||
}),
|
||||
parameters: schemaUnknown,
|
||||
tags: schemaStringArray,
|
||||
}),
|
||||
start: schemaDate,
|
||||
time_range: schemaDateRange,
|
||||
url: schemaString,
|
||||
workflow_status: schemaString,
|
||||
}),
|
||||
version: schemaString,
|
||||
}),
|
||||
});
|
||||
|
||||
// prettier-ignore
|
||||
export const AlertSchema = rt.intersection([AlertRequired, AlertOptional]);
|
||||
// prettier-ignore
|
||||
export type Alert = rt.TypeOf<typeof AlertSchema>;
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,122 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
// this file was generated, and should not be edited by hand
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
import * as rt from 'io-ts';
|
||||
import { Either } from 'fp-ts/lib/Either';
|
||||
|
||||
// Matches ISO 8601 timestamps with millisecond precision, e.g.
// 2023-04-24T09:48:58.000Z. Fix: digit classes must be \d — the unescaped
// form matched the literal character 'd' and rejected every real date.
const ISO_DATE_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/;
|
||||
export const IsoDateString = new rt.Type<string, string, unknown>(
|
||||
'IsoDateString',
|
||||
rt.string.is,
|
||||
(input, context): Either<rt.Errors, string> => {
|
||||
if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
|
||||
return rt.success(input);
|
||||
} else {
|
||||
return rt.failure(input, context);
|
||||
}
|
||||
},
|
||||
rt.identity
|
||||
);
|
||||
export type IsoDateStringC = typeof IsoDateString;
|
||||
export const schemaDate = IsoDateString;
|
||||
export const schemaDateArray = rt.array(IsoDateString);
|
||||
export const schemaDateRange = rt.partial({
|
||||
gte: schemaDate,
|
||||
lte: schemaDate,
|
||||
});
|
||||
export const schemaDateRangeArray = rt.array(schemaDateRange);
|
||||
export const schemaUnknown = rt.unknown;
|
||||
export const schemaUnknownArray = rt.array(rt.unknown);
|
||||
export const schemaString = rt.string;
|
||||
export const schemaStringArray = rt.array(schemaString);
|
||||
export const schemaNumber = rt.number;
|
||||
export const schemaNumberArray = rt.array(schemaNumber);
|
||||
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
|
||||
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
|
||||
export const schemaBoolean = rt.boolean;
|
||||
export const schemaBooleanArray = rt.array(schemaBoolean);
|
||||
const schemaGeoPointCoords = rt.type({
|
||||
type: schemaString,
|
||||
coordinates: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointString = schemaString;
|
||||
const schemaGeoPointLatLon = rt.type({
|
||||
lat: schemaNumber,
|
||||
lon: schemaNumber,
|
||||
});
|
||||
const schemaGeoPointLocation = rt.type({
|
||||
location: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointLocationString = rt.type({
|
||||
location: schemaString,
|
||||
});
|
||||
export const schemaGeoPoint = rt.union([
|
||||
schemaGeoPointCoords,
|
||||
schemaGeoPointString,
|
||||
schemaGeoPointLatLon,
|
||||
schemaGeoPointLocation,
|
||||
schemaGeoPointLocationString,
|
||||
]);
|
||||
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
|
||||
// prettier-ignore
|
||||
const LegacyAlertRequired = rt.type({
|
||||
});
|
||||
const LegacyAlertOptional = rt.partial({
|
||||
ecs: rt.partial({
|
||||
version: schemaString,
|
||||
}),
|
||||
event: rt.partial({
|
||||
action: schemaString,
|
||||
kind: schemaString,
|
||||
}),
|
||||
kibana: rt.partial({
|
||||
alert: rt.partial({
|
||||
risk_score: schemaNumber,
|
||||
rule: rt.partial({
|
||||
author: schemaString,
|
||||
created_at: schemaDate,
|
||||
created_by: schemaString,
|
||||
description: schemaString,
|
||||
enabled: schemaString,
|
||||
from: schemaString,
|
||||
interval: schemaString,
|
||||
license: schemaString,
|
||||
note: schemaString,
|
||||
references: schemaStringArray,
|
||||
rule_id: schemaString,
|
||||
rule_name_override: schemaString,
|
||||
to: schemaString,
|
||||
type: schemaString,
|
||||
updated_at: schemaDate,
|
||||
updated_by: schemaString,
|
||||
version: schemaString,
|
||||
}),
|
||||
severity: schemaString,
|
||||
suppression: rt.partial({
|
||||
docs_count: schemaStringOrNumber,
|
||||
end: schemaDate,
|
||||
start: schemaDate,
|
||||
terms: rt.partial({
|
||||
field: schemaStringArray,
|
||||
value: schemaStringArray,
|
||||
}),
|
||||
}),
|
||||
system_status: schemaString,
|
||||
workflow_reason: schemaString,
|
||||
workflow_user: schemaString,
|
||||
}),
|
||||
}),
|
||||
tags: schemaStringArray,
|
||||
});
|
||||
|
||||
// prettier-ignore
|
||||
export const LegacyAlertSchema = rt.intersection([LegacyAlertRequired, LegacyAlertOptional]);
|
||||
// prettier-ignore
|
||||
export type LegacyAlert = rt.TypeOf<typeof LegacyAlertSchema>;
|
|
@ -0,0 +1,107 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
// this file was generated, and should not be edited by hand
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
import * as rt from 'io-ts';
|
||||
import { Either } from 'fp-ts/lib/Either';
|
||||
import { AlertSchema } from './alert_schema';
|
||||
import { LegacyAlertSchema } from './legacy_alert_schema';
|
||||
// Matches ISO 8601 timestamps with millisecond precision, e.g.
// 2023-04-24T09:48:58.000Z. Fix: digit classes must be \d — the unescaped
// form matched the literal character 'd' and rejected every real date.
const ISO_DATE_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/;
|
||||
export const IsoDateString = new rt.Type<string, string, unknown>(
|
||||
'IsoDateString',
|
||||
rt.string.is,
|
||||
(input, context): Either<rt.Errors, string> => {
|
||||
if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
|
||||
return rt.success(input);
|
||||
} else {
|
||||
return rt.failure(input, context);
|
||||
}
|
||||
},
|
||||
rt.identity
|
||||
);
|
||||
export type IsoDateStringC = typeof IsoDateString;
|
||||
export const schemaDate = IsoDateString;
|
||||
export const schemaDateArray = rt.array(IsoDateString);
|
||||
export const schemaDateRange = rt.partial({
|
||||
gte: schemaDate,
|
||||
lte: schemaDate,
|
||||
});
|
||||
export const schemaDateRangeArray = rt.array(schemaDateRange);
|
||||
export const schemaUnknown = rt.unknown;
|
||||
export const schemaUnknownArray = rt.array(rt.unknown);
|
||||
export const schemaString = rt.string;
|
||||
export const schemaStringArray = rt.array(schemaString);
|
||||
export const schemaNumber = rt.number;
|
||||
export const schemaNumberArray = rt.array(schemaNumber);
|
||||
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
|
||||
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
|
||||
export const schemaBoolean = rt.boolean;
|
||||
export const schemaBooleanArray = rt.array(schemaBoolean);
|
||||
const schemaGeoPointCoords = rt.type({
|
||||
type: schemaString,
|
||||
coordinates: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointString = schemaString;
|
||||
const schemaGeoPointLatLon = rt.type({
|
||||
lat: schemaNumber,
|
||||
lon: schemaNumber,
|
||||
});
|
||||
const schemaGeoPointLocation = rt.type({
|
||||
location: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointLocationString = rt.type({
|
||||
location: schemaString,
|
||||
});
|
||||
export const schemaGeoPoint = rt.union([
|
||||
schemaGeoPointCoords,
|
||||
schemaGeoPointString,
|
||||
schemaGeoPointLatLon,
|
||||
schemaGeoPointLocation,
|
||||
schemaGeoPointLocationString,
|
||||
]);
|
||||
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
|
||||
// prettier-ignore
|
||||
const ObservabilityApmAlertRequired = rt.type({
|
||||
});
|
||||
const ObservabilityApmAlertOptional = rt.partial({
|
||||
agent: rt.partial({
|
||||
name: schemaString,
|
||||
}),
|
||||
error: rt.partial({
|
||||
grouping_key: schemaString,
|
||||
}),
|
||||
kibana: rt.partial({
|
||||
alert: rt.partial({
|
||||
evaluation: rt.partial({
|
||||
threshold: schemaStringOrNumber,
|
||||
value: schemaStringOrNumber,
|
||||
values: schemaStringOrNumberArray,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
processor: rt.partial({
|
||||
event: schemaString,
|
||||
}),
|
||||
service: rt.partial({
|
||||
environment: schemaString,
|
||||
language: rt.partial({
|
||||
name: schemaString,
|
||||
}),
|
||||
name: schemaString,
|
||||
}),
|
||||
transaction: rt.partial({
|
||||
name: schemaString,
|
||||
type: schemaString,
|
||||
}),
|
||||
});
|
||||
|
||||
// prettier-ignore
|
||||
export const ObservabilityApmAlertSchema = rt.intersection([ObservabilityApmAlertRequired, ObservabilityApmAlertOptional, AlertSchema, LegacyAlertSchema]);
|
||||
// prettier-ignore
|
||||
export type ObservabilityApmAlert = rt.TypeOf<typeof ObservabilityApmAlertSchema>;
|
|
@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
// this file was generated, and should not be edited by hand
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
import * as rt from 'io-ts';
|
||||
import { Either } from 'fp-ts/lib/Either';
|
||||
import { AlertSchema } from './alert_schema';
|
||||
import { EcsSchema } from './ecs_schema';
|
||||
import { LegacyAlertSchema } from './legacy_alert_schema';
|
||||
// Matches ISO 8601 timestamps with millisecond precision, e.g.
// 2023-04-24T09:48:58.000Z. Fix: digit classes must be \d — the unescaped
// form matched the literal character 'd' and rejected every real date.
const ISO_DATE_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/;
|
||||
export const IsoDateString = new rt.Type<string, string, unknown>(
|
||||
'IsoDateString',
|
||||
rt.string.is,
|
||||
(input, context): Either<rt.Errors, string> => {
|
||||
if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
|
||||
return rt.success(input);
|
||||
} else {
|
||||
return rt.failure(input, context);
|
||||
}
|
||||
},
|
||||
rt.identity
|
||||
);
|
||||
export type IsoDateStringC = typeof IsoDateString;
|
||||
export const schemaDate = IsoDateString;
|
||||
export const schemaDateArray = rt.array(IsoDateString);
|
||||
export const schemaDateRange = rt.partial({
|
||||
gte: schemaDate,
|
||||
lte: schemaDate,
|
||||
});
|
||||
export const schemaDateRangeArray = rt.array(schemaDateRange);
|
||||
export const schemaUnknown = rt.unknown;
|
||||
export const schemaUnknownArray = rt.array(rt.unknown);
|
||||
export const schemaString = rt.string;
|
||||
export const schemaStringArray = rt.array(schemaString);
|
||||
export const schemaNumber = rt.number;
|
||||
export const schemaNumberArray = rt.array(schemaNumber);
|
||||
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
|
||||
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
|
||||
export const schemaBoolean = rt.boolean;
|
||||
export const schemaBooleanArray = rt.array(schemaBoolean);
|
||||
const schemaGeoPointCoords = rt.type({
|
||||
type: schemaString,
|
||||
coordinates: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointString = schemaString;
|
||||
const schemaGeoPointLatLon = rt.type({
|
||||
lat: schemaNumber,
|
||||
lon: schemaNumber,
|
||||
});
|
||||
const schemaGeoPointLocation = rt.type({
|
||||
location: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointLocationString = rt.type({
|
||||
location: schemaString,
|
||||
});
|
||||
export const schemaGeoPoint = rt.union([
|
||||
schemaGeoPointCoords,
|
||||
schemaGeoPointString,
|
||||
schemaGeoPointLatLon,
|
||||
schemaGeoPointLocation,
|
||||
schemaGeoPointLocationString,
|
||||
]);
|
||||
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
|
||||
// prettier-ignore
|
||||
const ObservabilityLogsAlertRequired = rt.type({
|
||||
});
|
||||
const ObservabilityLogsAlertOptional = rt.partial({
|
||||
kibana: rt.partial({
|
||||
alert: rt.partial({
|
||||
evaluation: rt.partial({
|
||||
threshold: schemaStringOrNumber,
|
||||
value: schemaStringOrNumber,
|
||||
values: schemaStringOrNumberArray,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
});
|
||||
|
||||
// prettier-ignore
|
||||
export const ObservabilityLogsAlertSchema = rt.intersection([ObservabilityLogsAlertRequired, ObservabilityLogsAlertOptional, AlertSchema, EcsSchema, LegacyAlertSchema]);
|
||||
// prettier-ignore
|
||||
export type ObservabilityLogsAlert = rt.TypeOf<typeof ObservabilityLogsAlertSchema>;
|
|
@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
// this file was generated, and should not be edited by hand
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
import * as rt from 'io-ts';
|
||||
import { Either } from 'fp-ts/lib/Either';
|
||||
import { AlertSchema } from './alert_schema';
|
||||
import { EcsSchema } from './ecs_schema';
|
||||
import { LegacyAlertSchema } from './legacy_alert_schema';
|
||||
// Matches ISO 8601 timestamps with millisecond precision, e.g.
// 2023-04-24T09:48:58.000Z. Fix: digit classes must be \d — the unescaped
// form matched the literal character 'd' and rejected every real date.
const ISO_DATE_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/;
|
||||
export const IsoDateString = new rt.Type<string, string, unknown>(
|
||||
'IsoDateString',
|
||||
rt.string.is,
|
||||
(input, context): Either<rt.Errors, string> => {
|
||||
if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
|
||||
return rt.success(input);
|
||||
} else {
|
||||
return rt.failure(input, context);
|
||||
}
|
||||
},
|
||||
rt.identity
|
||||
);
|
||||
export type IsoDateStringC = typeof IsoDateString;
|
||||
export const schemaDate = IsoDateString;
|
||||
export const schemaDateArray = rt.array(IsoDateString);
|
||||
export const schemaDateRange = rt.partial({
|
||||
gte: schemaDate,
|
||||
lte: schemaDate,
|
||||
});
|
||||
export const schemaDateRangeArray = rt.array(schemaDateRange);
|
||||
export const schemaUnknown = rt.unknown;
|
||||
export const schemaUnknownArray = rt.array(rt.unknown);
|
||||
export const schemaString = rt.string;
|
||||
export const schemaStringArray = rt.array(schemaString);
|
||||
export const schemaNumber = rt.number;
|
||||
export const schemaNumberArray = rt.array(schemaNumber);
|
||||
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
|
||||
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
|
||||
export const schemaBoolean = rt.boolean;
|
||||
export const schemaBooleanArray = rt.array(schemaBoolean);
|
||||
const schemaGeoPointCoords = rt.type({
|
||||
type: schemaString,
|
||||
coordinates: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointString = schemaString;
|
||||
const schemaGeoPointLatLon = rt.type({
|
||||
lat: schemaNumber,
|
||||
lon: schemaNumber,
|
||||
});
|
||||
const schemaGeoPointLocation = rt.type({
|
||||
location: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointLocationString = rt.type({
|
||||
location: schemaString,
|
||||
});
|
||||
export const schemaGeoPoint = rt.union([
|
||||
schemaGeoPointCoords,
|
||||
schemaGeoPointString,
|
||||
schemaGeoPointLatLon,
|
||||
schemaGeoPointLocation,
|
||||
schemaGeoPointLocationString,
|
||||
]);
|
||||
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
|
||||
// prettier-ignore
|
||||
const ObservabilityMetricsAlertRequired = rt.type({
|
||||
});
|
||||
const ObservabilityMetricsAlertOptional = rt.partial({
|
||||
kibana: rt.partial({
|
||||
alert: rt.partial({
|
||||
evaluation: rt.partial({
|
||||
threshold: schemaStringOrNumber,
|
||||
value: schemaStringOrNumber,
|
||||
values: schemaStringOrNumberArray,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
});
|
||||
|
||||
// prettier-ignore
|
||||
export const ObservabilityMetricsAlertSchema = rt.intersection([ObservabilityMetricsAlertRequired, ObservabilityMetricsAlertOptional, AlertSchema, EcsSchema, LegacyAlertSchema]);
|
||||
// prettier-ignore
|
||||
export type ObservabilityMetricsAlert = rt.TypeOf<typeof ObservabilityMetricsAlertSchema>;
|
|
@ -0,0 +1,91 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
// this file was generated, and should not be edited by hand
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
import * as rt from 'io-ts';
|
||||
import { Either } from 'fp-ts/lib/Either';
|
||||
import { AlertSchema } from './alert_schema';
|
||||
import { LegacyAlertSchema } from './legacy_alert_schema';
|
||||
const ISO_DATE_PATTERN = /^d{4}-d{2}-d{2}Td{2}:d{2}:d{2}.d{3}Z$/;
|
||||
export const IsoDateString = new rt.Type<string, string, unknown>(
|
||||
'IsoDateString',
|
||||
rt.string.is,
|
||||
(input, context): Either<rt.Errors, string> => {
|
||||
if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
|
||||
return rt.success(input);
|
||||
} else {
|
||||
return rt.failure(input, context);
|
||||
}
|
||||
},
|
||||
rt.identity
|
||||
);
|
||||
export type IsoDateStringC = typeof IsoDateString;
|
||||
export const schemaDate = IsoDateString;
|
||||
export const schemaDateArray = rt.array(IsoDateString);
|
||||
export const schemaDateRange = rt.partial({
|
||||
gte: schemaDate,
|
||||
lte: schemaDate,
|
||||
});
|
||||
export const schemaDateRangeArray = rt.array(schemaDateRange);
|
||||
export const schemaUnknown = rt.unknown;
|
||||
export const schemaUnknownArray = rt.array(rt.unknown);
|
||||
export const schemaString = rt.string;
|
||||
export const schemaStringArray = rt.array(schemaString);
|
||||
export const schemaNumber = rt.number;
|
||||
export const schemaNumberArray = rt.array(schemaNumber);
|
||||
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
|
||||
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
|
||||
export const schemaBoolean = rt.boolean;
|
||||
export const schemaBooleanArray = rt.array(schemaBoolean);
|
||||
const schemaGeoPointCoords = rt.type({
|
||||
type: schemaString,
|
||||
coordinates: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointString = schemaString;
|
||||
const schemaGeoPointLatLon = rt.type({
|
||||
lat: schemaNumber,
|
||||
lon: schemaNumber,
|
||||
});
|
||||
const schemaGeoPointLocation = rt.type({
|
||||
location: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointLocationString = rt.type({
|
||||
location: schemaString,
|
||||
});
|
||||
export const schemaGeoPoint = rt.union([
|
||||
schemaGeoPointCoords,
|
||||
schemaGeoPointString,
|
||||
schemaGeoPointLatLon,
|
||||
schemaGeoPointLocation,
|
||||
schemaGeoPointLocationString,
|
||||
]);
|
||||
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
|
||||
// prettier-ignore
|
||||
const ObservabilitySloAlertRequired = rt.type({
|
||||
});
|
||||
const ObservabilitySloAlertOptional = rt.partial({
|
||||
kibana: rt.partial({
|
||||
alert: rt.partial({
|
||||
evaluation: rt.partial({
|
||||
threshold: schemaStringOrNumber,
|
||||
value: schemaStringOrNumber,
|
||||
values: schemaStringOrNumberArray,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
slo: rt.partial({
|
||||
id: schemaString,
|
||||
revision: schemaStringOrNumber,
|
||||
}),
|
||||
});
|
||||
|
||||
// prettier-ignore
|
||||
export const ObservabilitySloAlertSchema = rt.intersection([ObservabilitySloAlertRequired, ObservabilitySloAlertOptional, AlertSchema, LegacyAlertSchema]);
|
||||
// prettier-ignore
|
||||
export type ObservabilitySloAlert = rt.TypeOf<typeof ObservabilitySloAlertSchema>;
|
|
@ -0,0 +1,129 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
// this file was generated, and should not be edited by hand
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
import * as rt from 'io-ts';
|
||||
import { Either } from 'fp-ts/lib/Either';
|
||||
import { AlertSchema } from './alert_schema';
|
||||
import { LegacyAlertSchema } from './legacy_alert_schema';
|
||||
const ISO_DATE_PATTERN = /^d{4}-d{2}-d{2}Td{2}:d{2}:d{2}.d{3}Z$/;
|
||||
export const IsoDateString = new rt.Type<string, string, unknown>(
|
||||
'IsoDateString',
|
||||
rt.string.is,
|
||||
(input, context): Either<rt.Errors, string> => {
|
||||
if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
|
||||
return rt.success(input);
|
||||
} else {
|
||||
return rt.failure(input, context);
|
||||
}
|
||||
},
|
||||
rt.identity
|
||||
);
|
||||
export type IsoDateStringC = typeof IsoDateString;
|
||||
export const schemaDate = IsoDateString;
|
||||
export const schemaDateArray = rt.array(IsoDateString);
|
||||
export const schemaDateRange = rt.partial({
|
||||
gte: schemaDate,
|
||||
lte: schemaDate,
|
||||
});
|
||||
export const schemaDateRangeArray = rt.array(schemaDateRange);
|
||||
export const schemaUnknown = rt.unknown;
|
||||
export const schemaUnknownArray = rt.array(rt.unknown);
|
||||
export const schemaString = rt.string;
|
||||
export const schemaStringArray = rt.array(schemaString);
|
||||
export const schemaNumber = rt.number;
|
||||
export const schemaNumberArray = rt.array(schemaNumber);
|
||||
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
|
||||
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
|
||||
export const schemaBoolean = rt.boolean;
|
||||
export const schemaBooleanArray = rt.array(schemaBoolean);
|
||||
const schemaGeoPointCoords = rt.type({
|
||||
type: schemaString,
|
||||
coordinates: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointString = schemaString;
|
||||
const schemaGeoPointLatLon = rt.type({
|
||||
lat: schemaNumber,
|
||||
lon: schemaNumber,
|
||||
});
|
||||
const schemaGeoPointLocation = rt.type({
|
||||
location: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointLocationString = rt.type({
|
||||
location: schemaString,
|
||||
});
|
||||
export const schemaGeoPoint = rt.union([
|
||||
schemaGeoPointCoords,
|
||||
schemaGeoPointString,
|
||||
schemaGeoPointLatLon,
|
||||
schemaGeoPointLocation,
|
||||
schemaGeoPointLocationString,
|
||||
]);
|
||||
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
|
||||
// prettier-ignore
|
||||
const ObservabilityUptimeAlertRequired = rt.type({
|
||||
});
|
||||
const ObservabilityUptimeAlertOptional = rt.partial({
|
||||
agent: rt.partial({
|
||||
name: schemaString,
|
||||
}),
|
||||
anomaly: rt.partial({
|
||||
bucket_span: rt.partial({
|
||||
minutes: schemaString,
|
||||
}),
|
||||
start: schemaDate,
|
||||
}),
|
||||
error: rt.partial({
|
||||
message: schemaString,
|
||||
}),
|
||||
kibana: rt.partial({
|
||||
alert: rt.partial({
|
||||
evaluation: rt.partial({
|
||||
threshold: schemaStringOrNumber,
|
||||
value: schemaStringOrNumber,
|
||||
values: schemaStringOrNumberArray,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
monitor: rt.partial({
|
||||
id: schemaString,
|
||||
name: schemaString,
|
||||
type: schemaString,
|
||||
}),
|
||||
observer: rt.partial({
|
||||
geo: rt.partial({
|
||||
name: schemaString,
|
||||
}),
|
||||
}),
|
||||
tls: rt.partial({
|
||||
server: rt.partial({
|
||||
hash: rt.partial({
|
||||
sha256: schemaString,
|
||||
}),
|
||||
x509: rt.partial({
|
||||
issuer: rt.partial({
|
||||
common_name: schemaString,
|
||||
}),
|
||||
not_after: schemaDate,
|
||||
not_before: schemaDate,
|
||||
subject: rt.partial({
|
||||
common_name: schemaString,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
url: rt.partial({
|
||||
full: schemaString,
|
||||
}),
|
||||
});
|
||||
|
||||
// prettier-ignore
|
||||
export const ObservabilityUptimeAlertSchema = rt.intersection([ObservabilityUptimeAlertRequired, ObservabilityUptimeAlertOptional, AlertSchema, LegacyAlertSchema]);
|
||||
// prettier-ignore
|
||||
export type ObservabilityUptimeAlert = rt.TypeOf<typeof ObservabilityUptimeAlertSchema>;
|
|
@ -0,0 +1,349 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
// this file was generated, and should not be edited by hand
|
||||
// ---------------------------------- WARNING ----------------------------------
|
||||
import * as rt from 'io-ts';
|
||||
import { Either } from 'fp-ts/lib/Either';
|
||||
import { AlertSchema } from './alert_schema';
|
||||
import { EcsSchema } from './ecs_schema';
|
||||
import { LegacyAlertSchema } from './legacy_alert_schema';
|
||||
const ISO_DATE_PATTERN = /^d{4}-d{2}-d{2}Td{2}:d{2}:d{2}.d{3}Z$/;
|
||||
export const IsoDateString = new rt.Type<string, string, unknown>(
|
||||
'IsoDateString',
|
||||
rt.string.is,
|
||||
(input, context): Either<rt.Errors, string> => {
|
||||
if (typeof input === 'string' && ISO_DATE_PATTERN.test(input)) {
|
||||
return rt.success(input);
|
||||
} else {
|
||||
return rt.failure(input, context);
|
||||
}
|
||||
},
|
||||
rt.identity
|
||||
);
|
||||
export type IsoDateStringC = typeof IsoDateString;
|
||||
export const schemaDate = IsoDateString;
|
||||
export const schemaDateArray = rt.array(IsoDateString);
|
||||
export const schemaDateRange = rt.partial({
|
||||
gte: schemaDate,
|
||||
lte: schemaDate,
|
||||
});
|
||||
export const schemaDateRangeArray = rt.array(schemaDateRange);
|
||||
export const schemaUnknown = rt.unknown;
|
||||
export const schemaUnknownArray = rt.array(rt.unknown);
|
||||
export const schemaString = rt.string;
|
||||
export const schemaStringArray = rt.array(schemaString);
|
||||
export const schemaNumber = rt.number;
|
||||
export const schemaNumberArray = rt.array(schemaNumber);
|
||||
export const schemaStringOrNumber = rt.union([schemaString, schemaNumber]);
|
||||
export const schemaStringOrNumberArray = rt.array(schemaStringOrNumber);
|
||||
export const schemaBoolean = rt.boolean;
|
||||
export const schemaBooleanArray = rt.array(schemaBoolean);
|
||||
const schemaGeoPointCoords = rt.type({
|
||||
type: schemaString,
|
||||
coordinates: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointString = schemaString;
|
||||
const schemaGeoPointLatLon = rt.type({
|
||||
lat: schemaNumber,
|
||||
lon: schemaNumber,
|
||||
});
|
||||
const schemaGeoPointLocation = rt.type({
|
||||
location: schemaNumberArray,
|
||||
});
|
||||
const schemaGeoPointLocationString = rt.type({
|
||||
location: schemaString,
|
||||
});
|
||||
export const schemaGeoPoint = rt.union([
|
||||
schemaGeoPointCoords,
|
||||
schemaGeoPointString,
|
||||
schemaGeoPointLatLon,
|
||||
schemaGeoPointLocation,
|
||||
schemaGeoPointLocationString,
|
||||
]);
|
||||
export const schemaGeoPointArray = rt.array(schemaGeoPoint);
|
||||
// prettier-ignore
|
||||
const SecurityAlertRequired = rt.type({
|
||||
'@timestamp': schemaDate,
|
||||
kibana: rt.type({
|
||||
alert: rt.type({
|
||||
ancestors: rt.array(
|
||||
rt.type({
|
||||
depth: schemaStringOrNumber,
|
||||
id: schemaString,
|
||||
index: schemaString,
|
||||
type: schemaString,
|
||||
})
|
||||
),
|
||||
depth: schemaStringOrNumber,
|
||||
instance: rt.type({
|
||||
id: schemaString,
|
||||
}),
|
||||
original_event: rt.type({
|
||||
action: schemaString,
|
||||
category: schemaStringArray,
|
||||
created: schemaDate,
|
||||
dataset: schemaString,
|
||||
id: schemaString,
|
||||
ingested: schemaDate,
|
||||
kind: schemaString,
|
||||
module: schemaString,
|
||||
original: schemaString,
|
||||
outcome: schemaString,
|
||||
provider: schemaString,
|
||||
sequence: schemaStringOrNumber,
|
||||
type: schemaStringArray,
|
||||
}),
|
||||
original_time: schemaDate,
|
||||
rule: rt.type({
|
||||
category: schemaString,
|
||||
consumer: schemaString,
|
||||
false_positives: schemaStringArray,
|
||||
max_signals: schemaStringOrNumberArray,
|
||||
name: schemaString,
|
||||
producer: schemaString,
|
||||
revision: schemaStringOrNumber,
|
||||
rule_type_id: schemaString,
|
||||
threat: rt.type({
|
||||
framework: schemaString,
|
||||
tactic: rt.type({
|
||||
id: schemaString,
|
||||
name: schemaString,
|
||||
reference: schemaString,
|
||||
}),
|
||||
technique: rt.type({
|
||||
id: schemaString,
|
||||
name: schemaString,
|
||||
reference: schemaString,
|
||||
subtechnique: rt.type({
|
||||
id: schemaString,
|
||||
name: schemaString,
|
||||
reference: schemaString,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
uuid: schemaString,
|
||||
}),
|
||||
status: schemaString,
|
||||
uuid: schemaString,
|
||||
}),
|
||||
space_ids: schemaStringArray,
|
||||
}),
|
||||
});
|
||||
const SecurityAlertOptional = rt.partial({
|
||||
ecs: rt.partial({
|
||||
version: schemaString,
|
||||
}),
|
||||
event: rt.partial({
|
||||
action: schemaString,
|
||||
kind: schemaString,
|
||||
}),
|
||||
kibana: rt.partial({
|
||||
alert: rt.partial({
|
||||
action_group: schemaString,
|
||||
ancestors: rt.partial({
|
||||
rule: schemaString,
|
||||
}),
|
||||
building_block_type: schemaString,
|
||||
case_ids: schemaStringArray,
|
||||
duration: rt.partial({
|
||||
us: schemaStringOrNumber,
|
||||
}),
|
||||
end: schemaDate,
|
||||
flapping: schemaBoolean,
|
||||
flapping_history: schemaBooleanArray,
|
||||
group: rt.partial({
|
||||
id: schemaString,
|
||||
index: schemaNumber,
|
||||
}),
|
||||
last_detected: schemaDate,
|
||||
maintenance_window_ids: schemaStringArray,
|
||||
new_terms: schemaStringArray,
|
||||
original_event: rt.partial({
|
||||
agent_id_status: schemaString,
|
||||
code: schemaString,
|
||||
duration: schemaString,
|
||||
end: schemaDate,
|
||||
hash: schemaString,
|
||||
reason: schemaString,
|
||||
reference: schemaString,
|
||||
risk_score: schemaNumber,
|
||||
risk_score_norm: schemaNumber,
|
||||
severity: schemaStringOrNumber,
|
||||
start: schemaDate,
|
||||
timezone: schemaString,
|
||||
url: schemaString,
|
||||
}),
|
||||
reason: schemaString,
|
||||
risk_score: schemaNumber,
|
||||
rule: rt.partial({
|
||||
author: schemaString,
|
||||
building_block_type: schemaString,
|
||||
created_at: schemaDate,
|
||||
created_by: schemaString,
|
||||
description: schemaString,
|
||||
enabled: schemaString,
|
||||
execution: rt.partial({
|
||||
uuid: schemaString,
|
||||
}),
|
||||
from: schemaString,
|
||||
immutable: schemaStringArray,
|
||||
interval: schemaString,
|
||||
license: schemaString,
|
||||
note: schemaString,
|
||||
parameters: schemaUnknown,
|
||||
references: schemaStringArray,
|
||||
rule_id: schemaString,
|
||||
rule_name_override: schemaString,
|
||||
tags: schemaStringArray,
|
||||
timeline_id: schemaStringArray,
|
||||
timeline_title: schemaStringArray,
|
||||
timestamp_override: schemaString,
|
||||
to: schemaString,
|
||||
type: schemaString,
|
||||
updated_at: schemaDate,
|
||||
updated_by: schemaString,
|
||||
version: schemaString,
|
||||
}),
|
||||
severity: schemaString,
|
||||
start: schemaDate,
|
||||
suppression: rt.partial({
|
||||
docs_count: schemaStringOrNumber,
|
||||
end: schemaDate,
|
||||
start: schemaDate,
|
||||
terms: rt.partial({
|
||||
field: schemaStringArray,
|
||||
value: schemaStringArray,
|
||||
}),
|
||||
}),
|
||||
system_status: schemaString,
|
||||
threshold_result: rt.partial({
|
||||
count: schemaStringOrNumber,
|
||||
from: schemaDate,
|
||||
terms: rt.array(
|
||||
rt.partial({
|
||||
field: schemaString,
|
||||
value: schemaString,
|
||||
})
|
||||
),
|
||||
}),
|
||||
time_range: schemaDateRange,
|
||||
url: schemaString,
|
||||
workflow_reason: schemaString,
|
||||
workflow_status: schemaString,
|
||||
workflow_user: schemaString,
|
||||
}),
|
||||
version: schemaString,
|
||||
}),
|
||||
signal: rt.partial({
|
||||
ancestors: rt.partial({
|
||||
depth: schemaUnknown,
|
||||
id: schemaUnknown,
|
||||
index: schemaUnknown,
|
||||
type: schemaUnknown,
|
||||
}),
|
||||
depth: schemaUnknown,
|
||||
group: rt.partial({
|
||||
id: schemaUnknown,
|
||||
index: schemaUnknown,
|
||||
}),
|
||||
original_event: rt.partial({
|
||||
action: schemaUnknown,
|
||||
category: schemaUnknown,
|
||||
code: schemaUnknown,
|
||||
created: schemaUnknown,
|
||||
dataset: schemaUnknown,
|
||||
duration: schemaUnknown,
|
||||
end: schemaUnknown,
|
||||
hash: schemaUnknown,
|
||||
id: schemaUnknown,
|
||||
kind: schemaUnknown,
|
||||
module: schemaUnknown,
|
||||
outcome: schemaUnknown,
|
||||
provider: schemaUnknown,
|
||||
reason: schemaUnknown,
|
||||
risk_score: schemaUnknown,
|
||||
risk_score_norm: schemaUnknown,
|
||||
sequence: schemaUnknown,
|
||||
severity: schemaUnknown,
|
||||
start: schemaUnknown,
|
||||
timezone: schemaUnknown,
|
||||
type: schemaUnknown,
|
||||
}),
|
||||
original_time: schemaUnknown,
|
||||
reason: schemaUnknown,
|
||||
rule: rt.partial({
|
||||
author: schemaUnknown,
|
||||
building_block_type: schemaUnknown,
|
||||
created_at: schemaUnknown,
|
||||
created_by: schemaUnknown,
|
||||
description: schemaUnknown,
|
||||
enabled: schemaUnknown,
|
||||
false_positives: schemaUnknown,
|
||||
from: schemaUnknown,
|
||||
id: schemaUnknown,
|
||||
immutable: schemaUnknown,
|
||||
interval: schemaUnknown,
|
||||
license: schemaUnknown,
|
||||
max_signals: schemaUnknown,
|
||||
name: schemaUnknown,
|
||||
note: schemaUnknown,
|
||||
references: schemaUnknown,
|
||||
risk_score: schemaUnknown,
|
||||
rule_id: schemaUnknown,
|
||||
rule_name_override: schemaUnknown,
|
||||
severity: schemaUnknown,
|
||||
tags: schemaUnknown,
|
||||
threat: rt.partial({
|
||||
framework: schemaUnknown,
|
||||
tactic: rt.partial({
|
||||
id: schemaUnknown,
|
||||
name: schemaUnknown,
|
||||
reference: schemaUnknown,
|
||||
}),
|
||||
technique: rt.partial({
|
||||
id: schemaUnknown,
|
||||
name: schemaUnknown,
|
||||
reference: schemaUnknown,
|
||||
subtechnique: rt.partial({
|
||||
id: schemaUnknown,
|
||||
name: schemaUnknown,
|
||||
reference: schemaUnknown,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
timeline_id: schemaUnknown,
|
||||
timeline_title: schemaUnknown,
|
||||
timestamp_override: schemaUnknown,
|
||||
to: schemaUnknown,
|
||||
type: schemaUnknown,
|
||||
updated_at: schemaUnknown,
|
||||
updated_by: schemaUnknown,
|
||||
version: schemaUnknown,
|
||||
}),
|
||||
status: schemaUnknown,
|
||||
threshold_result: rt.partial({
|
||||
cardinality: rt.partial({
|
||||
field: schemaUnknown,
|
||||
value: schemaUnknown,
|
||||
}),
|
||||
count: schemaUnknown,
|
||||
from: schemaUnknown,
|
||||
terms: rt.partial({
|
||||
field: schemaUnknown,
|
||||
value: schemaUnknown,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
tags: schemaStringArray,
|
||||
});
|
||||
|
||||
// prettier-ignore
|
||||
export const SecurityAlertSchema = rt.intersection([SecurityAlertRequired, SecurityAlertOptional, AlertSchema, EcsSchema, LegacyAlertSchema]);
|
||||
// prettier-ignore
|
||||
export type SecurityAlert = rt.TypeOf<typeof SecurityAlertSchema>;
|
34
packages/kbn-alerts-as-data-utils/src/schemas/index.ts
Normal file
34
packages/kbn-alerts-as-data-utils/src/schemas/index.ts
Normal file
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { Alert } from './generated/alert_schema';
|
||||
import type { ObservabilityApmAlert } from './generated/observability_apm_schema';
|
||||
import type { ObservabilityLogsAlert } from './generated/observability_logs_schema';
|
||||
import type { ObservabilityMetricsAlert } from './generated/observability_metrics_schema';
|
||||
import type { ObservabilitySloAlert } from './generated/observability_slo_schema';
|
||||
import type { ObservabilityUptimeAlert } from './generated/observability_uptime_schema';
|
||||
import type { SecurityAlert } from './generated/security_schema';
|
||||
|
||||
export * from './create_schema_from_field_map';
|
||||
|
||||
export type { Alert } from './generated/alert_schema';
|
||||
export type { ObservabilityApmAlert } from './generated/observability_apm_schema';
|
||||
export type { ObservabilityLogsAlert } from './generated/observability_logs_schema';
|
||||
export type { ObservabilityMetricsAlert } from './generated/observability_metrics_schema';
|
||||
export type { ObservabilitySloAlert } from './generated/observability_slo_schema';
|
||||
export type { ObservabilityUptimeAlert } from './generated/observability_uptime_schema';
|
||||
export type { SecurityAlert } from './generated/security_schema';
|
||||
|
||||
export type AADAlert =
|
||||
| Alert
|
||||
| ObservabilityApmAlert
|
||||
| ObservabilityLogsAlert
|
||||
| ObservabilityMetricsAlert
|
||||
| ObservabilitySloAlert
|
||||
| ObservabilityUptimeAlert
|
||||
| SecurityAlert;
|
|
@ -0,0 +1,50 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
const INDENT_LENGTH = 2;
|
||||
const INDENT = ''.padStart(INDENT_LENGTH);
|
||||
|
||||
export class LineWriter {
|
||||
private _indent: string = '';
|
||||
private _lines: string[] = [];
|
||||
private _separator: string;
|
||||
|
||||
constructor(separator: string = '\n') {
|
||||
this._indent = '';
|
||||
this._lines = [];
|
||||
this._separator = separator;
|
||||
}
|
||||
|
||||
public addLine(line: string) {
|
||||
this._lines.push(`${this._indent}${line}`);
|
||||
}
|
||||
|
||||
public addLineAndIndent(line: string) {
|
||||
this._lines.push(`${this._indent}${line}`);
|
||||
this._indent = `${this._indent}${INDENT}`;
|
||||
}
|
||||
|
||||
public dedentAndAddLine(line: string) {
|
||||
this._indent = this._indent.substr(INDENT_LENGTH);
|
||||
this._lines.push(`${this._indent}${line}`);
|
||||
}
|
||||
|
||||
public indent() {
|
||||
this._indent = `${this._indent}${INDENT}`;
|
||||
}
|
||||
|
||||
public dedent() {
|
||||
this._indent = this._indent.substr(INDENT_LENGTH);
|
||||
}
|
||||
|
||||
public getContent() {
|
||||
return this._lines.join(this._separator);
|
||||
}
|
||||
}
|
||||
|
||||
export const createLineWriter = (separator: string = '\n') => new LineWriter(separator);
|
|
@ -16,5 +16,6 @@
|
|||
"kbn_references": [
|
||||
"@kbn/ecs",
|
||||
"@kbn/rule-data-utils",
|
||||
"@kbn/safer-lodash-set",
|
||||
]
|
||||
}
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { contextToSchemaName } from './context_to_schema_name';
|
||||
|
||||
describe('contextToSchemaName', () => {
|
||||
it('correctly converts context to schema name', () => {
|
||||
expect(contextToSchemaName('observability.logs')).toEqual(`ObservabilityLogsAlert`);
|
||||
expect(contextToSchemaName('security')).toEqual(`SecurityAlert`);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,15 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { capitalize } from 'lodash';
|
||||
|
||||
export const contextToSchemaName = (context: string) => {
|
||||
return `${context
|
||||
.split('.')
|
||||
.map((part: string) => capitalize(part))
|
||||
.join('')}Alert`;
|
||||
};
|
|
@ -7,3 +7,4 @@
|
|||
|
||||
export { mappingFromFieldMap } from './field_maps/mapping_from_field_map';
|
||||
export { getComponentTemplateFromFieldMap } from './field_maps/component_template_from_field_map';
|
||||
export { contextToSchemaName } from './context_to_schema_name';
|
||||
|
|
|
@ -46,7 +46,11 @@ export * from './maintenance_window';
|
|||
export * from './default_rule_aggregation';
|
||||
export * from './rule_tags_aggregation';
|
||||
|
||||
export { mappingFromFieldMap, getComponentTemplateFromFieldMap } from './alert_schema';
|
||||
export {
|
||||
mappingFromFieldMap,
|
||||
getComponentTemplateFromFieldMap,
|
||||
contextToSchemaName,
|
||||
} from './alert_schema';
|
||||
|
||||
export interface AlertingFrameworkHealth {
|
||||
isSufficientlySecure: boolean;
|
||||
|
|
|
@ -9,6 +9,7 @@ import sinon from 'sinon';
|
|||
import { Alert } from './alert';
|
||||
import { AlertInstanceState, AlertInstanceContext, DefaultActionGroupId } from '../../common';
|
||||
import { alertWithAnyUUID } from '../test_utils';
|
||||
import { CombinedSummarizedAlerts } from '../types';
|
||||
|
||||
let clock: sinon.SinonFakeTimers;
|
||||
|
||||
|
@ -668,8 +669,34 @@ describe('resetPendingRecoveredCount', () => {
|
|||
});
|
||||
|
||||
describe('isFilteredOut', () => {
|
||||
const summarizedAlerts = {
|
||||
all: { count: 1, data: [{ kibana: { alert: { uuid: '1' } } }] },
|
||||
const summarizedAlerts: CombinedSummarizedAlerts = {
|
||||
all: {
|
||||
count: 1,
|
||||
data: [
|
||||
{
|
||||
_id: '1',
|
||||
_index: '.alerts',
|
||||
'@timestamp': '',
|
||||
kibana: {
|
||||
alert: {
|
||||
instance: { id: 'a' },
|
||||
rule: {
|
||||
category: 'category',
|
||||
consumer: 'consumer',
|
||||
name: 'name',
|
||||
producer: 'producer',
|
||||
revision: 0,
|
||||
rule_type_id: 'rule_type_id',
|
||||
uuid: 'uuid',
|
||||
},
|
||||
status: 'status',
|
||||
uuid: '1',
|
||||
},
|
||||
space_ids: ['default'],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
new: { count: 0, data: [] },
|
||||
ongoing: { count: 0, data: [] },
|
||||
recovered: { count: 0, data: [] },
|
||||
|
|
|
@ -6,9 +6,8 @@
|
|||
*/
|
||||
|
||||
import { v4 as uuidV4 } from 'uuid';
|
||||
import { get, isEmpty } from 'lodash';
|
||||
import { ALERT_UUID } from '@kbn/rule-data-utils';
|
||||
import { CombinedSummarizedAlerts } from '../types';
|
||||
import { isEmpty } from 'lodash';
|
||||
import { AlertHit, CombinedSummarizedAlerts } from '../types';
|
||||
import {
|
||||
AlertInstanceMeta,
|
||||
AlertInstanceState,
|
||||
|
@ -293,8 +292,8 @@ export class Alert<
|
|||
// Related issue: https://github.com/elastic/kibana/issues/144862
|
||||
|
||||
return !summarizedAlerts.all.data.some(
|
||||
(alert) =>
|
||||
get(alert, ALERT_UUID) === this.getId() || get(alert, ALERT_UUID) === this.getUuid()
|
||||
(alert: AlertHit) =>
|
||||
alert?.kibana?.alert?.uuid === this.getId() || alert?.kibana?.alert?.uuid === this.getUuid()
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ export type {
|
|||
AlertingApiRequestHandlerContext,
|
||||
RuleParamsAndRefs,
|
||||
GetSummarizedAlertsFnOpts,
|
||||
SummarizedAlertsChunk,
|
||||
ExecutorType,
|
||||
IRuleTypeAlerts,
|
||||
} from './types';
|
||||
|
|
|
@ -652,6 +652,10 @@ describe('Create Lifecycle', () => {
|
|||
validate: {
|
||||
params: schema.any(),
|
||||
},
|
||||
alerts: {
|
||||
context: 'test',
|
||||
mappings: { fieldMap: { foo: { type: 'keyword', required: false } } },
|
||||
},
|
||||
});
|
||||
const result = registry.list();
|
||||
expect(result).toMatchInlineSnapshot(`
|
||||
|
@ -672,6 +676,17 @@ describe('Create Lifecycle', () => {
|
|||
"params": Array [],
|
||||
"state": Array [],
|
||||
},
|
||||
"alerts": Object {
|
||||
"context": "test",
|
||||
"mappings": Object {
|
||||
"fieldMap": Object {
|
||||
"foo": Object {
|
||||
"required": false,
|
||||
"type": "keyword",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"defaultActionGroupId": "testActionGroup",
|
||||
"defaultScheduleInterval": undefined,
|
||||
"doesSetRecoveryContext": false,
|
||||
|
|
|
@ -358,6 +358,7 @@ export class RuleTypeRegistry {
|
|||
ruleTaskTimeout,
|
||||
defaultScheduleInterval,
|
||||
doesSetRecoveryContext,
|
||||
alerts,
|
||||
getSummarizedAlerts,
|
||||
},
|
||||
]: [string, UntypedNormalizedRuleType]) => ({
|
||||
|
@ -379,6 +380,7 @@ export class RuleTypeRegistry {
|
|||
minimumLicenseRequired
|
||||
).isValid,
|
||||
hasGetSummarizedAlerts: !!getSummarizedAlerts,
|
||||
...(alerts ? { alerts } : {}),
|
||||
})
|
||||
)
|
||||
);
|
||||
|
|
|
@ -22,7 +22,7 @@ import {
|
|||
} from '@kbn/core/server';
|
||||
import type { PublicMethodsOf } from '@kbn/utility-types';
|
||||
import { SharePluginStart } from '@kbn/share-plugin/server';
|
||||
import { type FieldMap } from '@kbn/alerts-as-data-utils';
|
||||
import { Alert, type FieldMap } from '@kbn/alerts-as-data-utils';
|
||||
import { Filter } from '@kbn/es-query';
|
||||
import { RuleTypeRegistry as OrigruleTypeRegistry } from './rule_type_registry';
|
||||
import { PluginSetupContract, PluginStartContract } from './plugin';
|
||||
|
@ -152,12 +152,13 @@ export interface GetSummarizedAlertsFnOpts {
|
|||
alertsFilter?: AlertsFilter | null;
|
||||
}
|
||||
|
||||
// TODO - add type for these alerts when we determine which alerts-as-data
|
||||
// fields will be made available in https://github.com/elastic/kibana/issues/143741
|
||||
|
||||
interface SummarizedAlertsChunk {
|
||||
export type AlertHit = Alert & {
|
||||
_id: string;
|
||||
_index: string;
|
||||
};
|
||||
export interface SummarizedAlertsChunk {
|
||||
count: number;
|
||||
data: unknown[];
|
||||
data: AlertHit[];
|
||||
}
|
||||
export interface SummarizedAlerts {
|
||||
new: SummarizedAlertsChunk;
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
import { merge } from 'lodash';
|
||||
import type { PublicContract } from '@kbn/utility-types';
|
||||
import { ESSearchRequest, ESSearchResponse } from '@kbn/es-types';
|
||||
import type { GetSummarizedAlertsFnOpts } from '@kbn/alerting-plugin/server';
|
||||
import type { SummarizedAlertsChunk, GetSummarizedAlertsFnOpts } from '@kbn/alerting-plugin/server';
|
||||
import {
|
||||
ALERT_END,
|
||||
ALERT_RULE_EXECUTION_UUID,
|
||||
|
@ -24,6 +24,7 @@ import {
|
|||
SearchTotalHits,
|
||||
} from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
import { AlertsFilter } from '@kbn/alerting-plugin/common';
|
||||
import { AlertHit, SummarizedAlerts } from '@kbn/alerting-plugin/server/types';
|
||||
import { ParsedTechnicalFields } from '../../common';
|
||||
import { ParsedExperimentalFields } from '../../common/parse_experimental_fields';
|
||||
import { IRuleDataClient, IRuleDataReader } from '../rule_data_client';
|
||||
|
@ -49,7 +50,7 @@ export const createGetSummarizedAlertsFn =
|
|||
spaceId,
|
||||
excludedAlertInstanceIds,
|
||||
alertsFilter,
|
||||
}: GetSummarizedAlertsFnOpts) => {
|
||||
}: GetSummarizedAlertsFnOpts): Promise<SummarizedAlerts> => {
|
||||
if (!ruleId || !spaceId) {
|
||||
throw new Error(`Must specify both rule ID and space ID for summarized alert query.`);
|
||||
}
|
||||
|
@ -112,7 +113,7 @@ const getAlertsByExecutionUuid = async ({
|
|||
excludedAlertInstanceIds,
|
||||
formatAlert,
|
||||
alertsFilter,
|
||||
}: GetAlertsByExecutionUuidOpts) => {
|
||||
}: GetAlertsByExecutionUuidOpts): Promise<SummarizedAlerts> => {
|
||||
if (isLifecycleAlert) {
|
||||
return getLifecycleAlertsByExecutionUuid({
|
||||
executionUuid,
|
||||
|
@ -150,7 +151,7 @@ const getPersistentAlertsByExecutionUuid = async <TSearchRequest extends ESSearc
|
|||
excludedAlertInstanceIds,
|
||||
formatAlert,
|
||||
alertsFilter,
|
||||
}: GetAlertsByExecutionUuidHelperOpts) => {
|
||||
}: GetAlertsByExecutionUuidHelperOpts): Promise<SummarizedAlerts> => {
|
||||
// persistent alerts only create new alerts so query by execution UUID to
|
||||
// get all alerts created during an execution
|
||||
const request = getQueryByExecutionUuid({
|
||||
|
@ -181,7 +182,7 @@ const getLifecycleAlertsByExecutionUuid = async ({
|
|||
excludedAlertInstanceIds,
|
||||
formatAlert,
|
||||
alertsFilter,
|
||||
}: GetAlertsByExecutionUuidHelperOpts) => {
|
||||
}: GetAlertsByExecutionUuidHelperOpts): Promise<SummarizedAlerts> => {
|
||||
// lifecycle alerts assign a different action to an alert depending
|
||||
// on whether it is new/ongoing/recovered. query for each action in order
|
||||
// to get the count of each action type as well as up to the maximum number
|
||||
|
@ -238,9 +239,9 @@ const expandFlattenedAlert = (alert: object) => {
|
|||
};
|
||||
|
||||
const getHitsWithCount = <TSearchRequest extends ESSearchRequest>(
|
||||
response: ESSearchResponse<AlertDocument, TSearchRequest>,
|
||||
response: ESSearchResponse<AlertHit, TSearchRequest>,
|
||||
formatAlert?: (alert: AlertDocument) => AlertDocument
|
||||
) => {
|
||||
): SummarizedAlertsChunk => {
|
||||
return {
|
||||
count: (response.hits.total as SearchTotalHits).value,
|
||||
data: response.hits.hits.map((hit) => {
|
||||
|
@ -253,7 +254,7 @@ const getHitsWithCount = <TSearchRequest extends ESSearchRequest>(
|
|||
_id,
|
||||
_index,
|
||||
...expandedSource,
|
||||
};
|
||||
} as AlertHit;
|
||||
}),
|
||||
};
|
||||
};
|
||||
|
@ -262,9 +263,9 @@ const doSearch = async (
|
|||
ruleDataClientReader: IRuleDataReader,
|
||||
request: ESSearchRequest,
|
||||
formatAlert?: (alert: AlertDocument) => AlertDocument
|
||||
) => {
|
||||
): Promise<SummarizedAlertsChunk> => {
|
||||
const response = await ruleDataClientReader.search(request);
|
||||
return getHitsWithCount(response, formatAlert);
|
||||
return getHitsWithCount(response as ESSearchResponse<AlertHit>, formatAlert);
|
||||
};
|
||||
|
||||
interface GetQueryByExecutionUuidParams {
|
||||
|
@ -359,7 +360,7 @@ const getAlertsByTimeRange = async ({
|
|||
excludedAlertInstanceIds,
|
||||
formatAlert,
|
||||
alertsFilter,
|
||||
}: GetAlertsByTimeRangeOpts) => {
|
||||
}: GetAlertsByTimeRangeOpts): Promise<SummarizedAlerts> => {
|
||||
if (isLifecycleAlert) {
|
||||
return getLifecycleAlertsByTimeRange({
|
||||
start,
|
||||
|
@ -440,7 +441,7 @@ const getLifecycleAlertsByTimeRange = async ({
|
|||
formatAlert,
|
||||
excludedAlertInstanceIds,
|
||||
alertsFilter,
|
||||
}: GetAlertsByTimeRangeHelperOpts) => {
|
||||
}: GetAlertsByTimeRangeHelperOpts): Promise<SummarizedAlerts> => {
|
||||
const requests = [
|
||||
getQueryByTimeRange(start, end, ruleId, excludedAlertInstanceIds, AlertTypes.NEW, alertsFilter),
|
||||
getQueryByTimeRange(
|
||||
|
|
|
@ -0,0 +1,91 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import execa from 'execa';
|
||||
import { RuleType } from '@kbn/alerting-plugin/server';
|
||||
import {
|
||||
alertFieldMap,
|
||||
ecsFieldMap,
|
||||
legacyAlertFieldMap,
|
||||
createSchemaFromFieldMap,
|
||||
} from '@kbn/alerts-as-data-utils';
|
||||
import { contextToSchemaName } from '@kbn/alerting-plugin/common';
|
||||
import { FtrProviderContext } from '../../../../common/ftr_provider_context';
|
||||
|
||||
// eslint-disable-next-line import/no-default-export
|
||||
export default function checkAlertSchemasTest({ getService }: FtrProviderContext) {
|
||||
const supertest = getService('supertest');
|
||||
|
||||
// We are using this test to generate the schemas and types for alerts as data objects
|
||||
// because we need to access the alert definition that rule types provide on registration,
|
||||
// which is easiest to do after Kibana has started up and we can get the list of registered
|
||||
// rule types. If you add a new field to the alert field map or to the field map specific
|
||||
// to your rule type, this test will fail. To resolve, run this test locally
|
||||
//
|
||||
// node scripts/functional_tests_server.js --config x-pack/test/alerting_api_integration/spaces_only/tests/alerting/group4/config.ts
|
||||
// node scripts/functional_test_runner --config=x-pack/test/alerting_api_integration/spaces_only/tests/alerting/group4/config.ts --grep "check alert schemas"
|
||||
//
|
||||
// and commit the changed schema files in packages/kbn-alerts-as-data-utils/src/schemas/generated/
|
||||
|
||||
describe('check alert schemas', () => {
|
||||
it('should not have discrepancies from the alert field map or the field map specific to a rule type', async () => {
|
||||
// Generate base alert schema
|
||||
createSchemaFromFieldMap({
|
||||
outputFile: `schemas/generated/alert_schema.ts`,
|
||||
fieldMap: alertFieldMap,
|
||||
schemaPrefix: 'Alert',
|
||||
});
|
||||
|
||||
// Generate legacy alert schema
|
||||
createSchemaFromFieldMap({
|
||||
outputFile: `schemas/generated/legacy_alert_schema.ts`,
|
||||
fieldMap: legacyAlertFieldMap,
|
||||
schemaPrefix: 'LegacyAlert',
|
||||
});
|
||||
|
||||
// Generate ECS schema
|
||||
createSchemaFromFieldMap({
|
||||
outputFile: `schemas/generated/ecs_schema.ts`,
|
||||
fieldMap: ecsFieldMap,
|
||||
schemaPrefix: 'Ecs',
|
||||
});
|
||||
|
||||
const ruleTypes = await supertest
|
||||
.get('/api/alerting/rule_types')
|
||||
.expect(200)
|
||||
.then((response) => response.body);
|
||||
|
||||
const processedContexts: string[] = [];
|
||||
ruleTypes
|
||||
.filter((ruleType: RuleType) => !ruleType.id.startsWith('test.') && ruleType.alerts)
|
||||
.forEach((ruleType: RuleType) => {
|
||||
const alertsDefinition = ruleType.alerts!;
|
||||
if (!processedContexts.includes(alertsDefinition.context)) {
|
||||
// Generate schema for this context
|
||||
const name = contextToSchemaName(alertsDefinition.context);
|
||||
|
||||
createSchemaFromFieldMap({
|
||||
outputFile: `schemas/generated/${alertsDefinition.context.replaceAll(
|
||||
'.',
|
||||
'_'
|
||||
)}_schema.ts`,
|
||||
fieldMap: alertsDefinition.mappings.fieldMap,
|
||||
schemaPrefix: name,
|
||||
useAlert: true,
|
||||
useEcs: alertsDefinition.useEcs ?? false,
|
||||
useLegacyAlerts: alertsDefinition.useLegacyAlerts ?? false,
|
||||
});
|
||||
processedContexts.push(alertsDefinition.context);
|
||||
}
|
||||
});
|
||||
|
||||
const { stdout } = await execa('git', ['ls-files', '--modified']);
|
||||
expect(stdout).not.to.contain('packages/kbn-alerts-as-data-utils/src/schemas/generated');
|
||||
});
|
||||
});
|
||||
}
|
|
@ -27,6 +27,7 @@ export default function alertingTests({ loadTestFile, getService }: FtrProviderC
|
|||
loadTestFile(require.resolve('./flapping_history'));
|
||||
loadTestFile(require.resolve('./check_registered_rule_types'));
|
||||
loadTestFile(require.resolve('./alerts_as_data'));
|
||||
loadTestFile(require.resolve('./generate_alert_schemas'));
|
||||
// Do not place test files here, due to https://github.com/elastic/kibana/issues/123059
|
||||
|
||||
// note that this test will destroy existing spaces
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue