[ML] Adding support for saved object based ml modules (#92855)

* [ML] Adding support for saved object based ml modules

* updating icon mapping

* cleaning up code

* missed private variable

* removing mappings json file

* renaming module id

* updating test

* removing unrelated file

* type clean up

* changing logo type

* changes based on review

* removing fleet changes

* updating type guards

* fixing list module return type

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
James Gowdy 2021-03-11 14:08:48 +00:00 committed by GitHub
parent 33fbe74e4e
commit dfb4eac520
6 changed files with 319 additions and 182 deletions

View file

@ -16,6 +16,7 @@ export interface ModuleJob {
export interface ModuleDatafeed {
id: string;
job_id: string;
config: Omit<Datafeed, 'datafeed_id'>;
}
@ -48,7 +49,8 @@ export interface Module {
title: string;
description: string;
type: string;
logoFile: string;
logoFile?: string;
logo?: Logo;
defaultIndexPattern: string;
query: any;
jobs: ModuleJob[];
@ -56,6 +58,18 @@ export interface Module {
kibana: KibanaObjects;
}
export interface FileBasedModule extends Omit<Module, 'jobs' | 'datafeeds' | 'kibana'> {
jobs: Array<{ file: string; id: string }>;
datafeeds: Array<{ file: string; job_id: string; id: string }>;
kibana: {
search: Array<{ file: string; id: string }>;
visualization: Array<{ file: string; id: string }>;
dashboard: Array<{ file: string; id: string }>;
};
}
export type Logo = { icon: string } | null;
export interface ResultItem {
id: string;
success?: boolean;
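
For orientation, a saved object based module carries its job and datafeed configurations inline instead of referencing JSON files on disk. A rough sketch of one, mirroring the Module interface above (every value below is illustrative and not taken from this commit):

const sampleModule = {
  id: 'sample_module',
  title: 'Sample module',
  description: 'A module whose jobs and datafeeds are stored inline',
  type: 'Sample data',
  logo: { icon: 'logoElastic' },
  defaultIndexPattern: 'sample-*',
  query: { bool: { filter: [{ term: { 'event.dataset': 'sample.access' } }] } },
  jobs: [
    {
      id: 'sample_count_job',
      // full anomaly detection job config, minus job_id
      config: {
        analysis_config: { bucket_span: '15m', detectors: [{ function: 'count' }] },
        data_description: { time_field: '@timestamp' },
      },
    },
  ],
  datafeeds: [
    {
      id: 'datafeed-sample_count_job',
      job_id: 'sample_count_job',
      // full datafeed config, minus datafeed_id
      config: {
        job_id: 'sample_count_job',
        indices: ['sample-*'],
        query: { match_all: {} },
      },
    },
  ],
  kibana: {},
};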

View file

@ -7,6 +7,7 @@
export type JobType = 'anomaly-detector' | 'data-frame-analytics';
export const ML_SAVED_OBJECT_TYPE = 'ml-job';
export const ML_MODULE_SAVED_OBJECT_TYPE = 'ml-module';
export interface SavedObjectResult {
[jobId: string]: { success: boolean; error?: any };

View file

@ -9,6 +9,7 @@ import fs from 'fs';
import Boom from '@hapi/boom';
import numeral from '@elastic/numeral';
import { KibanaRequest, IScopedClusterClient, SavedObjectsClientContract } from 'kibana/server';
import moment from 'moment';
import { IndexPatternAttributes } from 'src/plugins/data/server';
import { merge } from 'lodash';
@ -16,12 +17,15 @@ import { AnalysisLimits } from '../../../common/types/anomaly_detection_jobs';
import { getAuthorizationHeader } from '../../lib/request_authorization';
import { MlInfoResponse } from '../../../common/types/ml_server_info';
import type { MlClient } from '../../lib/ml_client';
import { ML_MODULE_SAVED_OBJECT_TYPE } from '../../../common/types/saved_objects';
import {
KibanaObjects,
KibanaObjectConfig,
ModuleDatafeed,
ModuleJob,
Module,
FileBasedModule,
Logo,
JobOverride,
DatafeedOverride,
GeneralJobsOverride,
@ -45,7 +49,10 @@ import { jobServiceProvider } from '../job_service';
import { resultsServiceProvider } from '../results_service';
import { JobExistResult, JobStat } from '../../../common/types/data_recognizer';
import { MlJobsStatsResponse } from '../../../common/types/job_service';
import { Datafeed } from '../../../common/types/anomaly_detection_jobs';
import { JobSavedObjectService } from '../../saved_objects';
import { isDefined } from '../../../common/types/guards';
import { isPopulatedObject } from '../../../common/util/object_utils';
const ML_DIR = 'ml';
const KIBANA_DIR = 'kibana';
@ -57,26 +64,18 @@ export const SAVED_OBJECT_TYPES = {
VISUALIZATION: 'visualization',
};
interface RawModuleConfig {
id: string;
title: string;
description: string;
type: string;
logoFile: string;
defaultIndexPattern: string;
query: any;
jobs: Array<{ file: string; id: string }>;
datafeeds: Array<{ file: string; job_id: string; id: string }>;
kibana: {
search: Array<{ file: string; id: string }>;
visualization: Array<{ file: string; id: string }>;
dashboard: Array<{ file: string; id: string }>;
};
function isModule(arg: unknown): arg is Module {
return isPopulatedObject(arg) && Array.isArray(arg.jobs) && arg.jobs[0]?.config !== undefined;
}
function isFileBasedModule(arg: unknown): arg is FileBasedModule {
return isPopulatedObject(arg) && Array.isArray(arg.jobs) && arg.jobs[0]?.file !== undefined;
}
interface Config {
dirName: any;
json: RawModuleConfig;
dirName?: string;
module: FileBasedModule | Module;
isSavedObject: boolean;
}
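
These two guards are what let a single Config hold either flavour: file based manifests reference job and datafeed JSON files, while saved object modules embed the configs directly. An illustrative check (the sample objects are made up; their shapes follow the interfaces in the modules types file above):

// file based manifest entry: jobs point at files on disk
const fileBased = { id: 'file_module', jobs: [{ id: 'job1', file: 'job1.json' }] };
// saved object module: jobs carry their config inline
const soBased = { id: 'so_module', jobs: [{ id: 'job1', config: { analysis_config: {} } }] };

isFileBasedModule(fileBased); // true  - jobs[0].file is defined
isModule(fileBased);          // false - jobs[0].config is undefined
isModule(soBased);            // true
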
export interface RecognizeResult {
@ -84,7 +83,7 @@ export interface RecognizeResult {
title: string;
query: any;
description: string;
logo: { icon: string } | null;
logo: Logo;
}
interface ObjectExistResult {
@ -125,7 +124,7 @@ export class DataRecognizer {
/**
* List of the module jobs that require model memory estimation
*/
jobsForModelMemoryEstimation: Array<{ job: ModuleJob; query: any }> = [];
private _jobsForModelMemoryEstimation: Array<{ job: ModuleJob; query: any }> = [];
constructor(
mlClusterClient: IScopedClusterClient,
@ -146,7 +145,7 @@ export class DataRecognizer {
}
// list all directories under the given directory
async listDirs(dirName: string): Promise<string[]> {
private async _listDirs(dirName: string): Promise<string[]> {
const dirs: string[] = [];
return new Promise((resolve, reject) => {
fs.readdir(dirName, (err, fileNames) => {
@ -164,7 +163,7 @@ export class DataRecognizer {
});
}
async readFile(fileName: string): Promise<string> {
private async _readFile(fileName: string): Promise<string> {
return new Promise((resolve, reject) => {
fs.readFile(fileName, 'utf-8', (err, content) => {
if (err) {
@ -176,14 +175,14 @@ export class DataRecognizer {
});
}
async loadManifestFiles(): Promise<Config[]> {
private async _loadConfigs(): Promise<Config[]> {
const configs: Config[] = [];
const dirs = await this.listDirs(this._modulesDir);
const dirs = await this._listDirs(this._modulesDir);
await Promise.all(
dirs.map(async (dir) => {
let file: string | undefined;
try {
file = await this.readFile(`${this._modulesDir}/${dir}/manifest.json`);
file = await this._readFile(`${this._modulesDir}/${dir}/manifest.json`);
} catch (error) {
mlLog.warn(`Data recognizer skipping folder ${dir} as manifest.json cannot be read`);
}
@ -192,7 +191,8 @@ export class DataRecognizer {
try {
configs.push({
dirName: dir,
json: JSON.parse(file),
module: JSON.parse(file),
isSavedObject: false,
});
} catch (error) {
mlLog.warn(`Data recognizer error parsing ${dir}/manifest.json. ${error}`);
@ -201,26 +201,40 @@ export class DataRecognizer {
})
);
return configs;
const savedObjectConfigs = (await this._loadSavedObjectModules()).map((module) => ({
module,
isSavedObject: true,
}));
return [...configs, ...savedObjectConfigs];
}
private async _loadSavedObjectModules() {
const jobs = await this._savedObjectsClient.find<Module>({
type: ML_MODULE_SAVED_OBJECT_TYPE,
perPage: 10000,
});
return jobs.saved_objects.map((o) => o.attributes);
}
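
Nothing in this diff writes ml-module saved objects, but with a standard SavedObjectsClientContract, registering one would presumably look along these lines (hypothetical sketch; sampleModule is the illustrative object from the types section above):

// Hypothetical: persist a module under the new ml-module saved object type
// so that _loadSavedObjectModules() picks it up alongside the file based ones.
await savedObjectsClient.create(ML_MODULE_SAVED_OBJECT_TYPE, sampleModule, {
  id: 'sample_module',
});
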
// get the manifest.json file for a specified id, e.g. "nginx"
async getManifestFile(id: string) {
const manifestFiles = await this.loadManifestFiles();
return manifestFiles.find((i) => i.json.id === id);
private async _findConfig(id: string) {
const configs = await this._loadConfigs();
return configs.find((i) => i.module.id === id);
}
// called externally by an endpoint
async findMatches(indexPattern: string): Promise<RecognizeResult[]> {
const manifestFiles = await this.loadManifestFiles();
public async findMatches(indexPattern: string): Promise<RecognizeResult[]> {
const manifestFiles = await this._loadConfigs();
const results: RecognizeResult[] = [];
await Promise.all(
manifestFiles.map(async (i) => {
const moduleConfig = i.json;
const moduleConfig = i.module;
let match = false;
try {
match = await this.searchForFields(moduleConfig, indexPattern);
match = await this._searchForFields(moduleConfig, indexPattern);
} catch (error) {
mlLog.warn(
`Data recognizer error running query defined for module ${moduleConfig.id}. ${error}`
@ -228,13 +242,15 @@ export class DataRecognizer {
}
if (match === true) {
let logo = null;
if (moduleConfig.logoFile) {
let logo: Logo = null;
if (moduleConfig.logo) {
logo = moduleConfig.logo;
} else if (moduleConfig.logoFile) {
try {
logo = await this.readFile(
const logoFile = await this._readFile(
`${this._modulesDir}/${i.dirName}/${moduleConfig.logoFile}`
);
logo = JSON.parse(logo);
logo = JSON.parse(logoFile);
} catch (e) {
logo = null;
}
@ -255,7 +271,7 @@ export class DataRecognizer {
return results;
}
async searchForFields(moduleConfig: RawModuleConfig, indexPattern: string) {
private async _searchForFields(moduleConfig: FileBasedModule | Module, indexPattern: string) {
if (moduleConfig.query === undefined) {
return false;
}
@ -275,29 +291,34 @@ export class DataRecognizer {
return body.hits.total.value > 0;
}
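
Taken together, recognition boils down to: a module matches an index pattern when its query returns at least one hit there. Given a DataRecognizer instance, the endpoint-facing call reduces to something like (index pattern and result values are illustrative):

// list every module whose query matches documents in the supplied index pattern
const matches = await dataRecognizer.findMatches('sample-*');
// e.g. [{ id: 'sample_module', title: 'Sample module', logo: { icon: 'logoElastic' }, ... }]
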
async listModules() {
const manifestFiles = await this.loadManifestFiles();
const ids = manifestFiles.map(({ json }) => json.id).sort((a, b) => a.localeCompare(b)); // sort as json files are read from disk and could be in any order.
public async listModules() {
const manifestFiles = await this._loadConfigs();
manifestFiles.sort((a, b) => a.module.id.localeCompare(b.module.id)); // sort as json files are read from disk and could be in any order.
const modules = [];
for (let i = 0; i < ids.length; i++) {
const module = await this.getModule(ids[i]);
modules.push(module);
const configs: Array<Module | FileBasedModule> = [];
for (const config of manifestFiles) {
if (config.isSavedObject) {
configs.push(config.module);
} else {
configs.push(await this.getModule(config.module.id));
}
}
return modules;
// casting return as Module[] so not to break external plugins who rely on this function
// once FileBasedModules are removed this function will only deal with Modules
return configs as Module[];
}
// called externally by an endpoint
// supplying an optional prefix will add the prefix
// to the job and datafeed configs
async getModule(id: string, prefix = ''): Promise<Module> {
let manifestJSON: RawModuleConfig | null = null;
public async getModule(id: string, prefix = ''): Promise<Module> {
let module: FileBasedModule | Module | null = null;
let dirName: string | null = null;
const manifestFile = await this.getManifestFile(id);
if (manifestFile !== undefined) {
manifestJSON = manifestFile.json;
dirName = manifestFile.dirName;
const config = await this._findConfig(id);
if (config !== undefined) {
module = config.module;
dirName = config.dirName ?? null;
} else {
throw Boom.notFound(`Module with the id "${id}" not found`);
}
@ -306,81 +327,102 @@ export class DataRecognizer {
const datafeeds: ModuleDatafeed[] = [];
const kibana: KibanaObjects = {};
// load all of the job configs
await Promise.all(
manifestJSON.jobs.map(async (job) => {
if (isModule(module)) {
const tempJobs: ModuleJob[] = module.jobs.map((j) => ({
id: `${prefix}${j.id}`,
config: j.config,
}));
jobs.push(...tempJobs);
const tempDatafeeds: ModuleDatafeed[] = module.datafeeds.map((d) => {
const jobId = `${prefix}${d.job_id}`;
return {
id: prefixDatafeedId(d.id, prefix),
job_id: jobId,
config: {
...d.config,
job_id: jobId,
},
};
});
datafeeds.push(...tempDatafeeds);
} else if (isFileBasedModule(module)) {
const tempJobs = module.jobs.map(async (job) => {
try {
const jobConfig = await this.readFile(
const jobConfig = await this._readFile(
`${this._modulesDir}/${dirName}/${ML_DIR}/${job.file}`
);
// use the file name for the id
jobs.push({
return {
id: `${prefix}${job.id}`,
config: JSON.parse(jobConfig),
});
};
} catch (error) {
mlLog.warn(
`Data recognizer error loading config for job ${job.id} for module ${id}. ${error}`
);
}
})
);
});
jobs.push(...(await Promise.all(tempJobs)).filter(isDefined));
// load all of the datafeed configs
await Promise.all(
manifestJSON.datafeeds.map(async (datafeed) => {
// load all of the datafeed configs
const tempDatafeed = module.datafeeds.map(async (datafeed) => {
try {
const datafeedConfig = await this.readFile(
const datafeedConfigString = await this._readFile(
`${this._modulesDir}/${dirName}/${ML_DIR}/${datafeed.file}`
);
const config = JSON.parse(datafeedConfig);
// use the job id from the manifestFile
config.job_id = `${prefix}${datafeed.job_id}`;
const datafeedConfig = JSON.parse(datafeedConfigString) as Datafeed;
// use the job id from the module
datafeedConfig.job_id = `${prefix}${datafeed.job_id}`;
datafeeds.push({
return {
id: prefixDatafeedId(datafeed.id, prefix),
config,
});
job_id: datafeedConfig.job_id,
config: datafeedConfig,
};
} catch (error) {
mlLog.warn(
`Data recognizer error loading config for datafeed ${datafeed.id} for module ${id}. ${error}`
);
}
})
);
});
datafeeds.push(...(await Promise.all(tempDatafeed)).filter(isDefined));
}
// load all of the kibana saved objects
if (manifestJSON.kibana !== undefined) {
const kKeys = Object.keys(manifestJSON.kibana) as Array<keyof RawModuleConfig['kibana']>;
if (module.kibana !== undefined) {
const kKeys = Object.keys(module.kibana) as Array<keyof FileBasedModule['kibana']>;
await Promise.all(
kKeys.map(async (key) => {
kibana[key] = [];
await Promise.all(
manifestJSON!.kibana[key].map(async (obj) => {
try {
const kConfig = await this.readFile(
`${this._modulesDir}/${dirName}/${KIBANA_DIR}/${key}/${obj.file}`
);
// use the file name for the id
const kId = obj.file.replace('.json', '');
const config = JSON.parse(kConfig);
kibana[key]!.push({
id: kId,
title: config.title,
config,
});
} catch (error) {
mlLog.warn(
`Data recognizer error loading config for ${key} ${obj.id} for module ${id}. ${error}`
);
}
})
);
if (isFileBasedModule(module)) {
await Promise.all(
module.kibana[key].map(async (obj) => {
try {
const kConfigString = await this._readFile(
`${this._modulesDir}/${dirName}/${KIBANA_DIR}/${key}/${obj.file}`
);
// use the file name for the id
const kId = obj.file.replace('.json', '');
const kConfig = JSON.parse(kConfigString);
kibana[key]!.push({
id: kId,
title: kConfig.title,
config: kConfig,
});
} catch (error) {
mlLog.warn(
`Data recognizer error loading config for ${key} ${obj.id} for module ${id}. ${error}`
);
}
})
);
}
})
);
}
return {
...manifestJSON,
...module,
jobs,
datafeeds,
kibana,
@ -391,7 +433,7 @@ export class DataRecognizer {
// takes a module config id, an optional jobPrefix and the request object
// creates all of the jobs, datafeeds and savedObjects listed in the module config.
// if any of the savedObjects already exist, they will not be overwritten.
async setup(
public async setup(
moduleId: string,
jobPrefix?: string,
groups?: string[],
@ -417,11 +459,11 @@ export class DataRecognizer {
this._indexPatternName =
indexPatternName === undefined ? moduleConfig.defaultIndexPattern : indexPatternName;
this._indexPatternId = await this.getIndexPatternId(this._indexPatternName);
this._indexPatternId = await this._getIndexPatternId(this._indexPatternName);
// the module's jobs contain custom URLs which require an index pattern id
// but there is no corresponding index pattern, throw an error
if (this._indexPatternId === undefined && this.doJobUrlsContainIndexPatternId(moduleConfig)) {
if (this._indexPatternId === undefined && this._doJobUrlsContainIndexPatternId(moduleConfig)) {
throw Boom.badRequest(
`Module's jobs contain custom URLs which require a kibana index pattern (${this._indexPatternName}) which cannot be found.`
);
@ -431,7 +473,7 @@ export class DataRecognizer {
// but there is no corresponding index pattern, throw an error
if (
this._indexPatternId === undefined &&
this.doSavedObjectsContainIndexPatternId(moduleConfig)
this._doSavedObjectsContainIndexPatternId(moduleConfig)
) {
throw Boom.badRequest(
`Module's saved objects contain custom URLs which require a kibana index pattern (${this._indexPatternName}) which cannot be found.`
@ -439,23 +481,23 @@ export class DataRecognizer {
}
// create an empty results object
const results = this.createResultsTemplate(moduleConfig);
const results = this._createResultsTemplate(moduleConfig);
const saveResults: SaveResults = {
jobs: [] as JobResponse[],
datafeeds: [] as DatafeedResponse[],
savedObjects: [] as KibanaObjectResponse[],
};
this.jobsForModelMemoryEstimation = moduleConfig.jobs.map((job) => ({
this._jobsForModelMemoryEstimation = moduleConfig.jobs.map((job) => ({
job,
query: moduleConfig.datafeeds.find((d) => d.config.job_id === job.id)?.config.query ?? null,
}));
this.applyJobConfigOverrides(moduleConfig, jobOverrides, jobPrefix);
this.applyDatafeedConfigOverrides(moduleConfig, datafeedOverrides, jobPrefix);
this.updateDatafeedIndices(moduleConfig);
this.updateJobUrlIndexPatterns(moduleConfig);
await this.updateModelMemoryLimits(moduleConfig, estimateModelMemory, start, end);
this._updateDatafeedIndices(moduleConfig);
this._updateJobUrlIndexPatterns(moduleConfig);
await this._updateModelMemoryLimits(moduleConfig, estimateModelMemory, start, end);
// create the jobs
if (moduleConfig.jobs && moduleConfig.jobs.length) {
@ -468,7 +510,7 @@ export class DataRecognizer {
if (useDedicatedIndex === true) {
moduleConfig.jobs.forEach((job) => (job.config.results_index_name = job.id));
}
saveResults.jobs = await this.saveJobs(moduleConfig.jobs, applyToAllSpaces);
saveResults.jobs = await this._saveJobs(moduleConfig.jobs, applyToAllSpaces);
}
// create the datafeeds
@ -478,7 +520,7 @@ export class DataRecognizer {
df.config.query = query;
});
}
saveResults.datafeeds = await this.saveDatafeeds(moduleConfig.datafeeds);
saveResults.datafeeds = await this._saveDatafeeds(moduleConfig.datafeeds);
if (startDatafeed) {
const savedDatafeeds = moduleConfig.datafeeds.filter((df) => {
@ -486,7 +528,7 @@ export class DataRecognizer {
return datafeedResult !== undefined && datafeedResult.success === true;
});
const startResults = await this.startDatafeeds(savedDatafeeds, start, end);
const startResults = await this._startDatafeeds(savedDatafeeds, start, end);
saveResults.datafeeds.forEach((df) => {
const startedDatafeed = startResults[df.id];
if (startedDatafeed !== undefined) {
@ -503,26 +545,26 @@ export class DataRecognizer {
// create the savedObjects
if (moduleConfig.kibana) {
// update the saved objects with the index pattern id
this.updateSavedObjectIndexPatterns(moduleConfig);
this._updateSavedObjectIndexPatterns(moduleConfig);
const savedObjects = await this.createSavedObjectsToSave(moduleConfig);
const savedObjects = await this._createSavedObjectsToSave(moduleConfig);
// update the exists flag in the results
this.updateKibanaResults(results.kibana, savedObjects);
this._updateKibanaResults(results.kibana, savedObjects);
// create the savedObjects
try {
saveResults.savedObjects = await this.saveKibanaObjects(savedObjects);
saveResults.savedObjects = await this._saveKibanaObjects(savedObjects);
} catch (error) {
// only one error is returned for the bulk create saved object request
// so populate every saved object with the same error.
this.populateKibanaResultErrors(results.kibana, error.output?.payload);
this._populateKibanaResultErrors(results.kibana, error.output?.payload);
}
}
// merge all the save results
this.updateResults(results, saveResults);
this._updateResults(results, saveResults);
return results;
}
async dataRecognizerJobsExist(moduleId: string): Promise<JobExistResult> {
public async dataRecognizerJobsExist(moduleId: string): Promise<JobExistResult> {
const results = {} as JobExistResult;
// Load the module with the specified ID and check if the jobs
@ -573,7 +615,7 @@ export class DataRecognizer {
return results;
}
async loadIndexPatterns() {
private async _loadIndexPatterns() {
return await this._savedObjectsClient.find<IndexPatternAttributes>({
type: 'index-pattern',
perPage: 1000,
@ -581,9 +623,9 @@ export class DataRecognizer {
}
// returns a id based on an index pattern name
async getIndexPatternId(name: string) {
private async _getIndexPatternId(name: string) {
try {
const indexPatterns = await this.loadIndexPatterns();
const indexPatterns = await this._loadIndexPatterns();
if (indexPatterns === undefined || indexPatterns.saved_objects === undefined) {
return;
}
@ -598,9 +640,9 @@ export class DataRecognizer {
// create a list of objects which are used to save the savedObjects.
// each has an exists flag and those which do not already exist
// contain a savedObject object which is sent to the server to save
async createSavedObjectsToSave(moduleConfig: Module) {
private async _createSavedObjectsToSave(moduleConfig: Module) {
// first check if the saved objects already exist.
const savedObjectExistResults = await this.checkIfSavedObjectsExist(moduleConfig.kibana);
const savedObjectExistResults = await this._checkIfSavedObjectsExist(moduleConfig.kibana);
// loop through the kibanaSaveResults and update
Object.keys(moduleConfig.kibana).forEach((type) => {
// type e.g. dashboard, search, visualization
@ -624,7 +666,7 @@ export class DataRecognizer {
}
// update the exists flags in the kibana results
updateKibanaResults(
private _updateKibanaResults(
kibanaSaveResults: DataRecognizerConfigResponse['kibana'],
objectExistResults: ObjectExistResult[]
) {
@ -640,7 +682,7 @@ export class DataRecognizer {
// add an error object to every kibana saved object,
// if it doesn't already exist.
populateKibanaResultErrors(
private _populateKibanaResultErrors(
kibanaSaveResults: DataRecognizerConfigResponse['kibana'],
error: any
) {
@ -661,11 +703,13 @@ export class DataRecognizer {
// load existing savedObjects for each type and compare to find out if
// items with the same id already exist.
// returns a flat list of objects with exists flags set
async checkIfSavedObjectsExist(kibanaObjects: KibanaObjects): Promise<ObjectExistResponse[]> {
private async _checkIfSavedObjectsExist(
kibanaObjects: KibanaObjects
): Promise<ObjectExistResponse[]> {
const types = Object.keys(kibanaObjects);
const results: ObjectExistResponse[][] = await Promise.all(
types.map(async (type) => {
const existingObjects = await this.loadExistingSavedObjects(type);
const existingObjects = await this._loadExistingSavedObjects(type);
return kibanaObjects[type]!.map((obj) => {
const existingObject = existingObjects.saved_objects.find(
(o) => o.attributes && o.attributes.title === obj.title
@ -683,13 +727,13 @@ export class DataRecognizer {
}
// find all existing savedObjects for a given type
loadExistingSavedObjects(type: string) {
private _loadExistingSavedObjects(type: string) {
// TODO: define saved object type
return this._savedObjectsClient.find<any>({ type, perPage: 1000 });
}
// save the savedObjects if they do not exist already
async saveKibanaObjects(objectExistResults: ObjectExistResponse[]) {
private async _saveKibanaObjects(objectExistResults: ObjectExistResponse[]) {
let results = { saved_objects: [] as any[] };
const filteredSavedObjects = objectExistResults
.filter((o) => o.exists === false)
@ -710,13 +754,16 @@ export class DataRecognizer {
// save the jobs.
// if any fail (e.g. it already exists), catch the error and mark the result
// as success: false
async saveJobs(jobs: ModuleJob[], applyToAllSpaces: boolean = false): Promise<JobResponse[]> {
private async _saveJobs(
jobs: ModuleJob[],
applyToAllSpaces: boolean = false
): Promise<JobResponse[]> {
const resp = await Promise.all(
jobs.map(async (job) => {
const jobId = job.id;
try {
job.id = jobId;
await this.saveJob(job);
await this._saveJob(job);
return { id: jobId, success: true };
} catch ({ body }) {
return { id: jobId, success: false, error: body };
@ -738,18 +785,18 @@ export class DataRecognizer {
return resp;
}
async saveJob(job: ModuleJob) {
private async _saveJob(job: ModuleJob) {
return this._mlClient.putJob({ job_id: job.id, body: job.config });
}
// save the datafeeds.
// if any fail (e.g. it already exists), catch the error and mark the result
// as success: false
async saveDatafeeds(datafeeds: ModuleDatafeed[]) {
private async _saveDatafeeds(datafeeds: ModuleDatafeed[]) {
return await Promise.all(
datafeeds.map(async (datafeed) => {
try {
await this.saveDatafeed(datafeed);
await this._saveDatafeed(datafeed);
return {
id: datafeed.id,
success: true,
@ -769,7 +816,7 @@ export class DataRecognizer {
);
}
async saveDatafeed(datafeed: ModuleDatafeed) {
private async _saveDatafeed(datafeed: ModuleDatafeed) {
return this._mlClient.putDatafeed(
{
datafeed_id: datafeed.id,
@ -779,19 +826,19 @@ export class DataRecognizer {
);
}
async startDatafeeds(
private async _startDatafeeds(
datafeeds: ModuleDatafeed[],
start?: number,
end?: number
): Promise<{ [key: string]: DatafeedResponse }> {
const results = {} as { [key: string]: DatafeedResponse };
for (const datafeed of datafeeds) {
results[datafeed.id] = await this.startDatafeed(datafeed, start, end);
results[datafeed.id] = await this._startDatafeed(datafeed, start, end);
}
return results;
}
async startDatafeed(
private async _startDatafeed(
datafeed: ModuleDatafeed,
start: number | undefined,
end: number | undefined
@ -845,7 +892,7 @@ export class DataRecognizer {
// merge all of the save results into one result object
// which is returned from the endpoint
async updateResults(results: DataRecognizerConfigResponse, saveResults: SaveResults) {
private async _updateResults(results: DataRecognizerConfigResponse, saveResults: SaveResults) {
// update job results
results.jobs.forEach((j) => {
saveResults.jobs.forEach((j2) => {
@ -894,7 +941,7 @@ export class DataRecognizer {
// creates an empty results object,
// listing each job/datafeed/savedObject with a save success boolean
createResultsTemplate(moduleConfig: Module): DataRecognizerConfigResponse {
private _createResultsTemplate(moduleConfig: Module): DataRecognizerConfigResponse {
const results: DataRecognizerConfigResponse = {} as DataRecognizerConfigResponse;
const reducedConfig = {
jobs: moduleConfig.jobs,
@ -932,7 +979,7 @@ export class DataRecognizer {
// if an override index pattern has been specified,
// update all of the datafeeds.
updateDatafeedIndices(moduleConfig: Module) {
private _updateDatafeedIndices(moduleConfig: Module) {
// if the supplied index pattern contains a comma, split into multiple indices and
// add each one to the datafeed
const indexPatternNames = splitIndexPatternNames(this._indexPatternName);
@ -962,7 +1009,7 @@ export class DataRecognizer {
// loop through the custom urls in each job and replace the INDEX_PATTERN_ID
// marker for the id of the specified index pattern
updateJobUrlIndexPatterns(moduleConfig: Module) {
private _updateJobUrlIndexPatterns(moduleConfig: Module) {
if (Array.isArray(moduleConfig.jobs)) {
moduleConfig.jobs.forEach((job) => {
// if the job has custom_urls
@ -986,7 +1033,7 @@ export class DataRecognizer {
// check the custom urls in the module's jobs to see if they contain INDEX_PATTERN_ID
// which needs replacement
doJobUrlsContainIndexPatternId(moduleConfig: Module) {
private _doJobUrlsContainIndexPatternId(moduleConfig: Module) {
if (Array.isArray(moduleConfig.jobs)) {
for (const job of moduleConfig.jobs) {
// if the job has custom_urls
@ -1004,7 +1051,7 @@ export class DataRecognizer {
// loop through each kibana saved object and replace any INDEX_PATTERN_ID and
// INDEX_PATTERN_NAME markers for the id or name of the specified index pattern
updateSavedObjectIndexPatterns(moduleConfig: Module) {
private _updateSavedObjectIndexPatterns(moduleConfig: Module) {
if (moduleConfig.kibana) {
Object.keys(moduleConfig.kibana).forEach((category) => {
moduleConfig.kibana[category]!.forEach((item) => {
@ -1037,7 +1084,7 @@ export class DataRecognizer {
/**
* Provides a time range of the last 3 months of data
*/
async getFallbackTimeRange(
private async _getFallbackTimeRange(
timeField: string,
query?: any
): Promise<{ start: number; end: number }> {
@ -1059,7 +1106,7 @@ export class DataRecognizer {
* Ensure the model memory limit for each job is not greater than
* the max model memory setting for the cluster
*/
async updateModelMemoryLimits(
private async _updateModelMemoryLimits(
moduleConfig: Module,
estimateMML: boolean,
start?: number,
@ -1069,12 +1116,12 @@ export class DataRecognizer {
return;
}
if (estimateMML && this.jobsForModelMemoryEstimation.length > 0) {
if (estimateMML && this._jobsForModelMemoryEstimation.length > 0) {
try {
// Checks if all jobs in the module have the same time field configured
const firstJobTimeField = this.jobsForModelMemoryEstimation[0].job.config.data_description
const firstJobTimeField = this._jobsForModelMemoryEstimation[0].job.config.data_description
.time_field;
const isSameTimeFields = this.jobsForModelMemoryEstimation.every(
const isSameTimeFields = this._jobsForModelMemoryEstimation.every(
({ job }) => job.config.data_description.time_field === firstJobTimeField
);
@ -1085,16 +1132,16 @@ export class DataRecognizer {
const {
start: fallbackStart,
end: fallbackEnd,
} = await this.getFallbackTimeRange(firstJobTimeField, { match_all: {} });
} = await this._getFallbackTimeRange(firstJobTimeField, { match_all: {} });
start = fallbackStart;
end = fallbackEnd;
}
for (const { job, query } of this.jobsForModelMemoryEstimation) {
for (const { job, query } of this._jobsForModelMemoryEstimation) {
let earliestMs = start;
let latestMs = end;
if (earliestMs === undefined || latestMs === undefined) {
const timeFieldRange = await this.getFallbackTimeRange(
const timeFieldRange = await this._getFallbackTimeRange(
job.config.data_description.time_field,
query
);
@ -1157,7 +1204,7 @@ export class DataRecognizer {
// check the kibana saved searches JSON in the module to see if they contain INDEX_PATTERN_ID
// which needs replacement
doSavedObjectsContainIndexPatternId(moduleConfig: Module) {
private _doSavedObjectsContainIndexPatternId(moduleConfig: Module) {
if (moduleConfig.kibana) {
for (const category of Object.keys(moduleConfig.kibana)) {
for (const item of moduleConfig.kibana[category]!) {
@ -1171,7 +1218,7 @@ export class DataRecognizer {
return false;
}
applyJobConfigOverrides(
public applyJobConfigOverrides(
moduleConfig: Module,
jobOverrides?: JobOverride | JobOverride[],
jobPrefix = ''
@ -1205,9 +1252,9 @@ export class DataRecognizer {
});
if (generalOverrides.some((override) => !!override.analysis_limits?.model_memory_limit)) {
this.jobsForModelMemoryEstimation = [];
this._jobsForModelMemoryEstimation = [];
} else {
this.jobsForModelMemoryEstimation = moduleConfig.jobs
this._jobsForModelMemoryEstimation = moduleConfig.jobs
.filter((job) => {
const override = jobSpecificOverrides.find((o) => `${jobPrefix}${o.job_id}` === job.id);
return override?.analysis_limits?.model_memory_limit === undefined;
@ -1266,7 +1313,7 @@ export class DataRecognizer {
});
}
applyDatafeedConfigOverrides(
public applyDatafeedConfigOverrides(
moduleConfig: Module,
datafeedOverrides?: DatafeedOverride | DatafeedOverride[],
jobPrefix = ''

View file

@ -1,25 +0,0 @@
{
"job": {
"properties": {
"job_id": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"datafeed_id": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"type": {
"type": "keyword"
}
}
}
}

View file

@ -0,0 +1,90 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { SavedObjectsTypeMappingDefinition } from 'kibana/server';
export const mlJob: SavedObjectsTypeMappingDefinition = {
properties: {
job_id: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
},
},
},
datafeed_id: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
},
},
},
type: {
type: 'keyword',
},
},
};
export const mlModule: SavedObjectsTypeMappingDefinition = {
dynamic: false,
properties: {
id: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
},
},
},
title: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
},
},
},
description: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
},
},
},
type: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
},
},
},
logo: {
type: 'object',
},
defaultIndexPattern: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
},
},
},
query: {
type: 'object',
},
jobs: {
type: 'object',
},
datafeeds: {
type: 'object',
},
},
};

View file

@ -6,10 +6,13 @@
*/
import { SavedObjectsServiceSetup } from 'kibana/server';
import mappings from './mappings.json';
import { mlJob, mlModule } from './mappings';
import { migrations } from './migrations';
import { ML_SAVED_OBJECT_TYPE } from '../../common/types/saved_objects';
import {
ML_SAVED_OBJECT_TYPE,
ML_MODULE_SAVED_OBJECT_TYPE,
} from '../../common/types/saved_objects';
export function setupSavedObjects(savedObjects: SavedObjectsServiceSetup) {
savedObjects.registerType({
@ -17,6 +20,13 @@ export function setupSavedObjects(savedObjects: SavedObjectsServiceSetup) {
hidden: false,
namespaceType: 'multiple',
migrations,
mappings: mappings.job,
mappings: mlJob,
});
savedObjects.registerType({
name: ML_MODULE_SAVED_OBJECT_TYPE,
hidden: false,
namespaceType: 'agnostic',
migrations,
mappings: mlModule,
});
}
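
For completeness, the registration above is wired in from the ML server plugin's setup phase; the call site is not part of this diff, but it is roughly (class name and import path are assumptions):

import type { CoreSetup } from 'kibana/server';
import { setupSavedObjects } from './saved_objects';

export class MlServerPlugin {
  public setup(coreSetup: CoreSetup) {
    // registers both the ml-job and the new ml-module saved object types
    setupSavedObjects(coreSetup.savedObjects);
  }
}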