[ML] Adding space aware jobs (#77916)

* [ML] Adding space aware jobs

* adding mlClient

* switching to type includes

* adding additional job checks

* fixing conflict

* adding dfa checks

* refactoring jobs in spaces checks

* filtering calendars

* adding initial job object status and repair endpoints

* enabling repair endpoint

* fixing listed jobs in status

* adding datafeed repair

* updating shared services

* adding results job id check

* fixing conflicts

* don't remove SO on delete

* fixing non-ml plugins

* filtering job audit messages

* fixing types

* fixing tests

* adding job ids wildcard support

* removing empty migration test

* fixing tests and disabling spaces test user

* adding saved objects all permission

* fixing calendars

* updating job 404

* updating job wildcard search

* renaming services

* fixing conflicts

* fixing log tests

* disabling apm test

* skipping more apm tests

* optimizing repair

* fixing types

* updating apm test archive to include ML saved objects

* enabling disabled test

* removing comment

* adding space assigning endpoints

* adding saved object default permissions

* removing commented code

* loading all jobs for all spaces for status check

* adding spaces list endpoint

* adding job spaces to management page

* adding trained model filtering

* fixing trained model id check and job wildcard check

* fixing types

* fixing bug when adding new job to calendar

* changes based on review

* updating schema

* changes based on review

* fixing types

* rolling back http service injection

* fixing http service injection

* adding errors to repair endpoint response

* updating api doc

* improving types

* disabling id check on ad get endpoints

* fixing tests

* fixing group requests

* adding comments

* using filter in saved object search

* fixing fake request issue

* removing console log

* making job saved object hidden

* removing accidentally included file

* renaming saved object client

* updating apidoc

* unhiding ml saved objects

* moving route guard

* improving error when SOC is null

* fixing types after merge with master

* fixing tests

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
James Gowdy 2020-11-03 15:34:35 +00:00 committed by GitHub
parent 2db4244efc
commit a0fded52eb
133 changed files with 6127 additions and 1416 deletions

View file

@ -73,8 +73,14 @@ export function registerTransactionDurationAnomalyAlertType({
}
const alertParams = params;
const request = {} as KibanaRequest;
const { mlAnomalySearch } = ml.mlSystemProvider(request);
const anomalyDetectors = ml.anomalyDetectorsProvider(request);
const { mlAnomalySearch } = ml.mlSystemProvider(
request,
services.savedObjectsClient
);
const anomalyDetectors = ml.anomalyDetectorsProvider(
request,
services.savedObjectsClient
);
const mlJobs = await getMLJobs(anomalyDetectors, alertParams.environment);
@ -94,6 +100,7 @@ export function registerTransactionDurationAnomalyAlertType({
return {};
}
const jobIds = mlJobs.map((job) => job.job_id);
const anomalySearchParams = {
terminateAfter: 1,
body: {
@ -102,7 +109,7 @@ export function registerTransactionDurationAnomalyAlertType({
bool: {
filter: [
{ term: { result_type: 'record' } },
{ terms: { job_id: mlJobs.map((job) => job.job_id) } },
{ terms: { job_id: jobIds } },
{
range: {
timestamp: {
@ -163,7 +170,8 @@ export function registerTransactionDurationAnomalyAlertType({
};
const response = ((await mlAnomalySearch(
anomalySearchParams
anomalySearchParams,
jobIds
)) as unknown) as {
hits: { total: { value: number } };
aggregations?: {
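
The change above captures the new calling convention: mlSystemProvider and anomalyDetectorsProvider now take the request's saved objects client, and mlAnomalySearch takes the job IDs being queried so results can be filtered to jobs visible in the current space. Below is a minimal sketch of a consumer, assuming the ML plugin's setup contract is available as ml; the helper name and parameters are illustrative and not part of this commit.

import type { KibanaRequest, SavedObjectsClientContract } from 'kibana/server';

// Hedged sketch only: `ml` is assumed to be the ML plugin contract exposing the providers.
async function countAnomalies(
  ml: any,
  request: KibanaRequest,
  savedObjectsClient: SavedObjectsClientContract,
  jobIds: string[]
) {
  // Providers now require the saved objects client for space-aware job filtering.
  const { mlAnomalySearch } = ml.mlSystemProvider(request, savedObjectsClient);
  // The job IDs travel with the search so the server can reject jobs outside the space.
  const response = await mlAnomalySearch({ terminateAfter: 1, body: { size: 0 } }, jobIds);
  return response.hits.total.value;
}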

View file

@ -123,8 +123,8 @@ function getMlSetup(
request: KibanaRequest
) {
return {
mlSystem: ml.mlSystemProvider(request),
anomalyDetectors: ml.anomalyDetectorsProvider(request),
mlSystem: ml.mlSystemProvider(request, savedObjectsClient),
anomalyDetectors: ml.anomalyDetectorsProvider(request, savedObjectsClient),
modules: ml.modulesProvider(request, savedObjectsClient),
};
}

View file

@ -104,7 +104,7 @@ export async function getServiceAnomalies({
},
};
const response = await ml.mlSystem.mlAnomalySearch(params);
const response = await ml.mlSystem.mlAnomalySearch(params, mlJobIds);
return {
mlJobIds,

View file

@ -81,7 +81,7 @@ export async function anomalySeriesFetcher({
const response: ESSearchResponse<
unknown,
typeof params
> = (await ml.mlSystem.mlAnomalySearch(params)) as any;
> = (await ml.mlSystem.mlAnomalySearch(params, [jobId])) as any;
return response;
} catch (err) {

View file

@ -49,7 +49,7 @@ export async function getMlBucketSize({
};
try {
const resp = await ml.mlSystem.mlAnomalySearch<ESResponse>(params);
const resp = await ml.mlSystem.mlAnomalySearch<ESResponse>(params, [jobId]);
return resp.hits.hits[0]?._source.bucket_span;
} catch (err) {
const isHttpError = 'statusCode' in err;

View file

@ -62,7 +62,8 @@ export async function getLogEntryDatasets(
endTime,
COMPOSITE_AGGREGATION_BATCH_SIZE,
afterLatestBatchKey
)
),
jobIds
)
);

View file

@ -177,7 +177,8 @@ async function fetchMetricsHostsAnomalies(
const results = decodeOrThrow(metricsHostsAnomaliesResponseRT)(
await mlSystem.mlAnomalySearch(
createMetricsHostsAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination)
createMetricsHostsAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination),
jobIds
)
);

View file

@ -173,7 +173,8 @@ async function fetchMetricK8sAnomalies(
const results = decodeOrThrow(metricsK8sAnomaliesResponseRT)(
await mlSystem.mlAnomalySearch(
createMetricsK8sAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination)
createMetricsK8sAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination),
jobIds
)
);

View file

@ -62,7 +62,8 @@ export async function getLogEntryDatasets(
endTime,
COMPOSITE_AGGREGATION_BATCH_SIZE,
afterLatestBatchKey
)
),
jobIds
)
);

View file

@ -223,7 +223,8 @@ async function fetchLogEntryAnomalies(
const results = decodeOrThrow(logEntryAnomaliesResponseRT)(
await mlSystem.mlAnomalySearch(
createLogEntryAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination, datasets)
createLogEntryAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination, datasets),
jobIds
)
);

View file

@ -226,7 +226,8 @@ async function fetchTopLogEntryCategories(
endTime,
categoryCount,
datasets
)
),
[logEntryCategoriesCountJobId]
)
);
@ -284,7 +285,8 @@ export async function fetchLogEntryCategories(
const logEntryCategoriesResponse = decodeOrThrow(logEntryCategoriesResponseRT)(
await context.infra.mlSystem.mlAnomalySearch(
createLogEntryCategoriesQuery(logEntryCategoriesCountJobId, categoryIds)
createLogEntryCategoriesQuery(logEntryCategoriesCountJobId, categoryIds),
[logEntryCategoriesCountJobId]
)
);
@ -333,7 +335,8 @@ async function fetchTopLogEntryCategoryHistograms(
startTime,
endTime,
bucketCount
)
),
[logEntryCategoriesCountJobId]
)
.then(decodeOrThrow(logEntryCategoryHistogramsResponseRT))
.then((response) => ({

View file

@ -40,7 +40,8 @@ export async function getLatestLogEntriesCategoriesDatasetsStats(
endTime,
COMPOSITE_AGGREGATION_BATCH_SIZE,
afterLatestBatchKey
)
),
jobIds
);
const { after_key: afterKey, buckets: latestBatchBuckets = [] } =

View file

@ -43,7 +43,8 @@ export async function getLogEntryRateBuckets(
COMPOSITE_AGGREGATION_BATCH_SIZE,
afterLatestBatchKey,
datasets
)
),
[logRateJobId]
);
const { after_key: afterKey, buckets: latestBatchBuckets = [] } =

View file

@ -149,8 +149,11 @@ export class InfraServerPlugin {
core.http.registerRouteHandlerContext(
'infra',
(context, request): InfraRequestHandlerContext => {
const mlSystem = plugins.ml?.mlSystemProvider(request);
const mlAnomalyDetectors = plugins.ml?.anomalyDetectorsProvider(request);
const mlSystem = plugins.ml?.mlSystemProvider(request, context.core.savedObjects.client);
const mlAnomalyDetectors = plugins.ml?.anomalyDetectorsProvider(
request,
context.core.savedObjects.client
);
const spaceId = plugins.spaces?.spacesService.getSpaceId(request) || 'default';
return {

View file

@ -11,6 +11,7 @@ export interface Calendar {
description: string;
events: any[];
job_ids: string[];
total_job_count?: number;
}
export interface UpdateCalendar extends Calendar {

View file

@ -78,7 +78,7 @@ export function getPluginPrivileges() {
const adminMlCapabilitiesKeys = Object.keys(adminMlCapabilities);
const allMlCapabilitiesKeys = [...adminMlCapabilitiesKeys, ...userMlCapabilitiesKeys];
// TODO: include ML in base privileges for the `8.0` release: https://github.com/elastic/kibana/issues/71422
const savedObjects = ['index-pattern', 'dashboard', 'search', 'visualization'];
const savedObjects = ['index-pattern', 'dashboard', 'search', 'visualization', 'ml-job'];
const privilege = {
app: [PLUGIN_ID, 'kibana'],
excludeFromBasePrivileges: true,
@ -116,7 +116,7 @@ export function getPluginPrivileges() {
catalogue: [],
savedObject: {
all: [],
read: [],
read: ['ml-job'],
},
api: apmUserMlCapabilitiesKeys.map((k) => `ml:${k}`),
ui: apmUserMlCapabilitiesKeys,

View file

@ -0,0 +1,7 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export type JobType = 'anomaly-detector' | 'data-frame-analytics';

View file

@ -4,4 +4,4 @@
* you may not use this file except in compliance with the Elastic License.
*/
export { MlServerLicense } from './ml_server_license';
export { JobSpacesList } from './job_spaces_list';

View file

@ -0,0 +1,23 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { FC } from 'react';
import { EuiFlexGroup, EuiFlexItem, EuiBadge } from '@elastic/eui';
interface Props {
spaces: string[];
}
export const JobSpacesList: FC<Props> = ({ spaces }) => (
<EuiFlexGroup wrap responsive={false} gutterSize="xs">
{spaces.map((space) => (
<EuiFlexItem grow={false} key={space}>
<EuiBadge color={'hollow'}>{space}</EuiBadge>
</EuiFlexItem>
))}
</EuiFlexGroup>
);
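
JobSpacesList is consumed by the anomaly detection and analytics job lists later in this commit; a minimal usage sketch for a management table row follows. The JobRowProps shape is an assumption based on how the jobs list attaches spaces to each row.

import React, { FC } from 'react';
import { JobSpacesList } from './job_spaces_list';

// Hedged usage sketch: render the space badges for a single job row.
interface JobRowProps {
  spaces?: string[];
}

export const JobRowSpaces: FC<JobRowProps> = ({ spaces }) =>
  Array.isArray(spaces) ? <JobSpacesList spaces={spaces} /> : null;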

View file

@ -116,7 +116,8 @@ export const DataFrameAnalyticsList: FC<Props> = ({
setAnalyticsStats,
setErrorMessage,
setIsInitialized,
blockRefresh
blockRefresh,
isManagementTable
);
const updateFilteredItems = (queryClauses: any) => {

View file

@ -112,6 +112,7 @@ export interface DataFrameAnalyticsListRow {
mode: string;
state: DataFrameAnalyticsStats['state'];
stats: DataFrameAnalyticsStats;
spaces?: string[];
}
// Used to pass on attribute names to table columns

View file

@ -32,6 +32,7 @@ import {
import { useActions } from './use_actions';
import { useMlLink } from '../../../../../contexts/kibana';
import { ML_PAGES } from '../../../../../../../common/constants/ml_url_generator';
import { JobSpacesList } from '../../../../../components/job_spaces_list';
enum TASK_STATE_COLOR {
analyzing = 'primary',
@ -278,7 +279,8 @@ export const useColumns = (
name: i18n.translate('xpack.ml.jobsList.analyticsSpacesLabel', {
defaultMessage: 'Spaces',
}),
render: () => <EuiBadge color={'hollow'}>{'all'}</EuiBadge>,
render: (item: DataFrameAnalyticsListRow) =>
Array.isArray(item.spaces) ? <JobSpacesList spaces={item.spaces} /> : null,
width: '75px',
});

View file

@ -106,7 +106,8 @@ export const getAnalyticsFactory = (
React.SetStateAction<GetDataFrameAnalyticsStatsResponseError | undefined>
>,
setIsInitialized: React.Dispatch<React.SetStateAction<boolean>>,
blockRefresh: boolean
blockRefresh: boolean,
isManagementTable: boolean
): GetAnalytics => {
let concurrentLoads = 0;
@ -123,6 +124,12 @@ export const getAnalyticsFactory = (
const analyticsConfigs = await ml.dataFrameAnalytics.getDataFrameAnalytics();
const analyticsStats = await ml.dataFrameAnalytics.getDataFrameAnalyticsStats();
let spaces: { [id: string]: string[] } = {};
if (isManagementTable) {
const allSpaces = await ml.savedObjects.jobsSpaces();
spaces = allSpaces['data-frame-analytics'];
}
const analyticsStatsResult = isGetDataFrameAnalyticsStatsResponseOk(analyticsStats)
? getAnalyticsJobsStats(analyticsStats)
: undefined;
@ -148,6 +155,7 @@ export const getAnalyticsFactory = (
mode: DATA_FRAME_MODE.BATCH,
state: stats.state,
stats,
spaces: spaces[config.id] ?? [],
});
return reducedtableRows;
},

View file

@ -296,9 +296,12 @@ export function getTestUrl(job, customUrl) {
return new Promise((resolve, reject) => {
ml.results
.anomalySearch({
body,
})
.anomalySearch(
{
body,
},
[job.job_id]
)
.then((resp) => {
if (resp.hits.total.value > 0) {
const record = resp.hits.hits[0]._source;

View file

@ -40,7 +40,6 @@ export class JobDetailsUI extends Component {
}
render() {
console.log('this.props', this.props);
const { job } = this.state;
const {
services: {

View file

@ -14,9 +14,10 @@ import { toLocaleString } from '../../../../util/string_utils';
import { ResultLinks, actionsMenuContent } from '../job_actions';
import { JobDescription } from './job_description';
import { JobIcon } from '../../../../components/job_message_icon';
import { JobSpacesList } from '../../../../components/job_spaces_list';
import { TIME_FORMAT } from '../../../../../../common/constants/time_format';
import { EuiBadge, EuiBasicTable, EuiButtonIcon, EuiScreenReaderOnly } from '@elastic/eui';
import { EuiBasicTable, EuiButtonIcon, EuiScreenReaderOnly } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import { AnomalyDetectionJobIdLink } from './job_id_link';
@ -251,7 +252,7 @@ export class JobsList extends Component {
name: i18n.translate('xpack.ml.jobsList.spacesLabel', {
defaultMessage: 'Spaces',
}),
render: () => <EuiBadge color={'hollow'}>{'all'}</EuiBadge>,
render: (item) => <JobSpacesList spaces={item.spaces} />,
});
// Remove actions if Ml not enabled in current space
if (this.props.isMlEnabledInSpace === false) {

View file

@ -17,6 +17,7 @@ import {
EuiSpacer,
EuiTitle,
} from '@elastic/eui';
import { isEqual } from 'lodash';
import { ml } from '../../../../services/ml_api_service';
import { checkForAutoStartDatafeed, filterJobs, loadFullJob } from '../utils';
@ -34,7 +35,6 @@ import { NodeAvailableWarning } from '../../../../components/node_available_warn
import { DatePickerWrapper } from '../../../../components/navigation_menu/date_picker_wrapper';
import { UpgradeWarning } from '../../../../components/upgrade';
import { RefreshJobsListButton } from '../refresh_jobs_list_button';
import { isEqual } from 'lodash';
import { DELETING_JOBS_REFRESH_INTERVAL_MS } from '../../../../../../common/constants/jobs_list';
@ -246,6 +246,12 @@ export class JobsListView extends Component {
const expandedJobsIds = Object.keys(this.state.itemIdToExpandedRowMap);
try {
let spaces = {};
if (this.props.isManagementTable) {
const allSpaces = await ml.savedObjects.jobsSpaces();
spaces = allSpaces['anomaly-detector'];
}
const jobs = await ml.jobs.jobsSummary(expandedJobsIds);
const fullJobsList = {};
const jobsSummaryList = jobs.map((job) => {
@ -254,6 +260,10 @@ export class JobsListView extends Component {
delete job.fullJob;
}
job.latestTimestampSortValue = job.latestTimestampMs || 0;
job.spaces =
this.props.isManagementTable && spaces && spaces[job.id] !== undefined
? spaces[job.id]
: [];
return job;
});
const filteredJobsSummaryList = filterJobs(jobsSummaryList, this.state.filterClauses);

View file

@ -53,68 +53,70 @@ export function getScoresByRecord(
}
ml.results
.anomalySearch({
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
},
},
{
bool: {
must: [
{
range: {
timestamp: {
gte: earliestMs,
lte: latestMs,
format: 'epoch_millis',
},
},
},
{
query_string: {
query: jobIdFilterStr,
},
},
],
},
},
],
},
},
aggs: {
detector_index: {
terms: {
field: 'detector_index',
order: {
recordScore: 'desc',
},
},
aggs: {
recordScore: {
max: {
field: 'record_score',
},
},
byTime: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
extended_bounds: {
min: earliestMs,
max: latestMs,
.anomalySearch(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
},
},
aggs: {
recordScore: {
max: {
field: 'record_score',
{
bool: {
must: [
{
range: {
timestamp: {
gte: earliestMs,
lte: latestMs,
format: 'epoch_millis',
},
},
},
{
query_string: {
query: jobIdFilterStr,
},
},
],
},
},
],
},
},
aggs: {
detector_index: {
terms: {
field: 'detector_index',
order: {
recordScore: 'desc',
},
},
aggs: {
recordScore: {
max: {
field: 'record_score',
},
},
byTime: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
extended_bounds: {
min: earliestMs,
max: latestMs,
},
},
aggs: {
recordScore: {
max: {
field: 'record_score',
},
},
},
},
@ -123,7 +125,8 @@ export function getScoresByRecord(
},
},
},
})
[jobId]
)
.then((resp: any) => {
const detectorsByIndex = get(resp, ['aggregations', 'detector_index', 'buckets'], []);
detectorsByIndex.forEach((dtr: any) => {

View file

@ -52,6 +52,7 @@ export const AnalyticsPanel: FC<Props> = ({ jobCreationDisabled }) => {
setAnalyticsStats,
setErrorMessage,
setIsInitialized,
false,
false
);

View file

@ -48,17 +48,20 @@ function getForecastsSummary(job, query, earliestMs, maxResults) {
}
ml.results
.anomalySearch({
size: maxResults,
body: {
query: {
bool: {
filter: filterCriteria,
.anomalySearch(
{
size: maxResults,
body: {
query: {
bool: {
filter: filterCriteria,
},
},
sort: [{ forecast_create_timestamp: { order: 'desc' } }],
},
sort: [{ forecast_create_timestamp: { order: 'desc' } }],
},
})
[job.job_id]
)
.then((resp) => {
if (resp.hits.total.value > 0) {
obj.forecasts = resp.hits.hits.map((hit) => hit._source);
@ -105,28 +108,31 @@ function getForecastDateRange(job, forecastId) {
// once forecasting with these parameters is supported.
ml.results
.anomalySearch({
size: 0,
body: {
query: {
bool: {
filter: filterCriteria,
},
},
aggs: {
earliest: {
min: {
field: 'timestamp',
.anomalySearch(
{
size: 0,
body: {
query: {
bool: {
filter: filterCriteria,
},
},
latest: {
max: {
field: 'timestamp',
aggs: {
earliest: {
min: {
field: 'timestamp',
},
},
latest: {
max: {
field: 'timestamp',
},
},
},
},
},
})
[job.job_id]
)
.then((resp) => {
obj.earliest = get(resp, 'aggregations.earliest.value', null);
obj.latest = get(resp, 'aggregations.latest.value', null);
@ -242,42 +248,45 @@ function getForecastData(
};
return ml.results
.anomalySearch$({
size: 0,
body: {
query: {
bool: {
filter: filterCriteria,
},
},
aggs: {
times: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
.anomalySearch$(
{
size: 0,
body: {
query: {
bool: {
filter: filterCriteria,
},
aggs: {
prediction: {
[forecastAggs.avg]: {
field: 'forecast_prediction',
},
},
aggs: {
times: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
},
forecastUpper: {
[forecastAggs.max]: {
field: 'forecast_upper',
aggs: {
prediction: {
[forecastAggs.avg]: {
field: 'forecast_prediction',
},
},
},
forecastLower: {
[forecastAggs.min]: {
field: 'forecast_lower',
forecastUpper: {
[forecastAggs.max]: {
field: 'forecast_upper',
},
},
forecastLower: {
[forecastAggs.min]: {
field: 'forecast_lower',
},
},
},
},
},
},
},
})
[job.job_id]
)
.pipe(
map((resp) => {
const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
@ -341,16 +350,19 @@ function getForecastRequestStats(job, forecastId) {
];
ml.results
.anomalySearch({
size: 1,
body: {
query: {
bool: {
filter: filterCriteria,
.anomalySearch(
{
size: 1,
body: {
query: {
bool: {
filter: filterCriteria,
},
},
},
},
})
[job.job_id]
)
.then((resp) => {
if (resp.hits.total.value > 0) {
obj.stats = resp.hits.hits[0]._source;

View file

@ -14,6 +14,7 @@ import { filters } from './filters';
import { resultsApiProvider } from './results';
import { jobsApiProvider } from './jobs';
import { fileDatavisualizer } from './datavisualizer';
import { savedObjectsApiProvider } from './saved_objects';
import { MlServerDefaults, MlServerLimits } from '../../../../common/types/ml_server_info';
import { MlCapabilitiesResponse } from '../../../../common/types/capabilities';
@ -765,5 +766,6 @@ export function mlApiServicesProvider(httpService: HttpService) {
results: resultsApiProvider(httpService),
jobs: jobsApiProvider(httpService),
fileDatavisualizer,
savedObjects: savedObjectsApiProvider(httpService),
};
}

View file

@ -106,8 +106,8 @@ export const resultsApiProvider = (httpService: HttpService) => ({
});
},
anomalySearch(obj: any) {
const body = JSON.stringify(obj);
anomalySearch(query: any, jobIds: string[]) {
const body = JSON.stringify({ query, jobIds });
return httpService.http<any>({
path: `${basePath()}/results/anomaly_search`,
method: 'POST',
@ -115,8 +115,8 @@ export const resultsApiProvider = (httpService: HttpService) => ({
});
},
anomalySearch$(obj: any) {
const body = JSON.stringify(obj);
anomalySearch$(query: any, jobIds: string[]) {
const body = JSON.stringify({ query, jobIds });
return httpService.http$<any>({
path: `${basePath()}/results/anomaly_search`,
method: 'POST',
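
Both anomalySearch and anomalySearch$ now serialize the job IDs into the request body alongside the search, so the server-side anomaly_search route can run its space check before querying the results index. A hedged example call, where the search body and job ID are placeholders:

// Illustrative only; the first argument is a normal Elasticsearch search request as before.
const resp = await ml.results.anomalySearch(
  { size: 0, body: { query: { match_all: {} } } },
  ['my_job_id'] // placeholder job ID
);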

View file

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// Service for managing job saved objects
import { HttpService } from '../http_service';
import { basePath } from './index';
export const savedObjectsApiProvider = (httpService: HttpService) => ({
jobsSpaces() {
return httpService.http<any>({
path: `${basePath()}/saved_objects/jobs_spaces`,
method: 'GET',
});
},
});
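
Judging by the management page changes earlier in this commit, the response maps each job type to an object of job ID to space IDs. A hedged sketch of consuming it; the typing is inferred from those callers rather than declared here:

// Hedged sketch; the endpoint returns `any`, so the shape below is an assumption based on
// callers such as JobsListView reading allSpaces['anomaly-detector'][job.id].
const allSpaces = await ml.savedObjects.jobsSpaces();
const adSpaces: { [jobId: string]: string[] } = allSpaces['anomaly-detector'] ?? {};
const spacesForJob = adSpaces['my_job_id'] ?? []; // 'my_job_id' is a placeholder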

View file

@ -18,7 +18,6 @@ import { Dictionary } from '../../../../common/types/common';
import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
import { JobId } from '../../../../common/types/anomaly_detection_jobs';
import { MlApiServices } from '../ml_api_service';
import { ML_RESULTS_INDEX_PATTERN } from '../../../../common/constants/index_patterns';
import { CriteriaField } from './index';
interface ResultResponse {
@ -263,57 +262,59 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
];
return mlApiServices.results
.anomalySearch$({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:model_plot',
analyze_wildcard: true,
.anomalySearch$(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:model_plot',
analyze_wildcard: true,
},
},
},
{
bool: {
must: mustCriteria,
should: shouldCriteria,
minimum_should_match: 1,
{
bool: {
must: mustCriteria,
should: shouldCriteria,
minimum_should_match: 1,
},
},
},
],
},
},
aggs: {
times: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 0,
],
},
aggs: {
actual: {
avg: {
field: 'actual',
},
},
aggs: {
times: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 0,
},
modelUpper: {
[modelAggs.max]: {
field: 'model_upper',
aggs: {
actual: {
avg: {
field: 'actual',
},
},
},
modelLower: {
[modelAggs.min]: {
field: 'model_lower',
modelUpper: {
[modelAggs.max]: {
field: 'model_upper',
},
},
modelLower: {
[modelAggs.min]: {
field: 'model_lower',
},
},
},
},
},
},
},
})
[jobId]
)
.pipe(
map((resp) => {
const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
@ -343,7 +344,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
// 'fieldValue' properties.
// Pass an empty array or ['*'] to search over all job IDs.
getRecordsForCriteria(
jobIds: string[] | undefined,
jobIds: string[],
criteriaFields: CriteriaField[],
threshold: any,
earliestMs: number,
@ -400,30 +401,32 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
});
return mlApiServices.results
.anomalySearch$({
index: ML_RESULTS_INDEX_PATTERN,
size: maxResults !== undefined ? maxResults : 100,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
.anomalySearch$(
{
size: maxResults !== undefined ? maxResults : 100,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
],
},
},
sort: [{ record_score: { order: 'desc' } }],
},
sort: [{ record_score: { order: 'desc' } }],
},
})
jobIds
)
.pipe(
map((resp) => {
if (resp.hits.total.value > 0) {
@ -441,7 +444,7 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
// Returned response contains a events property, which will only
// contains keys for jobs which have scheduled events for the specified time range.
getScheduledEventsByBucket(
jobIds: string[] | undefined,
jobIds: string[],
earliestMs: number,
latestMs: number,
intervalMs: number,
@ -484,46 +487,47 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
}
return mlApiServices.results
.anomalySearch$({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:bucket',
analyze_wildcard: false,
.anomalySearch$(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:bucket',
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
},
},
aggs: {
jobs: {
terms: {
field: 'job_id',
min_doc_count: 1,
size: maxJobs,
],
},
aggs: {
times: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
},
aggs: {
events: {
terms: {
field: 'scheduled_events',
size: maxEvents,
},
aggs: {
jobs: {
terms: {
field: 'job_id',
min_doc_count: 1,
size: maxJobs,
},
aggs: {
times: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
},
aggs: {
events: {
terms: {
field: 'scheduled_events',
size: maxEvents,
},
},
},
},
@ -532,7 +536,8 @@ export function resultsServiceRxProvider(mlApiServices: MlApiServices) {
},
},
},
})
jobIds
)
.pipe(
map((resp) => {
const dataByJobId = get(resp, ['aggregations', 'jobs', 'buckets'], []);

View file

@ -66,66 +66,68 @@ export function resultsServiceProvider(mlApiServices) {
}
mlApiServices.results
.anomalySearch({
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:bucket',
analyze_wildcard: false,
},
},
{
bool: {
must: boolCriteria,
},
},
],
},
},
aggs: {
jobsCardinality: {
cardinality: {
field: 'job_id',
},
},
jobId: {
terms: {
field: 'job_id',
size: jobIds?.length ?? 1,
order: {
anomalyScore: 'desc',
},
},
aggs: {
anomalyScore: {
max: {
field: 'anomaly_score',
},
},
bucketTruncate: {
bucket_sort: {
from: (fromPage - 1) * perPage,
size: perPage === 0 ? 1 : perPage,
},
},
byTime: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
extended_bounds: {
min: earliestMs,
max: latestMs,
.anomalySearch(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:bucket',
analyze_wildcard: false,
},
},
aggs: {
anomalyScore: {
max: {
field: 'anomaly_score',
{
bool: {
must: boolCriteria,
},
},
],
},
},
aggs: {
jobsCardinality: {
cardinality: {
field: 'job_id',
},
},
jobId: {
terms: {
field: 'job_id',
size: jobIds?.length ?? 1,
order: {
anomalyScore: 'desc',
},
},
aggs: {
anomalyScore: {
max: {
field: 'anomaly_score',
},
},
bucketTruncate: {
bucket_sort: {
from: (fromPage - 1) * perPage,
size: perPage === 0 ? 1 : perPage,
},
},
byTime: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
extended_bounds: {
min: earliestMs,
max: latestMs,
},
},
aggs: {
anomalyScore: {
max: {
field: 'anomaly_score',
},
},
},
},
@ -134,7 +136,8 @@ export function resultsServiceProvider(mlApiServices) {
},
},
},
})
jobIds
)
.then((resp) => {
const dataByJobId = get(resp, ['aggregations', 'jobId', 'buckets'], []);
each(dataByJobId, (dataForJob) => {
@ -243,64 +246,66 @@ export function resultsServiceProvider(mlApiServices) {
}
mlApiServices.results
.anomalySearch({
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:influencer',
analyze_wildcard: false,
.anomalySearch(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:influencer',
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
},
},
aggs: {
influencerFieldNames: {
terms: {
field: 'influencer_field_name',
size: 5,
order: {
maxAnomalyScore: 'desc',
},
],
},
aggs: {
maxAnomalyScore: {
max: {
field: 'influencer_score',
},
aggs: {
influencerFieldNames: {
terms: {
field: 'influencer_field_name',
size: 5,
order: {
maxAnomalyScore: 'desc',
},
},
influencerFieldValues: {
terms: {
field: 'influencer_field_value',
size: maxFieldValues,
order: {
maxAnomalyScore: 'desc',
aggs: {
maxAnomalyScore: {
max: {
field: 'influencer_score',
},
},
aggs: {
bucketTruncate: {
bucket_sort: {
from: (fromPage - 1) * perPage,
size: perPage,
influencerFieldValues: {
terms: {
field: 'influencer_field_value',
size: maxFieldValues,
order: {
maxAnomalyScore: 'desc',
},
},
maxAnomalyScore: {
max: {
field: 'influencer_score',
aggs: {
bucketTruncate: {
bucket_sort: {
from: (fromPage - 1) * perPage,
size: perPage,
},
},
},
sumAnomalyScore: {
sum: {
field: 'influencer_score',
maxAnomalyScore: {
max: {
field: 'influencer_score',
},
},
sumAnomalyScore: {
sum: {
field: 'influencer_score',
},
},
},
},
@ -309,7 +314,8 @@ export function resultsServiceProvider(mlApiServices) {
},
},
},
})
jobIds
)
.then((resp) => {
const fieldNameBuckets = get(
resp,
@ -382,53 +388,56 @@ export function resultsServiceProvider(mlApiServices) {
}
mlApiServices.results
.anomalySearch({
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: `result_type:influencer AND influencer_field_name: ${escapeForElasticsearchQuery(
influencerFieldName
)}`,
analyze_wildcard: false,
.anomalySearch(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: `result_type:influencer AND influencer_field_name: ${escapeForElasticsearchQuery(
influencerFieldName
)}`,
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
},
},
aggs: {
influencerFieldValues: {
terms: {
field: 'influencer_field_value',
size: maxResults !== undefined ? maxResults : 2,
order: {
maxAnomalyScore: 'desc',
},
],
},
aggs: {
maxAnomalyScore: {
max: {
field: 'influencer_score',
},
aggs: {
influencerFieldValues: {
terms: {
field: 'influencer_field_value',
size: maxResults !== undefined ? maxResults : 2,
order: {
maxAnomalyScore: 'desc',
},
},
sumAnomalyScore: {
sum: {
field: 'influencer_score',
aggs: {
maxAnomalyScore: {
max: {
field: 'influencer_score',
},
},
sumAnomalyScore: {
sum: {
field: 'influencer_score',
},
},
},
},
},
},
},
})
jobIds
)
.then((resp) => {
const buckets = get(resp, ['aggregations', 'influencerFieldValues', 'buckets'], []);
each(buckets, (bucket) => {
@ -563,64 +572,66 @@ export function resultsServiceProvider(mlApiServices) {
}
mlApiServices.results
.anomalySearch({
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: `result_type:influencer AND influencer_field_name: ${escapeForElasticsearchQuery(
influencerFieldName
)}`,
analyze_wildcard: false,
.anomalySearch(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: `result_type:influencer AND influencer_field_name: ${escapeForElasticsearchQuery(
influencerFieldName
)}`,
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
},
},
aggs: {
influencerValuesCardinality: {
cardinality: {
field: 'influencer_field_value',
],
},
},
influencerFieldValues: {
terms: {
field: 'influencer_field_value',
size: !!maxResults ? maxResults : ANOMALY_SWIM_LANE_HARD_LIMIT,
order: {
maxAnomalyScore: 'desc',
aggs: {
influencerValuesCardinality: {
cardinality: {
field: 'influencer_field_value',
},
},
aggs: {
maxAnomalyScore: {
max: {
field: 'influencer_score',
influencerFieldValues: {
terms: {
field: 'influencer_field_value',
size: !!maxResults ? maxResults : ANOMALY_SWIM_LANE_HARD_LIMIT,
order: {
maxAnomalyScore: 'desc',
},
},
bucketTruncate: {
bucket_sort: {
from: (fromPage - 1) * perPage,
size: perPage,
aggs: {
maxAnomalyScore: {
max: {
field: 'influencer_score',
},
},
},
byTime: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
bucketTruncate: {
bucket_sort: {
from: (fromPage - 1) * perPage,
size: perPage,
},
},
aggs: {
maxAnomalyScore: {
max: {
field: 'influencer_score',
byTime: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
},
aggs: {
maxAnomalyScore: {
max: {
field: 'influencer_score',
},
},
},
},
@ -629,7 +640,8 @@ export function resultsServiceProvider(mlApiServices) {
},
},
},
})
jobIds
)
.then((resp) => {
const fieldValueBuckets = get(
resp,
@ -723,30 +735,33 @@ export function resultsServiceProvider(mlApiServices) {
}
mlApiServices.results
.anomalySearch({
size: maxResults !== undefined ? maxResults : 100,
body: {
_source: ['job_id', 'detector_index', 'influencers', 'record_score'],
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
.anomalySearch(
{
size: maxResults !== undefined ? maxResults : 100,
body: {
_source: ['job_id', 'detector_index', 'influencers', 'record_score'],
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
],
},
},
sort: [{ record_score: { order: 'desc' } }],
},
sort: [{ record_score: { order: 'desc' } }],
},
})
jobIds
)
.then((resp) => {
if (resp.hits.total.value > 0) {
each(resp.hits.hits, (hit) => {
@ -854,29 +869,32 @@ export function resultsServiceProvider(mlApiServices) {
}
mlApiServices.results
.anomalySearch({
size: maxResults !== undefined ? maxResults : 100,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
.anomalySearch(
{
size: maxResults !== undefined ? maxResults : 100,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
],
},
},
sort: [{ record_score: { order: 'desc' } }],
},
sort: [{ record_score: { order: 'desc' } }],
},
})
jobIds
)
.then((resp) => {
if (resp.hits.total.value > 0) {
each(resp.hits.hits, (hit) => {
@ -978,29 +996,32 @@ export function resultsServiceProvider(mlApiServices) {
}
mlApiServices.results
.anomalySearch({
size: maxResults !== undefined ? maxResults : 100,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
.anomalySearch(
{
size: maxResults !== undefined ? maxResults : 100,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
],
},
},
sort: [{ record_score: { order: 'desc' } }],
},
sort: [{ record_score: { order: 'desc' } }],
},
})
[jobId]
)
.then((resp) => {
if (resp.hits.total.value > 0) {
each(resp.hits.hits, (hit) => {
@ -1302,44 +1323,47 @@ export function resultsServiceProvider(mlApiServices) {
});
mlApiServices.results
.anomalySearch({
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: true,
.anomalySearch(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: true,
},
},
},
{
bool: {
must: mustCriteria,
{
bool: {
must: mustCriteria,
},
},
},
],
},
},
aggs: {
times: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
],
},
aggs: {
recordScore: {
max: {
field: 'record_score',
},
aggs: {
times: {
date_histogram: {
field: 'timestamp',
fixed_interval: `${intervalMs}ms`,
min_doc_count: 1,
},
aggs: {
recordScore: {
max: {
field: 'record_score',
},
},
},
},
},
},
},
})
[jobId]
)
.then((resp) => {
const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
each(aggregationsByTime, (dataForTime) => {

View file

@ -201,7 +201,6 @@ export function getGetUrlGenerator() {
}
export function clearCache() {
console.log('clearing dependency cache'); // eslint-disable-line no-console
Object.keys(cache).forEach((k) => {
cache[k as keyof DependencyCache] = null;
});

View file

@ -4,11 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import { getAdminCapabilities, getUserCapabilities } from './__mocks__/ml_capabilities';
import { capabilitiesProvider } from './check_capabilities';
import { MlLicense } from '../../../common/license';
import { getDefaultCapabilities } from '../../../common/types/capabilities';
import type { MlClient } from '../../lib/ml_client';
const mlLicense = {
isSecurityEnabled: () => true,
@ -23,35 +23,27 @@ const mlLicenseBasic = {
const mlIsEnabled = async () => true;
const mlIsNotEnabled = async () => false;
const mlClusterClientNonUpgrade = ({
asInternalUser: {
ml: {
info: async () => ({
body: {
upgrade_mode: false,
},
}),
const mlClientNonUpgrade = ({
info: async () => ({
body: {
upgrade_mode: false,
},
},
} as unknown) as IScopedClusterClient;
}),
} as unknown) as MlClient;
const mlClusterClientUpgrade = ({
asInternalUser: {
ml: {
info: async () => ({
body: {
upgrade_mode: true,
},
}),
const mlClientUpgrade = ({
info: async () => ({
body: {
upgrade_mode: true,
},
},
} as unknown) as IScopedClusterClient;
}),
} as unknown) as MlClient;
describe('check_capabilities', () => {
describe('getCapabilities() - right number of capabilities', () => {
test('kibana capabilities count', async () => {
const { getCapabilities } = capabilitiesProvider(
mlClusterClientNonUpgrade,
mlClientNonUpgrade,
getAdminCapabilities(),
mlLicense,
mlIsEnabled
@ -65,7 +57,7 @@ describe('check_capabilities', () => {
describe('getCapabilities() with security', () => {
test('ml_user capabilities only', async () => {
const { getCapabilities } = capabilitiesProvider(
mlClusterClientNonUpgrade,
mlClientNonUpgrade,
getUserCapabilities(),
mlLicense,
mlIsEnabled
@ -113,7 +105,7 @@ describe('check_capabilities', () => {
test('full capabilities', async () => {
const { getCapabilities } = capabilitiesProvider(
mlClusterClientNonUpgrade,
mlClientNonUpgrade,
getAdminCapabilities(),
mlLicense,
mlIsEnabled
@ -161,7 +153,7 @@ describe('check_capabilities', () => {
test('upgrade in progress with full capabilities', async () => {
const { getCapabilities } = capabilitiesProvider(
mlClusterClientUpgrade,
mlClientUpgrade,
getAdminCapabilities(),
mlLicense,
mlIsEnabled
@ -209,7 +201,7 @@ describe('check_capabilities', () => {
test('upgrade in progress with partial capabilities', async () => {
const { getCapabilities } = capabilitiesProvider(
mlClusterClientUpgrade,
mlClientUpgrade,
getUserCapabilities(),
mlLicense,
mlIsEnabled
@ -257,7 +249,7 @@ describe('check_capabilities', () => {
test('full capabilities, ml disabled in space', async () => {
const { getCapabilities } = capabilitiesProvider(
mlClusterClientNonUpgrade,
mlClientNonUpgrade,
getDefaultCapabilities(),
mlLicense,
mlIsNotEnabled
@ -306,7 +298,7 @@ describe('check_capabilities', () => {
test('full capabilities, basic license, ml disabled in space', async () => {
const { getCapabilities } = capabilitiesProvider(
mlClusterClientNonUpgrade,
mlClientNonUpgrade,
getDefaultCapabilities(),
mlLicenseBasic,
mlIsNotEnabled

View file

@ -4,7 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient, KibanaRequest } from 'kibana/server';
import { KibanaRequest } from 'kibana/server';
import type { MlClient } from '../../lib/ml_client';
import { mlLog } from '../../client/log';
import {
MlCapabilities,
@ -22,12 +23,12 @@ import {
} from './errors';
export function capabilitiesProvider(
client: IScopedClusterClient,
mlClient: MlClient,
capabilities: MlCapabilities,
mlLicense: MlLicense,
isMlEnabledInSpace: () => Promise<boolean>
) {
const { isUpgradeInProgress } = upgradeCheckProvider(client);
const { isUpgradeInProgress } = upgradeCheckProvider(mlClient);
async function getCapabilities(): Promise<MlCapabilitiesResponse> {
const upgradeInProgress = await isUpgradeInProgress();
const isPlatinumOrTrialLicense = mlLicense.isFullLicense();
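
With the switch from IScopedClusterClient to MlClient, the upgrade check now runs through the space-aware client. A hedged sketch of the call site; all arguments are placeholders assumed to come from the route handler context:

// Illustrative wiring only; mlClient, capabilities, mlLicense and isMlEnabledInSpace
// are placeholders for values provided by the surrounding route infrastructure.
const { getCapabilities } = capabilitiesProvider(
  mlClient,
  capabilities,
  mlLicense,
  isMlEnabledInSpace
);
const mlCapabilities = await getCapabilities();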

View file

@ -4,14 +4,14 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import type { MlClient } from '../../lib/ml_client';
import { mlLog } from '../../client/log';
export function upgradeCheckProvider({ asInternalUser }: IScopedClusterClient) {
export function upgradeCheckProvider(mlClient: MlClient) {
async function isUpgradeInProgress(): Promise<boolean> {
let upgradeInProgress = false;
try {
const { body } = await asInternalUser.ml.info();
const { body } = await mlClient.info();
// if ml indices are currently being migrated, upgrade_mode will be set to true
// pass this back with the privileges to allow for the disabling of UI controls.
upgradeInProgress = body.upgrade_mode === true;

View file

@ -1,49 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
KibanaRequest,
KibanaResponseFactory,
RequestHandlerContext,
IScopedClusterClient,
RequestHandler,
} from 'kibana/server';
import { MlLicense } from '../../../common/license';
type Handler = (handlerParams: {
client: IScopedClusterClient;
request: KibanaRequest<any, any, any, any>;
response: KibanaResponseFactory;
context: RequestHandlerContext;
}) => ReturnType<RequestHandler>;
export class MlServerLicense extends MlLicense {
public fullLicenseAPIGuard(handler: Handler) {
return guard(() => this.isFullLicense(), handler);
}
public basicLicenseAPIGuard(handler: Handler) {
return guard(() => this.isMinimumLicense(), handler);
}
}
function guard(check: () => boolean, handler: Handler) {
return (
context: RequestHandlerContext,
request: KibanaRequest<any, any, any, any>,
response: KibanaResponseFactory
) => {
if (check() === false) {
return response.forbidden();
}
return handler({
client: context.core.elasticsearch.client,
request,
response,
context,
});
};
}

View file

@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export class MLJobNotFound extends Error {
statusCode = 404;
constructor(message?: string) {
super(message);
Object.setPrototypeOf(this, new.target.prototype);
}
}
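
Because the class sets statusCode = 404, callers can surface it directly as an HTTP 404. A hedged sketch of how a route handler might map it; the handler body is illustrative and not part of this commit:

// Illustrative only: translate the space-filtered "not found" error into a 404 response.
try {
  const { body } = await mlClient.getJobs({ job_id: jobId }); // jobId is a placeholder
  return response.ok({ body });
} catch (e) {
  if (e instanceof MLJobNotFound) {
    return response.notFound({ body: { message: e.message } });
  }
  throw e;
}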

View file

@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { getMlClient } from './ml_client';
export { MLJobNotFound } from './errors';
export { MlClient } from './types';

View file

@ -0,0 +1,561 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import { JobSavedObjectService } from '../../saved_objects';
import { JobType } from '../../../common/types/saved_objects';
import {
Job,
JobStats,
Datafeed,
DatafeedStats,
} from '../../../common/types/anomaly_detection_jobs';
import { Calendar } from '../../../common/types/calendars';
import { searchProvider } from './search';
import { DataFrameAnalyticsConfig } from '../../../common/types/data_frame_analytics';
import { InferenceConfigResponse, TrainedModelStat } from '../../../common/types/trained_models';
import { MLJobNotFound } from './errors';
import {
MlClient,
MlClientParams,
MlGetADParams,
MlGetDFAParams,
MlGetDatafeedParams,
} from './types';
export function getMlClient(
client: IScopedClusterClient,
jobSavedObjectService: JobSavedObjectService
): MlClient {
const mlClient = client.asInternalUser.ml;
async function jobIdsCheck(jobType: JobType, p: MlClientParams, allowWildcards: boolean = false) {
const jobIds =
jobType === 'anomaly-detector' ? getADJobIdsFromRequest(p) : getDFAJobIdsFromRequest(p);
if (jobIds.length) {
const filteredJobIds = await jobSavedObjectService.filterJobIdsForSpace(jobType, jobIds);
let missingIds = jobIds.filter((j) => filteredJobIds.indexOf(j) === -1);
if (allowWildcards === true && missingIds.join().match('\\*') !== null) {
// filter out wildcard ids from the error
missingIds = missingIds.filter((id) => id.match('\\*') === null);
}
if (missingIds.length) {
throw new MLJobNotFound(`No known job with id '${missingIds.join(',')}'`);
}
}
}
async function groupIdsCheck(p: MlClientParams, allJobs: Job[], filteredJobIds: string[]) {
// if job ids have been specified, we need to check in case any of them are actually
// group ids, which will be unknown to the saved objects.
// find which ids are not group ids and check them.
const ids = getADJobIdsFromRequest(p);
if (ids.length) {
// find all groups from unfiltered jobs
const responseGroupIds = [...new Set(allJobs.map((j) => j.groups ?? []).flat())];
// work out which ids requested are actually groups
const requestedGroupIds = ids.filter((id) => responseGroupIds.includes(id));
// find all groups from filtered jobs
const groupIdsFromFilteredJobs = [
...new Set(
allJobs
.filter((j) => filteredJobIds.includes(j.job_id))
.map((j) => j.groups ?? [])
.flat()
),
];
const groupsIdsThatDidNotMatch = requestedGroupIds.filter(
(id) => groupIdsFromFilteredJobs.includes(id) === false
);
if (groupsIdsThatDidNotMatch.length) {
// is there are group ids which were requested but didn't
// exist in filtered jobs, list them in an error
throw new MLJobNotFound(`No known job with id '${groupsIdsThatDidNotMatch.join(',')}'`);
}
}
}
async function groupIdsCheckFromJobStats(
filteredJobIds: string[],
...p: Parameters<MlClient['getJobStats']>
) {
// similar to groupIdsCheck above, however we need to load the jobs first to get the groups information
const ids = getADJobIdsFromRequest(p);
if (ids.length) {
const { body } = await mlClient.getJobs<{ jobs: Job[] }>(...p);
await groupIdsCheck(p, body.jobs, filteredJobIds);
}
}
async function datafeedIdsCheck(p: MlClientParams, allowWildcards: boolean = false) {
const datafeedIds = getDatafeedIdsFromRequest(p);
if (datafeedIds.length) {
const filteredDatafeedIds = await jobSavedObjectService.filterDatafeedIdsForSpace(
datafeedIds
);
let missingIds = datafeedIds.filter((j) => filteredDatafeedIds.indexOf(j) === -1);
if (allowWildcards === true && missingIds.join().match('\\*') !== null) {
// filter out wildcard ids from the error
missingIds = missingIds.filter((id) => id.match('\\*') === null);
}
if (missingIds.length) {
throw new MLJobNotFound(`No known datafeed with id '${missingIds.join(',')}'`);
}
}
}
async function getFilterTrainedModels(
p: Parameters<MlClient['getTrainedModels']>,
allowWildcards: boolean = false
) {
let configs = [];
try {
const resp = await mlClient.getTrainedModels<InferenceConfigResponse>(...p);
configs = resp.body.trained_model_configs;
} catch (error) {
if (error.statusCode === 404) {
throw new MLJobNotFound(error.body.error.reason);
}
throw error.body ?? error;
}
const modelIds = getTrainedModelIdsFromRequest(p);
const modelJobIds: string[] = configs
.map((m) => m.metadata?.analytics_config.id)
.filter((id) => id !== undefined);
const filteredModelJobIds = await jobSavedObjectService.filterJobIdsForSpace(
'data-frame-analytics',
modelJobIds
);
const filteredConfigs = configs.filter((m) => {
const jobId = m.metadata?.analytics_config.id;
return jobId === undefined || filteredModelJobIds.includes(jobId);
});
const filteredConfigsIds = filteredConfigs.map((c) => c.model_id);
if (modelIds.length > filteredConfigs.length) {
let missingIds = modelIds.filter((j) => filteredConfigsIds.indexOf(j) === -1);
if (allowWildcards === true && missingIds.join().match('\\*') !== null) {
// filter out wildcard ids from the error
missingIds = missingIds.filter((id) => id.match('\\*') === null);
}
if (missingIds.length) {
throw new MLJobNotFound(`No known trained model with model_id [${missingIds.join(',')}]`);
}
}
return filteredConfigs;
}
return {
async closeJob(...p: Parameters<MlClient['closeJob']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.closeJob(...p);
},
async deleteCalendar(...p: Parameters<MlClient['deleteCalendar']>) {
return mlClient.deleteCalendar(...p);
},
async deleteCalendarEvent(...p: Parameters<MlClient['deleteCalendarEvent']>) {
return mlClient.deleteCalendarEvent(...p);
},
async deleteCalendarJob(...p: Parameters<MlClient['deleteCalendarJob']>) {
return mlClient.deleteCalendarJob(...p);
},
async deleteDataFrameAnalytics(...p: Parameters<MlClient['deleteDataFrameAnalytics']>) {
await jobIdsCheck('data-frame-analytics', p);
const resp = await mlClient.deleteDataFrameAnalytics(...p);
// don't delete the job saved object as the real job will not be
// deleted initially and could still fail.
return resp;
},
async deleteDatafeed(...p: any) {
await datafeedIdsCheck(p);
const resp = await mlClient.deleteDatafeed(...p);
const [datafeedId] = getDatafeedIdsFromRequest(p);
if (datafeedId !== undefined) {
await jobSavedObjectService.deleteDatafeed(datafeedId);
}
return resp;
},
async deleteExpiredData(...p: Parameters<MlClient['deleteExpiredData']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.deleteExpiredData(...p);
},
async deleteFilter(...p: Parameters<MlClient['deleteFilter']>) {
return mlClient.deleteFilter(...p);
},
async deleteForecast(...p: Parameters<MlClient['deleteForecast']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.deleteForecast(...p);
},
async deleteJob(...p: Parameters<MlClient['deleteJob']>) {
await jobIdsCheck('anomaly-detector', p);
const resp = await mlClient.deleteJob(...p);
// don't delete the job saved object as the real job will not be
// deleted initially and could still fail.
return resp;
},
async deleteModelSnapshot(...p: Parameters<MlClient['deleteModelSnapshot']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.deleteModelSnapshot(...p);
},
async deleteTrainedModel(...p: Parameters<MlClient['deleteTrainedModel']>) {
await getFilterTrainedModels(p, true);
return mlClient.deleteTrainedModel(...p);
},
async estimateModelMemory(...p: Parameters<MlClient['estimateModelMemory']>) {
return mlClient.estimateModelMemory(...p);
},
async evaluateDataFrame(...p: Parameters<MlClient['evaluateDataFrame']>) {
return mlClient.evaluateDataFrame(...p);
},
async explainDataFrameAnalytics(...p: Parameters<MlClient['explainDataFrameAnalytics']>) {
await jobIdsCheck('data-frame-analytics', p);
return mlClient.explainDataFrameAnalytics(...p);
},
async findFileStructure(...p: Parameters<MlClient['findFileStructure']>) {
return mlClient.findFileStructure(...p);
},
async flushJob(...p: Parameters<MlClient['flushJob']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.flushJob(...p);
},
async forecast(...p: Parameters<MlClient['forecast']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.forecast(...p);
},
async getBuckets(...p: Parameters<MlClient['getBuckets']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.getBuckets(...p);
},
async getCalendarEvents(...p: Parameters<MlClient['getCalendarEvents']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.getCalendarEvents(...p);
},
async getCalendars(...p: Parameters<MlClient['getCalendars']>) {
const { body } = await mlClient.getCalendars<{ calendars: Calendar[] }, any>(...p);
const {
body: { jobs: allJobs },
} = await mlClient.getJobs<{ jobs: Job[] }>();
const allJobIds = allJobs.map((j) => j.job_id);
// flatten the list of all jobs ids and check which ones are valid
const calJobIds = [...new Set(body.calendars.map((c) => c.job_ids).flat())];
// find groups by getting the cal job ids which aren't real jobs.
const groups = calJobIds.filter((j) => allJobIds.includes(j) === false);
// get list of calendar jobs which are allowed in this space
const filteredJobIds = await jobSavedObjectService.filterJobIdsForSpace(
'anomaly-detector',
calJobIds
);
const calendars = body.calendars.map((c) => ({
...c,
job_ids: c.job_ids.filter((id) => filteredJobIds.includes(id) || groups.includes(id)),
total_job_count: calJobIds.length,
}));
return { body: { ...body, calendars } };
},
async getCategories(...p: Parameters<MlClient['getCategories']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.getCategories(...p);
},
async getDataFrameAnalytics(...p: Parameters<MlClient['getDataFrameAnalytics']>) {
await jobIdsCheck('data-frame-analytics', p, true);
try {
const { body } = await mlClient.getDataFrameAnalytics<{
data_frame_analytics: DataFrameAnalyticsConfig[];
}>(...p);
const jobs = await jobSavedObjectService.filterJobsForSpace<DataFrameAnalyticsConfig>(
'data-frame-analytics',
body.data_frame_analytics,
'id'
);
return { body: { ...body, count: jobs.length, data_frame_analytics: jobs } };
} catch (error) {
if (error.statusCode === 404) {
throw new MLJobNotFound(error.body.error.reason);
}
throw error.body ?? error;
}
},
async getDataFrameAnalyticsStats(...p: Parameters<MlClient['getDataFrameAnalyticsStats']>) {
// this should use DataFrameAnalyticsStats, but needs a refactor to move DataFrameAnalyticsStats to common
await jobIdsCheck('data-frame-analytics', p, true);
try {
const { body } = await mlClient.getDataFrameAnalyticsStats<{
data_frame_analytics: DataFrameAnalyticsConfig[];
}>(...p);
const jobs = await jobSavedObjectService.filterJobsForSpace<DataFrameAnalyticsConfig>(
'data-frame-analytics',
body.data_frame_analytics,
'id'
);
return { body: { ...body, count: jobs.length, data_frame_analytics: jobs } };
} catch (error) {
if (error.statusCode === 404) {
throw new MLJobNotFound(error.body.error.reason);
}
throw error.body ?? error;
}
},
async getDatafeedStats(...p: Parameters<MlClient['getDatafeedStats']>) {
await datafeedIdsCheck(p, true);
try {
const { body } = await mlClient.getDatafeedStats<{ datafeeds: DatafeedStats[] }>(...p);
const datafeeds = await jobSavedObjectService.filterDatafeedsForSpace<DatafeedStats>(
'anomaly-detector',
body.datafeeds,
'datafeed_id'
);
return { body: { ...body, count: datafeeds.length, datafeeds } };
} catch (error) {
if (error.statusCode === 404) {
throw new MLJobNotFound(error.body.error.reason);
}
throw error.body ?? error;
}
},
async getDatafeeds(...p: Parameters<MlClient['getDatafeeds']>) {
await datafeedIdsCheck(p, true);
try {
const { body } = await mlClient.getDatafeeds<{ datafeeds: Datafeed[] }>(...p);
const datafeeds = await jobSavedObjectService.filterDatafeedsForSpace<Datafeed>(
'anomaly-detector',
body.datafeeds,
'datafeed_id'
);
return { body: { ...body, count: datafeeds.length, datafeeds } };
} catch (error) {
if (error.statusCode === 404) {
throw new MLJobNotFound(error.body.error.reason);
}
throw error.body ?? error;
}
},
async getFilters(...p: Parameters<MlClient['getFilters']>) {
return mlClient.getFilters(...p);
},
async getInfluencers(...p: Parameters<MlClient['getInfluencers']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.getInfluencers(...p);
},
async getJobStats(...p: Parameters<MlClient['getJobStats']>) {
try {
const { body } = await mlClient.getJobStats<{ jobs: JobStats[] }>(...p);
const jobs = await jobSavedObjectService.filterJobsForSpace<JobStats>(
'anomaly-detector',
body.jobs,
'job_id'
);
await groupIdsCheckFromJobStats(
jobs.map((j) => j.job_id),
...p
);
return { body: { ...body, count: jobs.length, jobs } };
} catch (error) {
if (error instanceof MLJobNotFound) {
throw error;
}
if (error.statusCode === 404) {
throw new MLJobNotFound(error.body.error.reason);
}
throw error.body ?? error;
}
},
async getJobs(...p: Parameters<MlClient['getJobs']>) {
try {
const { body } = await mlClient.getJobs<{ jobs: Job[] }>(...p);
const jobs = await jobSavedObjectService.filterJobsForSpace<Job>(
'anomaly-detector',
body.jobs,
'job_id'
);
await groupIdsCheck(
p,
body.jobs,
jobs.map((j) => j.job_id)
);
return { body: { ...body, count: jobs.length, jobs } };
} catch (error) {
if (error instanceof MLJobNotFound) {
throw error;
}
if (error.statusCode === 404) {
throw new MLJobNotFound(error.body.error.reason);
}
throw error.body ?? error;
}
},
async getModelSnapshots(...p: Parameters<MlClient['getModelSnapshots']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.getModelSnapshots(...p);
},
async getOverallBuckets(...p: Parameters<MlClient['getOverallBuckets']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.getOverallBuckets(...p);
},
async getRecords(...p: Parameters<MlClient['getRecords']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.getRecords(...p);
},
async getTrainedModels(...p: Parameters<MlClient['getTrainedModels']>) {
const models = await getFilterTrainedModels(p, true);
return { body: { trained_model_configs: models } };
},
async getTrainedModelsStats(...p: Parameters<MlClient['getTrainedModelsStats']>) {
await getFilterTrainedModels(p, true);
const models = await getFilterTrainedModels(p);
const filteredModelIds = models.map((m) => m.model_id);
const { body: allModelStats } = await mlClient.getTrainedModelsStats<{
trained_model_stats: TrainedModelStat[];
}>(...p);
const modelStats = allModelStats.trained_model_stats.filter((m) =>
filteredModelIds.includes(m.model_id)
);
return { body: { trained_model_stats: modelStats } };
},
async info(...p: Parameters<MlClient['info']>) {
return mlClient.info(...p);
},
async openJob(...p: Parameters<MlClient['openJob']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.openJob(...p);
},
async postCalendarEvents(...p: Parameters<MlClient['postCalendarEvents']>) {
return mlClient.postCalendarEvents(...p);
},
async postData(...p: Parameters<MlClient['postData']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.postData(...p);
},
async previewDatafeed(...p: Parameters<MlClient['previewDatafeed']>) {
await datafeedIdsCheck(p);
return mlClient.previewDatafeed(...p);
},
async putCalendar(...p: Parameters<MlClient['putCalendar']>) {
return mlClient.putCalendar(...p);
},
async putCalendarJob(...p: Parameters<MlClient['putCalendarJob']>) {
return mlClient.putCalendarJob(...p);
},
async putDataFrameAnalytics(...p: Parameters<MlClient['putDataFrameAnalytics']>) {
const resp = await mlClient.putDataFrameAnalytics(...p);
const [analyticsId] = getDFAJobIdsFromRequest(p);
if (analyticsId !== undefined) {
await jobSavedObjectService.createDataFrameAnalyticsJob(analyticsId);
}
return resp;
},
async putDatafeed(...p: Parameters<MlClient['putDatafeed']>) {
const resp = await mlClient.putDatafeed(...p);
const [datafeedId] = getDatafeedIdsFromRequest(p);
const jobId = getJobIdFromBody(p);
if (datafeedId !== undefined && jobId !== undefined) {
await jobSavedObjectService.addDatafeed(datafeedId, jobId);
}
return resp;
},
async putFilter(...p: Parameters<MlClient['putFilter']>) {
return mlClient.putFilter(...p);
},
async putJob(...p: Parameters<MlClient['putJob']>) {
const resp = await mlClient.putJob(...p);
const [jobId] = getADJobIdsFromRequest(p);
if (jobId !== undefined) {
await jobSavedObjectService.createAnomalyDetectionJob(jobId);
}
return resp;
},
async putTrainedModel(...p: Parameters<MlClient['putTrainedModel']>) {
return mlClient.putTrainedModel(...p);
},
async revertModelSnapshot(...p: Parameters<MlClient['revertModelSnapshot']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.revertModelSnapshot(...p);
},
async setUpgradeMode(...p: Parameters<MlClient['setUpgradeMode']>) {
return mlClient.setUpgradeMode(...p);
},
async startDataFrameAnalytics(...p: Parameters<MlClient['startDataFrameAnalytics']>) {
await jobIdsCheck('data-frame-analytics', p);
return mlClient.startDataFrameAnalytics(...p);
},
async startDatafeed(...p: Parameters<MlClient['startDatafeed']>) {
await datafeedIdsCheck(p);
return mlClient.startDatafeed(...p);
},
async stopDataFrameAnalytics(...p: Parameters<MlClient['stopDataFrameAnalytics']>) {
await jobIdsCheck('data-frame-analytics', p);
return mlClient.stopDataFrameAnalytics(...p);
},
async stopDatafeed(...p: Parameters<MlClient['stopDatafeed']>) {
await datafeedIdsCheck(p);
return mlClient.stopDatafeed(...p);
},
async updateDataFrameAnalytics(...p: Parameters<MlClient['updateDataFrameAnalytics']>) {
await jobIdsCheck('data-frame-analytics', p);
return mlClient.updateDataFrameAnalytics(...p);
},
async updateDatafeed(...p: Parameters<MlClient['updateDatafeed']>) {
await datafeedIdsCheck(p);
return mlClient.updateDatafeed(...p);
},
async updateFilter(...p: Parameters<MlClient['updateFilter']>) {
return mlClient.updateFilter(...p);
},
async updateJob(...p: Parameters<MlClient['updateJob']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.updateJob(...p);
},
async updateModelSnapshot(...p: Parameters<MlClient['updateModelSnapshot']>) {
await jobIdsCheck('anomaly-detector', p);
return mlClient.updateModelSnapshot(...p);
},
async validate(...p: Parameters<MlClient['validate']>) {
return mlClient.validate(...p);
},
async validateDetector(...p: Parameters<MlClient['validateDetector']>) {
return mlClient.validateDetector(...p);
},
...searchProvider(client, jobSavedObjectService),
} as MlClient;
}
function getDFAJobIdsFromRequest([params]: MlGetDFAParams): string[] {
const ids = params?.id?.split(',');
return ids || [];
}
function getADJobIdsFromRequest([params]: MlGetADParams): string[] {
const ids = params?.job_id?.split(',');
return ids || [];
}
function getDatafeedIdsFromRequest([params]: MlGetDatafeedParams): string[] {
const ids = params?.datafeed_id?.split(',');
return ids || [];
}
function getJobIdFromBody(p: any): string | undefined {
const [params] = p;
return params?.body?.job_id;
}
function getTrainedModelIdsFromRequest(p: any): string[] {
const [params] = p;
const ids = params?.model_id?.split(',');
return ids || [];
}
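Every wrapped method above follows the same pattern: read endpoints filter their responses through jobSavedObjectService, write endpoints create or remove the matching ML saved object, and the ID checks raise MLJobNotFound for jobs not assigned to the current space. A minimal consumer sketch follows; the import paths and helper name are illustrative, not part of this change.

import type { IScopedClusterClient } from 'kibana/server';
import type { JobSavedObjectService } from '../../saved_objects';
import { getMlClient } from './ml_client';

// Hypothetical helper: list the anomaly detection jobs visible in the current
// space and open the first one. getJobs and openJob are the wrapped,
// space aware versions defined above.
export async function openFirstVisibleJob(
  client: IScopedClusterClient,
  jobSavedObjectService: JobSavedObjectService
) {
  const mlClient = getMlClient(client, jobSavedObjectService);
  const { body } = await mlClient.getJobs();
  if (body.count > 0) {
    await mlClient.openJob({ job_id: body.jobs[0].job_id });
  }
  return body.jobs.map((job: { job_id: string }) => job.job_id);
}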

View file

@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import Boom from '@hapi/boom';
import { IScopedClusterClient } from 'kibana/server';
import { RequestParams, ApiResponse } from '@elastic/elasticsearch';
import { JobSavedObjectService } from '../../saved_objects';
import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns';
import type { SearchResponse7 } from '../../../common/types/es_client';
import type { JobType } from '../../../common/types/saved_objects';
export function searchProvider(
client: IScopedClusterClient,
jobSavedObjectService: JobSavedObjectService
) {
async function jobIdsCheck(jobType: JobType, jobIds: string[]) {
if (jobIds.length) {
const filteredJobIds = await jobSavedObjectService.filterJobIdsForSpace(jobType, jobIds);
const missingIds = jobIds.filter((j) => filteredJobIds.indexOf(j) === -1);
if (missingIds.length) {
throw Boom.notFound(`${missingIds.join(',')} missing`);
}
}
}
async function anomalySearch<T>(
searchParams: RequestParams.Search<any>,
jobIds: string[]
): Promise<ApiResponse<SearchResponse7<T>>> {
await jobIdsCheck('anomaly-detector', jobIds);
const { asInternalUser } = client;
const resp = await asInternalUser.search<SearchResponse7<T>>({
...searchParams,
index: ML_RESULTS_INDEX_PATTERN,
});
return resp;
}
return { anomalySearch };
}
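A usage sketch (job id and document shape are illustrative): anomalySearch checks that every id in the second argument is assigned to the current space before querying the shared results index, so callers never supply an index name themselves.

// Assumes `client` and `jobSavedObjectService` are already in scope, as they
// are in the routes that consume this provider.
const { anomalySearch } = searchProvider(client, jobSavedObjectService);

const { body } = await anomalySearch<{ record_score: number }>(
  {
    size: 10,
    body: {
      query: {
        bool: {
          filter: [{ term: { job_id: 'my_job' } }, { term: { result_type: 'record' } }],
        },
      },
    },
  },
  ['my_job'] // a 404 is thrown if 'my_job' is not assigned to this space
);
const records = body.hits.hits.map((hit) => hit._source);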

View file

@ -0,0 +1,86 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { ElasticsearchClient } from 'kibana/server';
import { searchProvider } from './search';
type OrigMlClient = ElasticsearchClient['ml'];
export interface MlClient extends OrigMlClient {
anomalySearch: ReturnType<typeof searchProvider>['anomalySearch'];
}
export type MlClientParams =
| Parameters<MlClient['closeJob']>
| Parameters<MlClient['deleteCalendar']>
| Parameters<MlClient['deleteCalendarEvent']>
| Parameters<MlClient['deleteCalendarJob']>
| Parameters<MlClient['deleteDataFrameAnalytics']>
| Parameters<MlClient['deleteExpiredData']>
| Parameters<MlClient['deleteFilter']>
| Parameters<MlClient['deleteForecast']>
| Parameters<MlClient['deleteJob']>
| Parameters<MlClient['deleteModelSnapshot']>
| Parameters<MlClient['deleteTrainedModel']>
| Parameters<MlClient['estimateModelMemory']>
| Parameters<MlClient['evaluateDataFrame']>
| Parameters<MlClient['explainDataFrameAnalytics']>
| Parameters<MlClient['findFileStructure']>
| Parameters<MlClient['flushJob']>
| Parameters<MlClient['forecast']>
| Parameters<MlClient['getBuckets']>
| Parameters<MlClient['getCalendarEvents']>
| Parameters<MlClient['getCalendars']>
| Parameters<MlClient['getCategories']>
| Parameters<MlClient['getDataFrameAnalytics']>
| Parameters<MlClient['getDataFrameAnalyticsStats']>
| Parameters<MlClient['getDatafeedStats']>
| Parameters<MlClient['getDatafeeds']>
| Parameters<MlClient['getFilters']>
| Parameters<MlClient['getInfluencers']>
| Parameters<MlClient['getJobStats']>
| Parameters<MlClient['getJobs']>
| Parameters<MlClient['getModelSnapshots']>
| Parameters<MlClient['getOverallBuckets']>
| Parameters<MlClient['getRecords']>
| Parameters<MlClient['getTrainedModels']>
| Parameters<MlClient['getTrainedModelsStats']>
| Parameters<MlClient['info']>
| Parameters<MlClient['openJob']>
| Parameters<MlClient['postCalendarEvents']>
| Parameters<MlClient['postData']>
| Parameters<MlClient['previewDatafeed']>
| Parameters<MlClient['putCalendar']>
| Parameters<MlClient['putCalendarJob']>
| Parameters<MlClient['putDataFrameAnalytics']>
| Parameters<MlClient['putDatafeed']>
| Parameters<MlClient['putFilter']>
| Parameters<MlClient['putJob']>
| Parameters<MlClient['putTrainedModel']>
| Parameters<MlClient['revertModelSnapshot']>
| Parameters<MlClient['setUpgradeMode']>
| Parameters<MlClient['startDataFrameAnalytics']>
| Parameters<MlClient['startDatafeed']>
| Parameters<MlClient['stopDataFrameAnalytics']>
| Parameters<MlClient['stopDatafeed']>
| Parameters<MlClient['updateDataFrameAnalytics']>
| Parameters<MlClient['updateDatafeed']>
| Parameters<MlClient['updateFilter']>
| Parameters<MlClient['updateJob']>
| Parameters<MlClient['updateModelSnapshot']>
| Parameters<MlClient['validate']>
| Parameters<MlClient['validateDetector']>;
export type MlGetADParams = Parameters<MlClient['getJobStats']> | Parameters<MlClient['getJobs']>;
export type MlGetDatafeedParams =
| Parameters<MlClient['getDatafeedStats']>
| Parameters<MlClient['getDatafeeds']>;
export type MlGetDFAParams =
| Parameters<MlClient['getDataFrameAnalytics']>
| Parameters<MlClient['getDataFrameAnalyticsStats']>
| Parameters<MlClient['putDataFrameAnalytics']>;
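These parameter unions let ID-extraction helpers be written once against the whole client surface. A sketch of such a helper (the function name is illustrative; the real implementations live alongside getMlClient):

function extractAnomalyDetectorIds(p: MlGetADParams): string[] {
  // Both getJobs and getJobStats accept an optional { job_id } parameter that
  // may hold a comma separated list of ids.
  const [params] = p;
  return params?.job_id?.split(',') ?? [];
}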

View file

@ -0,0 +1,78 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import {
KibanaRequest,
KibanaResponseFactory,
RequestHandlerContext,
IScopedClusterClient,
RequestHandler,
SavedObjectsClientContract,
} from 'kibana/server';
import { jobSavedObjectServiceFactory, JobSavedObjectService } from '../saved_objects';
import { MlLicense } from '../../common/license';
import { MlClient, getMlClient } from '../lib/ml_client';
type Handler = (handlerParams: {
client: IScopedClusterClient;
request: KibanaRequest<any, any, any, any>;
response: KibanaResponseFactory;
context: RequestHandlerContext;
jobSavedObjectService: JobSavedObjectService;
mlClient: MlClient;
}) => ReturnType<RequestHandler>;
type GetMlSavedObjectClient = (request: KibanaRequest) => SavedObjectsClientContract | null;
export class RouteGuard {
private _mlLicense: MlLicense;
private _getMlSavedObjectClient: GetMlSavedObjectClient;
constructor(mlLicense: MlLicense, getSavedObject: GetMlSavedObjectClient) {
this._mlLicense = mlLicense;
this._getMlSavedObjectClient = getSavedObject;
}
public fullLicenseAPIGuard(handler: Handler) {
return this._guard(() => this._mlLicense.isFullLicense(), handler);
}
public basicLicenseAPIGuard(handler: Handler) {
return this._guard(() => this._mlLicense.isMinimumLicense(), handler);
}
private _guard(check: () => boolean, handler: Handler) {
return (
context: RequestHandlerContext,
request: KibanaRequest<any, any, any, any>,
response: KibanaResponseFactory
) => {
if (check() === false) {
return response.forbidden();
}
const mlSavedObjectClient = this._getMlSavedObjectClient(request);
if (mlSavedObjectClient === null) {
return response.badRequest({
body: { message: 'saved object client has not been initialized' },
});
}
const jobSavedObjectService = jobSavedObjectServiceFactory(mlSavedObjectClient);
const client = context.core.elasticsearch.client;
return handler({
client,
request,
response,
context,
jobSavedObjectService,
mlClient: getMlClient(client, jobSavedObjectService),
});
};
}
}
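A sketch of registering a route behind the guard; the `router` and `routeGuard` instances are assumed to come from the plugin setup and are illustrative here. A failed license check returns 403, a missing ML saved objects client returns 400, and otherwise the handler receives the space aware mlClient.

router.get(
  { path: '/api/ml/example/jobs', validate: false },
  routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
    // mlClient is already wrapped by getMlClient, so the job list is
    // filtered to the jobs assigned to the current space.
    const { body } = await mlClient.getJobs();
    return response.ok({ body });
  })
);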

View file

@ -10,6 +10,7 @@ import { MLCATEGORY } from '../../../common/constants/field_types';
import { AnalysisConfig } from '../../../common/types/anomaly_detection_jobs';
import { fieldsServiceProvider } from '../fields_service';
import { MlInfoResponse } from '../../../common/types/ml_server_info';
import type { MlClient } from '../../lib/ml_client';
export interface ModelMemoryEstimationResult {
/**
@ -123,8 +124,10 @@ const cardinalityCheckProvider = (client: IScopedClusterClient) => {
};
};
export function calculateModelMemoryLimitProvider(client: IScopedClusterClient) {
const { asInternalUser } = client;
export function calculateModelMemoryLimitProvider(
client: IScopedClusterClient,
mlClient: MlClient
) {
const getCardinalities = cardinalityCheckProvider(client);
/**
@ -141,7 +144,7 @@ export function calculateModelMemoryLimitProvider(client: IScopedClusterClient)
latestMs: number,
allowMMLGreaterThanMax = false
): Promise<ModelMemoryEstimationResult> {
const { body: info } = await asInternalUser.ml.info<MlInfoResponse>();
const { body: info } = await mlClient.info<MlInfoResponse>();
const maxModelMemoryLimit = info.limits.max_model_memory_limit?.toUpperCase();
const effectiveMaxModelMemoryLimit = info.limits.effective_max_model_memory_limit?.toUpperCase();
@ -154,7 +157,7 @@ export function calculateModelMemoryLimitProvider(client: IScopedClusterClient)
latestMs
);
const { body } = await asInternalUser.ml.estimateModelMemory<ModelMemoryEstimateResponse>({
const { body } = await mlClient.estimateModelMemory<ModelMemoryEstimateResponse>({
body: {
analysis_config: analysisConfig,
overall_cardinality: overallCardinality,

View file

@ -5,8 +5,8 @@
*/
import { difference } from 'lodash';
import { IScopedClusterClient } from 'kibana/server';
import { EventManager, CalendarEvent } from './event_manager';
import type { MlClient } from '../../lib/ml_client';
interface BasicCalendar {
job_ids: string[];
@ -23,16 +23,16 @@ export interface FormCalendar extends BasicCalendar {
}
export class CalendarManager {
private _asInternalUser: IScopedClusterClient['asInternalUser'];
private _mlClient: MlClient;
private _eventManager: EventManager;
constructor(client: IScopedClusterClient) {
this._asInternalUser = client.asInternalUser;
this._eventManager = new EventManager(client);
constructor(mlClient: MlClient) {
this._mlClient = mlClient;
this._eventManager = new EventManager(mlClient);
}
async getCalendar(calendarId: string) {
const { body } = await this._asInternalUser.ml.getCalendars({
const { body } = await this._mlClient.getCalendars({
calendar_id: calendarId,
});
@ -43,7 +43,7 @@ export class CalendarManager {
}
async getAllCalendars() {
const { body } = await this._asInternalUser.ml.getCalendars({ size: 1000 });
const { body } = await this._mlClient.getCalendars({ size: 1000 });
const events: CalendarEvent[] = await this._eventManager.getAllEvents();
const calendars: Calendar[] = body.calendars;
@ -71,7 +71,7 @@ export class CalendarManager {
async newCalendar(calendar: FormCalendar) {
const { calendarId, events, ...newCalendar } = calendar;
await this._asInternalUser.ml.putCalendar({
await this._mlClient.putCalendar({
calendar_id: calendarId,
body: newCalendar,
});
@ -106,7 +106,7 @@ export class CalendarManager {
// add all new jobs
if (jobsToAdd.length) {
await this._asInternalUser.ml.putCalendarJob({
await this._mlClient.putCalendarJob({
calendar_id: calendarId,
job_id: jobsToAdd.join(','),
});
@ -114,7 +114,7 @@ export class CalendarManager {
// remove all removed jobs
if (jobsToRemove.length) {
await this._asInternalUser.ml.deleteCalendarJob({
await this._mlClient.deleteCalendarJob({
calendar_id: calendarId,
job_id: jobsToRemove.join(','),
});
@ -137,7 +137,7 @@ export class CalendarManager {
}
async deleteCalendar(calendarId: string) {
const { body } = await this._asInternalUser.ml.deleteCalendar({ calendar_id: calendarId });
const { body } = await this._mlClient.deleteCalendar({ calendar_id: calendarId });
return body;
}
}
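Usage is unchanged apart from the constructor argument. A brief sketch with illustrative values; see FormCalendar above for the expected shape (fields other than calendarId, job_ids and events are assumptions):

const calendarManager = new CalendarManager(mlClient);
await calendarManager.newCalendar({
  calendarId: 'maintenance-windows',
  job_ids: ['job-1', 'job-2'],
  description: 'Planned maintenance windows',
  events: [],
});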

View file

@ -4,8 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import { GLOBAL_CALENDAR } from '../../../common/constants/calendars';
import type { MlClient } from '../../lib/ml_client';
export interface CalendarEvent {
calendar_id?: string;
@ -16,13 +16,13 @@ export interface CalendarEvent {
}
export class EventManager {
private _asInternalUser: IScopedClusterClient['asInternalUser'];
constructor({ asInternalUser }: IScopedClusterClient) {
this._asInternalUser = asInternalUser;
private _mlClient: MlClient;
constructor(mlClient: MlClient) {
this._mlClient = mlClient;
}
async getCalendarEvents(calendarId: string) {
const { body } = await this._asInternalUser.ml.getCalendarEvents({ calendar_id: calendarId });
const { body } = await this._mlClient.getCalendarEvents({ calendar_id: calendarId });
return body.events;
}
@ -30,7 +30,7 @@ export class EventManager {
// jobId is optional
async getAllEvents(jobId?: string) {
const calendarId = GLOBAL_CALENDAR;
const { body } = await this._asInternalUser.ml.getCalendarEvents({
const { body } = await this._mlClient.getCalendarEvents({
calendar_id: calendarId,
job_id: jobId,
});
@ -41,14 +41,14 @@ export class EventManager {
async addEvents(calendarId: string, events: CalendarEvent[]) {
const body = { events };
return await this._asInternalUser.ml.postCalendarEvents({
return await this._mlClient.postCalendarEvents({
calendar_id: calendarId,
body,
});
}
async deleteEvent(calendarId: string, eventId: string) {
return this._asInternalUser.ml.deleteCalendarEvent({
return this._mlClient.deleteCalendarEvent({
calendar_id: calendarId,
event_id: eventId,
});
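As with the calendar manager, the event manager now takes the MlClient directly, for example:

const eventManager = new EventManager(mlClient);
// Fetch every event on the global calendar that applies to one job.
const events = await eventManager.getAllEvents('my_job');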

View file

@ -11,14 +11,15 @@ import {
isRegressionAnalysis,
} from '../../../common/util/analytics_utils';
import { DEFAULT_RESULTS_FIELD } from '../../../common/constants/data_frame_analytics';
import type { MlClient } from '../../lib/ml_client';
// Obtains data for the data frame analytics feature importance functionalities
// such as baseline, decision paths, or importance summary.
export function analyticsFeatureImportanceProvider({
asInternalUser,
asCurrentUser,
}: IScopedClusterClient) {
export function analyticsFeatureImportanceProvider(
{ asCurrentUser }: IScopedClusterClient,
mlClient: MlClient
) {
async function getRegressionAnalyticsBaseline(analyticsId: string): Promise<number | undefined> {
const { body } = await asInternalUser.ml.getDataFrameAnalytics({
const { body } = await mlClient.getDataFrameAnalytics({
id: analyticsId,
});
const jobConfig = body.data_frame_analytics[0];

View file

@ -7,6 +7,7 @@
import { SavedObjectsClientContract, KibanaRequest, IScopedClusterClient } from 'kibana/server';
import { Module } from '../../../common/types/modules';
import { DataRecognizer } from '../data_recognizer';
import type { MlClient } from '../../lib/ml_client';
const callAs = () => Promise.resolve({ body: {} });
@ -15,9 +16,12 @@ const mlClusterClient = ({
asInternalUser: callAs,
} as unknown) as IScopedClusterClient;
const mlClient = (callAs as unknown) as MlClient;
describe('ML - data recognizer', () => {
const dr = new DataRecognizer(
mlClusterClient,
mlClient,
({
find: jest.fn(),
bulkCreate: jest.fn(),

View file

@ -14,6 +14,7 @@ import { merge } from 'lodash';
import { AnalysisLimits } from '../../../common/types/anomaly_detection_jobs';
import { getAuthorizationHeader } from '../../lib/request_authorization';
import { MlInfoResponse } from '../../../common/types/ml_server_info';
import type { MlClient } from '../../lib/ml_client';
import {
KibanaObjects,
KibanaObjectConfig,
@ -104,13 +105,19 @@ interface SaveResults {
}
export class DataRecognizer {
private _asCurrentUser: IScopedClusterClient['asCurrentUser'];
private _asInternalUser: IScopedClusterClient['asInternalUser'];
private _client: IScopedClusterClient;
private _mlClient: MlClient;
private _savedObjectsClient: SavedObjectsClientContract;
private _authorizationHeader: object;
private _modulesDir = `${__dirname}/modules`;
private _indexPatternName: string = '';
private _indexPatternId: string | undefined = undefined;
private _jobsService: ReturnType<typeof jobServiceProvider>;
private _resultsService: ReturnType<typeof resultsServiceProvider>;
private _calculateModelMemoryLimit: ReturnType<typeof calculateModelMemoryLimitProvider>;
/**
* List of the module jobs that require model memory estimation
*/
@ -118,13 +125,17 @@ export class DataRecognizer {
constructor(
mlClusterClient: IScopedClusterClient,
private savedObjectsClient: SavedObjectsClientContract,
mlClient: MlClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest
) {
this._client = mlClusterClient;
this._asCurrentUser = mlClusterClient.asCurrentUser;
this._asInternalUser = mlClusterClient.asInternalUser;
this._mlClient = mlClient;
this._savedObjectsClient = savedObjectsClient;
this._authorizationHeader = getAuthorizationHeader(request);
this._jobsService = jobServiceProvider(mlClusterClient, mlClient);
this._resultsService = resultsServiceProvider(mlClient);
this._calculateModelMemoryLimit = calculateModelMemoryLimitProvider(mlClusterClient, mlClient);
}
// list all directories under the given directory
@ -246,7 +257,7 @@ export class DataRecognizer {
query: moduleConfig.query,
};
const { body } = await this._asCurrentUser.search({
const { body } = await this._client.asCurrentUser.search({
index,
size,
body: searchBody,
@ -510,8 +521,7 @@ export class DataRecognizer {
// Add a wildcard at the front of each of the job IDs in the module,
// as a prefix may have been supplied when creating the jobs in the module.
const jobIds = module.jobs.map((job) => `*${job.id}`);
const { jobsExist } = jobServiceProvider(this._client);
const jobInfo = await jobsExist(jobIds);
const jobInfo = await this._jobsService.jobsExist(jobIds);
// Check if the value for any of the jobs is false.
const doJobsExist = Object.values(jobInfo).includes(false) === false;
@ -519,14 +529,15 @@ export class DataRecognizer {
if (doJobsExist === true) {
// Get the IDs of the jobs created from the module, and their earliest / latest timestamps.
const { body } = await this._asInternalUser.ml.getJobStats<MlJobsStatsResponse>({
const { body } = await this._mlClient.getJobStats<MlJobsStatsResponse>({
job_id: jobIds.join(),
});
const jobStatsJobs: JobStat[] = [];
if (body.jobs && body.jobs.length > 0) {
const foundJobIds = body.jobs.map((job) => job.job_id);
const { getLatestBucketTimestampByJob } = resultsServiceProvider(this._client);
const latestBucketTimestampsByJob = await getLatestBucketTimestampByJob(foundJobIds);
const latestBucketTimestampsByJob = await this._resultsService.getLatestBucketTimestampByJob(
foundJobIds
);
body.jobs.forEach((job) => {
const jobStat = {
@ -552,7 +563,7 @@ export class DataRecognizer {
}
async loadIndexPatterns() {
return await this.savedObjectsClient.find<IndexPatternAttributes>({
return await this._savedObjectsClient.find<IndexPatternAttributes>({
type: 'index-pattern',
perPage: 1000,
});
@ -663,7 +674,7 @@ export class DataRecognizer {
// find all existing savedObjects for a given type
loadExistingSavedObjects(type: string) {
// TODO: define saved object type
return this.savedObjectsClient.find<any>({ type, perPage: 1000 });
return this._savedObjectsClient.find<any>({ type, perPage: 1000 });
}
// save the savedObjects if they do not exist already
@ -673,7 +684,7 @@ export class DataRecognizer {
.filter((o) => o.exists === false)
.map((o) => o.savedObject!);
if (filteredSavedObjects.length) {
results = await this.savedObjectsClient.bulkCreate(
results = await this._savedObjectsClient.bulkCreate(
// Add an empty migrationVersion attribute to each saved object to ensure
// it is automatically migrated to the 7.0+ format with a references attribute.
filteredSavedObjects.map((doc) => ({
@ -704,7 +715,7 @@ export class DataRecognizer {
}
async saveJob(job: ModuleJob) {
return this._asInternalUser.ml.putJob({ job_id: job.id, body: job.config });
return this._mlClient.putJob({ job_id: job.id, body: job.config });
}
// save the datafeeds.
@ -724,7 +735,7 @@ export class DataRecognizer {
}
async saveDatafeed(datafeed: ModuleDatafeed) {
return this._asInternalUser.ml.putDatafeed(
return this._mlClient.putDatafeed(
{
datafeed_id: datafeed.id,
body: datafeed.config,
@ -753,7 +764,7 @@ export class DataRecognizer {
const result = { started: false } as DatafeedResponse;
let opened = false;
try {
const { body } = await this._asInternalUser.ml.openJob({
const { body } = await this._mlClient.openJob({
job_id: datafeed.config.job_id,
});
opened = body.opened;
@ -777,7 +788,7 @@ export class DataRecognizer {
duration.end = (end as unknown) as string;
}
await this._asInternalUser.ml.startDatafeed({
await this._mlClient.startDatafeed({
datafeed_id: datafeed.id,
...duration,
});
@ -1017,8 +1028,6 @@ export class DataRecognizer {
if (estimateMML && this.jobsForModelMemoryEstimation.length > 0) {
try {
const calculateModelMemoryLimit = calculateModelMemoryLimitProvider(this._client);
// Checks if all jobs in the module have the same time field configured
const firstJobTimeField = this.jobsForModelMemoryEstimation[0].job.config.data_description
.time_field;
@ -1050,7 +1059,7 @@ export class DataRecognizer {
latestMs = timeFieldRange.end;
}
const { modelMemoryLimit } = await calculateModelMemoryLimit(
const { modelMemoryLimit } = await this._calculateModelMemoryLimit(
job.config.analysis_config,
this._indexPatternName,
query,
@ -1072,7 +1081,7 @@ export class DataRecognizer {
const {
body: { limits },
} = await this._asInternalUser.ml.info<MlInfoResponse>();
} = await this._mlClient.info<MlInfoResponse>();
const maxMml = limits.max_model_memory_limit;
if (!maxMml) {

View file

@ -4,20 +4,20 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import {
AnalysisResult,
FormattedOverrides,
InputOverrides,
FindFileStructureResponse,
} from '../../../common/types/file_datavisualizer';
import type { MlClient } from '../../lib/ml_client';
export type InputData = any[];
export function fileDataVisualizerProvider({ asInternalUser }: IScopedClusterClient) {
export function fileDataVisualizerProvider(mlClient: MlClient) {
async function analyzeFile(data: InputData, overrides: InputOverrides): Promise<AnalysisResult> {
overrides.explain = overrides.explain === undefined ? 'true' : overrides.explain;
const { body } = await asInternalUser.ml.findFileStructure<FindFileStructureResponse>({
const { body } = await mlClient.findFileStructure<FindFileStructureResponse>({
body: data,
...overrides,
});
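A small usage sketch (the sample lines are illustrative): analyzeFile forwards the raw data plus any overrides to findFileStructure through the space aware client and returns the analysis result.

const { analyzeFile } = fileDataVisualizerProvider(mlClient);
const sample = ['{"message":"hello"}', '{"message":"world"}'];
const analysis = await analyzeFile(sample, {});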

View file

@ -5,7 +5,7 @@
*/
import Boom from '@hapi/boom';
import { IScopedClusterClient } from 'kibana/server';
import type { MlClient } from '../../lib/ml_client';
import { DetectorRule, DetectorRuleScope } from '../../../common/types/detector_rules';
@ -58,17 +58,14 @@ interface PartialJob {
}
export class FilterManager {
private _asInternalUser: IScopedClusterClient['asInternalUser'];
constructor({ asInternalUser }: IScopedClusterClient) {
this._asInternalUser = asInternalUser;
}
constructor(private _mlClient: MlClient) {}
async getFilter(filterId: string) {
try {
const [JOBS, FILTERS] = [0, 1];
const results = await Promise.all([
this._asInternalUser.ml.getJobs(),
this._asInternalUser.ml.getFilters({ filter_id: filterId }),
this._mlClient.getJobs(),
this._mlClient.getFilters({ filter_id: filterId }),
]);
if (results[FILTERS] && results[FILTERS].body.filters.length) {
@ -90,7 +87,7 @@ export class FilterManager {
async getAllFilters() {
try {
const { body } = await this._asInternalUser.ml.getFilters({ size: 1000 });
const { body } = await this._mlClient.getFilters({ size: 1000 });
return body.filters;
} catch (error) {
throw Boom.badRequest(error);
@ -101,8 +98,8 @@ export class FilterManager {
try {
const [JOBS, FILTERS] = [0, 1];
const results = await Promise.all([
this._asInternalUser.ml.getJobs(),
this._asInternalUser.ml.getFilters({ size: 1000 }),
this._mlClient.getJobs(),
this._mlClient.getFilters({ size: 1000 }),
]);
// Build a map of filter_ids against jobs and detectors using that filter.
@ -139,7 +136,7 @@ export class FilterManager {
const { filterId, ...body } = filter;
try {
// Returns the newly created filter.
const { body: resp } = await this._asInternalUser.ml.putFilter({ filter_id: filterId, body });
const { body: resp } = await this._mlClient.putFilter({ filter_id: filterId, body });
return resp;
} catch (error) {
throw Boom.badRequest(error);
@ -160,7 +157,7 @@ export class FilterManager {
}
// Returns the newly updated filter.
const { body: resp } = await this._asInternalUser.ml.updateFilter({
const { body: resp } = await this._mlClient.updateFilter({
filter_id: filterId,
body,
});
@ -171,7 +168,7 @@ export class FilterManager {
}
async deleteFilter(filterId: string) {
const { body } = await this._asInternalUser.ml.deleteFilter({ filter_id: filterId });
const { body } = await this._mlClient.deleteFilter({ filter_id: filterId });
return body;
}
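The filter manager follows the same substitution, for instance:

const filterManager = new FilterManager(mlClient);
const filters = await filterManager.getAllFilters();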

View file

@ -5,10 +5,17 @@
*/
import { IScopedClusterClient } from 'kibana/server';
import type { MlClient } from '../../lib/ml_client';
import type { JobSavedObjectService } from '../../saved_objects';
export function jobAuditMessagesProvider(
client: IScopedClusterClient
client: IScopedClusterClient,
mlClient: MlClient
): {
getJobAuditMessages: (jobId?: string, from?: string) => any;
getJobAuditMessages: (
jobSavedObjectService: JobSavedObjectService,
jobId?: string,
from?: string
) => any;
getAuditMessagesSummary: (jobIds?: string[]) => any;
};

View file

@ -34,14 +34,14 @@ const anomalyDetectorTypeFilter = {
},
};
export function jobAuditMessagesProvider({ asInternalUser }) {
export function jobAuditMessagesProvider({ asInternalUser }, mlClient) {
// search for audit messages,
// jobId is optional. without it, all jobs will be listed.
// from is optional and should be a string formatted in ES time units. e.g. 12h, 1d, 7d
async function getJobAuditMessages(jobId, from) {
async function getJobAuditMessages(jobSavedObjectService, jobId, from) {
let gte = null;
if (jobId !== undefined && from === undefined) {
const jobs = await asInternalUser.ml.getJobs({ job_id: jobId });
const jobs = await mlClient.getJobs({ job_id: jobId });
if (jobs.count > 0 && jobs.jobs !== undefined) {
gte = moment(jobs.jobs[0].create_time).valueOf();
}
@ -113,6 +113,11 @@ export function jobAuditMessagesProvider({ asInternalUser }) {
if (body.hits.total.value > 0) {
messages = body.hits.hits.map((hit) => hit._source);
}
messages = await jobSavedObjectService.filterJobsForSpace(
'anomaly-detector',
messages,
'job_id'
);
return messages;
}
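The new first argument is the per-request saved object service, used to drop messages belonging to jobs the current space cannot see. A usage sketch:

const { getJobAuditMessages } = jobAuditMessagesProvider(client, mlClient);
// Only audit messages for jobs assigned to the current space come back.
const messages = await getJobAuditMessages(jobSavedObjectService, 'my_job', '7d');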

View file

@ -4,11 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import { i18n } from '@kbn/i18n';
import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states';
import { fillResultsWithTimeouts, isRequestTimeout } from './error_utils';
import { Datafeed, DatafeedStats } from '../../../common/types/anomaly_detection_jobs';
import type { MlClient } from '../../lib/ml_client';
export interface MlDatafeedsResponse {
datafeeds: Datafeed[];
@ -26,7 +26,7 @@ interface Results {
};
}
export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) {
export function datafeedsProvider(mlClient: MlClient) {
async function forceStartDatafeeds(datafeedIds: string[], start?: number, end?: number) {
const jobIds = await getJobIdsByDatafeedId();
const doStartsCalled = datafeedIds.reduce((acc, cur) => {
@ -84,7 +84,7 @@ export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) {
async function openJob(jobId: string) {
let opened = false;
try {
const { body } = await asInternalUser.ml.openJob({ job_id: jobId });
const { body } = await mlClient.openJob({ job_id: jobId });
opened = body.opened;
} catch (error) {
if (error.statusCode === 409) {
@ -97,7 +97,7 @@ export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) {
}
async function startDatafeed(datafeedId: string, start?: number, end?: number) {
return asInternalUser.ml.startDatafeed({
return mlClient.startDatafeed({
datafeed_id: datafeedId,
start: (start as unknown) as string,
end: (end as unknown) as string,
@ -109,7 +109,7 @@ export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) {
for (const datafeedId of datafeedIds) {
try {
const { body } = await asInternalUser.ml.stopDatafeed<{
const { body } = await mlClient.stopDatafeed<{
started: boolean;
}>({
datafeed_id: datafeedId,
@ -131,7 +131,7 @@ export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) {
}
async function forceDeleteDatafeed(datafeedId: string) {
const { body } = await asInternalUser.ml.deleteDatafeed({
const { body } = await mlClient.deleteDatafeed<{ acknowledged: boolean }>({
datafeed_id: datafeedId,
force: true,
});
@ -141,7 +141,8 @@ export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) {
async function getDatafeedIdsByJobId() {
const {
body: { datafeeds },
} = await asInternalUser.ml.getDatafeeds<MlDatafeedsResponse>();
} = await mlClient.getDatafeeds<MlDatafeedsResponse>();
return datafeeds.reduce((acc, cur) => {
acc[cur.job_id] = cur.datafeed_id;
return acc;
@ -151,7 +152,8 @@ export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) {
async function getJobIdsByDatafeedId() {
const {
body: { datafeeds },
} = await asInternalUser.ml.getDatafeeds<MlDatafeedsResponse>();
} = await mlClient.getDatafeeds<MlDatafeedsResponse>();
return datafeeds.reduce((acc, cur) => {
acc[cur.datafeed_id] = cur.job_id;
return acc;

View file

@ -4,11 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import { CalendarManager } from '../calendar';
import { GLOBAL_CALENDAR } from '../../../common/constants/calendars';
import { Job } from '../../../common/types/anomaly_detection_jobs';
import { MlJobsResponse } from './jobs';
import type { MlClient } from '../../lib/ml_client';
interface Group {
id: string;
@ -23,15 +23,14 @@ interface Results {
};
}
export function groupsProvider(client: IScopedClusterClient) {
const calMngr = new CalendarManager(client);
const { asInternalUser } = client;
export function groupsProvider(mlClient: MlClient) {
const calMngr = new CalendarManager(mlClient);
async function getAllGroups() {
const groups: { [id: string]: Group } = {};
const jobIds: { [id: string]: undefined | null } = {};
const [{ body }, calendars] = await Promise.all([
asInternalUser.ml.getJobs<MlJobsResponse>(),
mlClient.getJobs<MlJobsResponse>(),
calMngr.getAllCalendars(),
]);
@ -81,7 +80,7 @@ export function groupsProvider(client: IScopedClusterClient) {
for (const job of jobs) {
const { job_id: jobId, groups } = job;
try {
await asInternalUser.ml.updateJob({ job_id: jobId, body: { groups } });
await mlClient.updateJob({ job_id: jobId, body: { groups } });
results[jobId] = { success: true };
} catch ({ body }) {
results[jobId] = { success: false, error: body };

View file

@ -11,15 +11,16 @@ import { groupsProvider } from './groups';
import { newJobCapsProvider } from './new_job_caps';
import { newJobChartsProvider, topCategoriesProvider } from './new_job';
import { modelSnapshotProvider } from './model_snapshots';
import type { MlClient } from '../../lib/ml_client';
export function jobServiceProvider(client: IScopedClusterClient) {
export function jobServiceProvider(client: IScopedClusterClient, mlClient: MlClient) {
return {
...datafeedsProvider(client),
...jobsProvider(client),
...groupsProvider(client),
...datafeedsProvider(mlClient),
...jobsProvider(client, mlClient),
...groupsProvider(mlClient),
...newJobCapsProvider(client),
...newJobChartsProvider(client),
...topCategoriesProvider(client),
...modelSnapshotProvider(client),
...topCategoriesProvider(mlClient),
...modelSnapshotProvider(mlClient),
};
}
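Callers now pass both clients. For example (job ids are illustrative):

const jobService = jobServiceProvider(client, mlClient);
// jobsExist resolves to a map of job id -> boolean and uses the space aware
// getJobs wrapper under the hood.
const existsById = await jobService.jobsExist(['job-1', 'job-2']);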

View file

@ -30,6 +30,8 @@ import {
isTimeSeriesViewJob,
} from '../../../common/util/job_utils';
import { groupsProvider } from './groups';
import type { MlClient } from '../../lib/ml_client';
export interface MlJobsResponse {
jobs: Job[];
count: number;
@ -47,16 +49,16 @@ interface Results {
};
}
export function jobsProvider(client: IScopedClusterClient) {
export function jobsProvider(client: IScopedClusterClient, mlClient: MlClient) {
const { asInternalUser } = client;
const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(client);
const { getAuditMessagesSummary } = jobAuditMessagesProvider(client);
const { getLatestBucketTimestampByJob } = resultsServiceProvider(client);
const calMngr = new CalendarManager(client);
const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(mlClient);
const { getAuditMessagesSummary } = jobAuditMessagesProvider(client, mlClient);
const { getLatestBucketTimestampByJob } = resultsServiceProvider(mlClient);
const calMngr = new CalendarManager(mlClient);
async function forceDeleteJob(jobId: string) {
return asInternalUser.ml.deleteJob({ job_id: jobId, force: true, wait_for_completion: false });
await mlClient.deleteJob({ job_id: jobId, force: true, wait_for_completion: false });
}
async function deleteJobs(jobIds: string[]) {
@ -100,7 +102,7 @@ export function jobsProvider(client: IScopedClusterClient) {
const results: Results = {};
for (const jobId of jobIds) {
try {
await asInternalUser.ml.closeJob({ job_id: jobId });
await mlClient.closeJob({ job_id: jobId });
results[jobId] = { closed: true };
} catch (error) {
if (isRequestTimeout(error)) {
@ -116,7 +118,7 @@ export function jobsProvider(client: IScopedClusterClient) {
// if the job has failed we want to attempt a force close.
// however, if we received a 409 due to the datafeed being started we should not attempt a force close.
try {
await asInternalUser.ml.closeJob({ job_id: jobId, force: true });
await mlClient.closeJob({ job_id: jobId, force: true });
results[jobId] = { closed: true };
} catch (error2) {
if (isRequestTimeout(error2)) {
@ -139,12 +141,12 @@ export function jobsProvider(client: IScopedClusterClient) {
throw Boom.notFound(`Cannot find datafeed for job ${jobId}`);
}
const { body } = await asInternalUser.ml.stopDatafeed({ datafeed_id: datafeedId, force: true });
const { body } = await mlClient.stopDatafeed({ datafeed_id: datafeedId, force: true });
if (body.stopped !== true) {
return { success: false };
}
await asInternalUser.ml.closeJob({ job_id: jobId, force: true });
await mlClient.closeJob({ job_id: jobId, force: true });
return { success: true };
}
@ -272,14 +274,12 @@ export function jobsProvider(client: IScopedClusterClient) {
calendarResults,
latestBucketTimestampByJob,
] = await Promise.all([
asInternalUser.ml.getJobs<MlJobsResponse>(
mlClient.getJobs<MlJobsResponse>(jobIds.length > 0 ? { job_id: jobIdsString } : undefined),
mlClient.getJobStats<MlJobsStatsResponse>(
jobIds.length > 0 ? { job_id: jobIdsString } : undefined
),
asInternalUser.ml.getJobStats<MlJobsStatsResponse>(
jobIds.length > 0 ? { job_id: jobIdsString } : undefined
),
asInternalUser.ml.getDatafeeds<MlDatafeedsResponse>(),
asInternalUser.ml.getDatafeedStats<MlDatafeedsStatsResponse>(),
mlClient.getDatafeeds<MlDatafeedsResponse>(),
mlClient.getDatafeedStats<MlDatafeedsStatsResponse>(),
calMngr.getAllCalendars(),
getLatestBucketTimestampByJob(),
]);
@ -390,7 +390,7 @@ export function jobsProvider(client: IScopedClusterClient) {
async function deletingJobTasks() {
const actions = ['cluster:admin/xpack/ml/job/delete'];
const detailed = true;
const jobIds = [];
const jobIds: string[] = [];
try {
const { body } = await asInternalUser.tasks.list({ actions, detailed });
Object.keys(body.nodes).forEach((nodeId) => {
@ -404,7 +404,8 @@ export function jobsProvider(client: IScopedClusterClient) {
// use the jobs list to get the ids of deleting jobs
const {
body: { jobs },
} = await asInternalUser.ml.getJobs<MlJobsResponse>();
} = await mlClient.getJobs<MlJobsResponse>();
jobIds.push(...jobs.filter((j) => j.deleting === true).map((j) => j.job_id));
}
return { jobIds };
@ -417,7 +418,7 @@ export function jobsProvider(client: IScopedClusterClient) {
const results: { [id: string]: boolean } = {};
for (const jobId of jobIds) {
try {
const { body } = await asInternalUser.ml.getJobs<MlJobsResponse>({
const { body } = await mlClient.getJobs<MlJobsResponse>({
job_id: jobId,
});
results[jobId] = body.count > 0;
@ -433,8 +434,8 @@ export function jobsProvider(client: IScopedClusterClient) {
}
async function getAllJobAndGroupIds() {
const { getAllGroups } = groupsProvider(client);
const { body } = await asInternalUser.ml.getJobs<MlJobsResponse>();
const { getAllGroups } = groupsProvider(mlClient);
const { body } = await mlClient.getJobs<MlJobsResponse>();
const jobIds = body.jobs.map((job) => job.job_id);
const groups = await getAllGroups();
const groupIds = groups.map((group) => group.id);
@ -448,7 +449,7 @@ export function jobsProvider(client: IScopedClusterClient) {
async function getLookBackProgress(jobId: string, start: number, end: number) {
const datafeedId = `datafeed-${jobId}`;
const [{ body }, isRunning] = await Promise.all([
asInternalUser.ml.getJobStats<MlJobsStatsResponse>({ job_id: jobId }),
mlClient.getJobStats<MlJobsStatsResponse>({ job_id: jobId }),
isDatafeedRunning(datafeedId),
]);
@ -467,7 +468,7 @@ export function jobsProvider(client: IScopedClusterClient) {
}
async function isDatafeedRunning(datafeedId: string) {
const { body } = await asInternalUser.ml.getDatafeedStats<MlDatafeedsStatsResponse>({
const { body } = await mlClient.getDatafeedStats<MlDatafeedsStatsResponse>({
datafeed_id: datafeedId,
});
if (body.datafeeds.length) {

View file

@ -6,10 +6,10 @@
import Boom from '@hapi/boom';
import { i18n } from '@kbn/i18n';
import { IScopedClusterClient } from 'kibana/server';
import { ModelSnapshot } from '../../../common/types/anomaly_detection_jobs';
import { datafeedsProvider } from './datafeeds';
import { FormCalendar, CalendarManager } from '../calendar';
import type { MlClient } from '../../lib/ml_client';
export interface ModelSnapshotsResponse {
count: number;
@ -19,9 +19,8 @@ export interface RevertModelSnapshotResponse {
model: ModelSnapshot;
}
export function modelSnapshotProvider(client: IScopedClusterClient) {
const { asInternalUser } = client;
const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(client);
export function modelSnapshotProvider(mlClient: MlClient) {
const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(mlClient);
async function revertModelSnapshot(
jobId: string,
@ -33,12 +32,12 @@ export function modelSnapshotProvider(client: IScopedClusterClient) {
) {
let datafeedId = `datafeed-${jobId}`;
// ensure job exists
await asInternalUser.ml.getJobs({ job_id: jobId });
await mlClient.getJobs({ job_id: jobId });
try {
// ensure the datafeed exists
// the datafeed is probably called datafeed-<jobId>
await asInternalUser.ml.getDatafeeds({
await mlClient.getDatafeeds({
datafeed_id: datafeedId,
});
} catch (e) {
@ -52,7 +51,7 @@ export function modelSnapshotProvider(client: IScopedClusterClient) {
}
// ensure the snapshot exists
const { body: snapshot } = await asInternalUser.ml.getModelSnapshots<ModelSnapshotsResponse>({
const { body: snapshot } = await mlClient.getModelSnapshots<ModelSnapshotsResponse>({
job_id: jobId,
snapshot_id: snapshotId,
});
@ -60,7 +59,7 @@ export function modelSnapshotProvider(client: IScopedClusterClient) {
// apply the snapshot revert
const {
body: { model },
} = await asInternalUser.ml.revertModelSnapshot<RevertModelSnapshotResponse>({
} = await mlClient.revertModelSnapshot<RevertModelSnapshotResponse>({
job_id: jobId,
snapshot_id: snapshotId,
body: {
@ -87,7 +86,7 @@ export function modelSnapshotProvider(client: IScopedClusterClient) {
end_time: s.end,
})),
};
const cm = new CalendarManager(client);
const cm = new CalendarManager(mlClient);
await cm.newCalendar(calendar);
}

View file

@ -5,74 +5,77 @@
*/
import { SearchResponse } from 'elasticsearch';
import { IScopedClusterClient } from 'kibana/server';
import { ML_RESULTS_INDEX_PATTERN } from '../../../../../common/constants/index_patterns';
import { CategoryId, Category } from '../../../../../common/types/categories';
import type { MlClient } from '../../../../lib/ml_client';
export function topCategoriesProvider({ asInternalUser }: IScopedClusterClient) {
export function topCategoriesProvider(mlClient: MlClient) {
async function getTotalCategories(jobId: string): Promise<number> {
const { body } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
query: {
bool: {
filter: [
{
term: {
job_id: jobId,
const { body } = await mlClient.anomalySearch<SearchResponse<any>>(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
term: {
job_id: jobId,
},
},
},
{
exists: {
field: 'category_id',
{
exists: {
field: 'category_id',
},
},
},
],
],
},
},
},
},
});
[]
);
// @ts-ignore total is an object here
return body?.hits?.total?.value ?? 0;
}
async function getTopCategoryCounts(jobId: string, numberOfCategories: number) {
const { body } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
query: {
bool: {
filter: [
{
term: {
job_id: jobId,
const { body } = await mlClient.anomalySearch<SearchResponse<any>>(
{
size: 0,
body: {
query: {
bool: {
filter: [
{
term: {
job_id: jobId,
},
},
},
{
term: {
result_type: 'model_plot',
{
term: {
result_type: 'model_plot',
},
},
},
{
term: {
by_field_name: 'mlcategory',
{
term: {
by_field_name: 'mlcategory',
},
},
},
],
],
},
},
},
aggs: {
cat_count: {
terms: {
field: 'by_field_value',
size: numberOfCategories,
aggs: {
cat_count: {
terms: {
field: 'by_field_value',
size: numberOfCategories,
},
},
},
},
},
});
[]
);
const catCounts: Array<{
id: CategoryId;
@ -100,24 +103,26 @@ export function topCategoriesProvider({ asInternalUser }: IScopedClusterClient)
field: 'category_id',
},
};
const { body } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
size,
body: {
query: {
bool: {
filter: [
{
term: {
job_id: jobId,
const { body } = await mlClient.anomalySearch<any>(
{
size,
body: {
query: {
bool: {
filter: [
{
term: {
job_id: jobId,
},
},
},
categoryFilter,
],
categoryFilter,
],
},
},
},
},
});
[]
);
return body.hits.hits?.map((c: { _source: Category }) => c._source) || [];
}

View file

@ -8,20 +8,10 @@ import { IScopedClusterClient } from 'kibana/server';
import { validateJob, ValidateJobPayload } from './job_validation';
import { JobValidationMessage } from '../../../common/constants/messages';
import type { MlClient } from '../../lib/ml_client';
const callAs = {
fieldCaps: () => Promise.resolve({ body: { fields: [] } }),
ml: {
info: () =>
Promise.resolve({
body: {
limits: {
effective_max_model_memory_limit: '100MB',
max_model_memory_limit: '1GB',
},
},
}),
},
search: () => Promise.resolve({ body: { hits: { total: { value: 0, relation: 'eq' } } } }),
};
@ -30,6 +20,18 @@ const mlClusterClient = ({
asInternalUser: callAs,
} as unknown) as IScopedClusterClient;
const mlClient = ({
info: () =>
Promise.resolve({
body: {
limits: {
effective_max_model_memory_limit: '100MB',
max_model_memory_limit: '1GB',
},
},
}),
} as unknown) as MlClient;
// Note: The tests cast `payload` as any
// so we can simulate possible runtime payloads
// that don't satisfy the TypeScript specs.
@ -39,7 +41,7 @@ describe('ML - validateJob', () => {
job: { analysis_config: { detectors: [] } },
} as unknown) as ValidateJobPayload;
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toStrictEqual([
@ -59,7 +61,7 @@ describe('ML - validateJob', () => {
job_id: id,
},
} as unknown) as ValidateJobPayload;
return validateJob(mlClusterClient, payload).catch(() => {
return validateJob(mlClusterClient, mlClient, payload).catch(() => {
new Error('Promise should not fail for jobIdTests.');
});
});
@ -80,7 +82,7 @@ describe('ML - validateJob', () => {
job: { analysis_config: { detectors: [] }, groups: testIds },
} as unknown) as ValidateJobPayload;
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids.includes(messageId)).toBe(true);
});
@ -120,7 +122,7 @@ describe('ML - validateJob', () => {
const payload = ({
job: { analysis_config: { bucket_span: format, detectors: [] } },
} as unknown) as ValidateJobPayload;
return validateJob(mlClusterClient, payload).catch(() => {
return validateJob(mlClusterClient, mlClient, payload).catch(() => {
new Error('Promise should not fail for bucketSpanFormatTests.');
});
});
@ -159,7 +161,7 @@ describe('ML - validateJob', () => {
function: undefined,
});
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids.includes('detectors_function_empty')).toBe(true);
});
@ -173,7 +175,7 @@ describe('ML - validateJob', () => {
function: 'count',
});
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids.includes('detectors_function_not_empty')).toBe(true);
});
@ -185,7 +187,7 @@ describe('ML - validateJob', () => {
fields: {},
} as unknown) as ValidateJobPayload;
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids.includes('index_fields_invalid')).toBe(true);
});
@ -197,7 +199,7 @@ describe('ML - validateJob', () => {
fields: { testField: {} },
} as unknown) as ValidateJobPayload;
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids.includes('index_fields_valid')).toBe(true);
});
@ -225,7 +227,7 @@ describe('ML - validateJob', () => {
const payload = getBasicPayload() as any;
delete payload.job.analysis_config.influencers;
validateJob(mlClusterClient, payload).then(
validateJob(mlClusterClient, mlClient, payload).then(
() =>
done(
new Error('Promise should not resolve for this test when influencers is not an Array.')
@ -237,7 +239,7 @@ describe('ML - validateJob', () => {
it('detect duplicate detectors', () => {
const payload = getBasicPayload() as any;
payload.job.analysis_config.detectors.push({ function: 'count' });
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toStrictEqual([
'job_id_valid',
@ -260,7 +262,7 @@ describe('ML - validateJob', () => {
{ function: 'count', by_field_name: 'airline' },
{ function: 'count', partition_field_name: 'airline' },
];
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toStrictEqual([
'job_id_valid',
@ -275,7 +277,7 @@ describe('ML - validateJob', () => {
// Failing https://github.com/elastic/kibana/issues/65865
it('basic validation passes, extended checks return some messages', () => {
const payload = getBasicPayload();
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toStrictEqual([
'job_id_valid',
@ -308,7 +310,7 @@ describe('ML - validateJob', () => {
fields: { testField: {} },
};
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toStrictEqual([
'job_id_valid',
@ -341,7 +343,7 @@ describe('ML - validateJob', () => {
fields: { testField: {} },
};
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toStrictEqual([
'job_id_valid',
@ -384,7 +386,7 @@ describe('ML - validateJob', () => {
fields: { testField: {} },
};
return validateJob(mlClusterClient, payload).then((messages) => {
return validateJob(mlClusterClient, mlClient, payload).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toStrictEqual([
'job_id_valid',
@ -403,7 +405,7 @@ describe('ML - validateJob', () => {
const docsTestPayload = getBasicPayload() as any;
docsTestPayload.job.analysis_config.detectors = [{ function: 'count', by_field_name: 'airline' }];
it('creates a docs url pointing to the current docs version', () => {
return validateJob(mlClusterClient, docsTestPayload).then((messages) => {
return validateJob(mlClusterClient, mlClient, docsTestPayload).then((messages) => {
const message = messages[
messages.findIndex((m) => m.id === 'field_not_aggregatable')
] as JobValidationMessage;
@ -412,7 +414,7 @@ describe('ML - validateJob', () => {
});
it('creates a docs url pointing to the master docs version', () => {
return validateJob(mlClusterClient, docsTestPayload, 'master').then((messages) => {
return validateJob(mlClusterClient, mlClient, docsTestPayload, 'master').then((messages) => {
const message = messages[
messages.findIndex((m) => m.id === 'field_not_aggregatable')
] as JobValidationMessage;

View file

@ -26,6 +26,7 @@ import { validateModelMemoryLimit } from './validate_model_memory_limit';
import { validateTimeRange, isValidTimeField } from './validate_time_range';
import { validateJobSchema } from '../../routes/schemas/job_validation_schema';
import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';
import type { MlClient } from '../../lib/ml_client';
export type ValidateJobPayload = TypeOf<typeof validateJobSchema>;
@ -35,6 +36,7 @@ export type ValidateJobPayload = TypeOf<typeof validateJobSchema>;
*/
export async function validateJob(
client: IScopedClusterClient,
mlClient: MlClient,
payload: ValidateJobPayload,
kbnVersion = 'current',
isSecurityDisabled?: boolean
@ -94,7 +96,9 @@ export async function validateJob(
// if cardinality checks didn't return a message with an error level
if (cardinalityError === false) {
validationMessages.push(...(await validateInfluencers(job)));
validationMessages.push(...(await validateModelMemoryLimit(client, job, duration)));
validationMessages.push(
...(await validateModelMemoryLimit(client, mlClient, job, duration))
);
}
} else {
validationMessages = basicValidation.messages;

View file

@ -8,6 +8,7 @@ import { IScopedClusterClient } from 'kibana/server';
import { CombinedJob, Detector } from '../../../common/types/anomaly_detection_jobs';
import { ModelMemoryEstimateResponse } from '../calculate_model_memory_limit/calculate_model_memory_limit';
import { validateModelMemoryLimit } from './validate_model_memory_limit';
import type { MlClient } from '../../lib/ml_client';
describe('ML - validateModelMemoryLimit', () => {
// mock info endpoint response
@ -70,7 +71,7 @@ describe('ML - validateModelMemoryLimit', () => {
};
interface MockAPICallResponse {
'ml.estimateModelMemory'?: ModelMemoryEstimateResponse;
estimateModelMemory?: ModelMemoryEstimateResponse;
}
// mock asCurrentUser
@ -78,15 +79,8 @@ describe('ML - validateModelMemoryLimit', () => {
// - to retrieve the info endpoint
// - to search for cardinality of split field
// - to retrieve field capabilities used in search for split field cardinality
const getMockMlClusterClient = ({
'ml.estimateModelMemory': estimateModelMemory,
}: MockAPICallResponse = {}): IScopedClusterClient => {
const getMockMlClusterClient = (): IScopedClusterClient => {
const callAs = {
ml: {
info: () => Promise.resolve({ body: mlInfoResponse }),
estimateModelMemory: () =>
Promise.resolve({ body: estimateModelMemory || modelMemoryEstimateResponse }),
},
search: () => Promise.resolve({ body: cardinalitySearchResponse }),
fieldCaps: () => Promise.resolve({ body: fieldCapsResponse }),
};
@ -97,6 +91,18 @@ describe('ML - validateModelMemoryLimit', () => {
} as unknown) as IScopedClusterClient;
};
const getMockMlClient = ({
estimateModelMemory: estimateModelMemory,
}: MockAPICallResponse = {}): MlClient => {
const callAs = {
info: () => Promise.resolve({ body: mlInfoResponse }),
estimateModelMemory: () =>
Promise.resolve({ body: estimateModelMemory || modelMemoryEstimateResponse }),
};
return callAs as MlClient;
};
function getJobConfig(influencers: string[] = [], detectors: Detector[] = []) {
return ({
analysis_config: { detectors, influencers },
@ -127,7 +133,12 @@ describe('ML - validateModelMemoryLimit', () => {
const job = getJobConfig();
const duration = undefined;
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual([]);
});
@ -139,7 +150,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '31mb';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['mml_greater_than_max_mml']);
});
@ -153,7 +169,8 @@ describe('ML - validateModelMemoryLimit', () => {
job.analysis_limits.model_memory_limit = '20mb';
return validateModelMemoryLimit(
getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '66mb' } }),
getMockMlClusterClient(),
getMockMlClient({ estimateModelMemory: { model_memory_estimate: '66mb' } }),
job,
duration
).then((messages) => {
@ -170,7 +187,8 @@ describe('ML - validateModelMemoryLimit', () => {
job.analysis_limits.model_memory_limit = '30mb';
return validateModelMemoryLimit(
getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '24mb' } }),
getMockMlClusterClient(),
getMockMlClient({ estimateModelMemory: { model_memory_estimate: '24mb' } }),
job,
duration
).then((messages) => {
@ -187,7 +205,8 @@ describe('ML - validateModelMemoryLimit', () => {
job.analysis_limits.model_memory_limit = '10mb';
return validateModelMemoryLimit(
getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '22mb' } }),
getMockMlClusterClient(),
getMockMlClient({ estimateModelMemory: { model_memory_estimate: '22mb' } }),
job,
duration
).then((messages) => {
@ -205,7 +224,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '10mb';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['half_estimated_mml_greater_than_mml']);
});
@ -217,7 +241,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '31mb';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual([]);
});
@ -229,7 +258,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '41mb';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['mml_greater_than_effective_max_mml']);
});
@ -243,7 +277,8 @@ describe('ML - validateModelMemoryLimit', () => {
job.analysis_limits.model_memory_limit = '20mb';
return validateModelMemoryLimit(
getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '19mb' } }),
getMockMlClusterClient(),
getMockMlClient({ estimateModelMemory: { model_memory_estimate: '19mb' } }),
job,
duration
).then((messages) => {
@ -259,7 +294,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '0mb';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['mml_value_invalid']);
});
@ -272,7 +312,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '10mbananas';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['mml_value_invalid']);
});
@ -285,7 +330,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '10';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['mml_value_invalid']);
});
@ -298,7 +348,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = 'mb';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['mml_value_invalid']);
});
@ -311,7 +366,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = 'asdf';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['mml_value_invalid']);
});
@ -324,7 +384,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '1023KB';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['mml_value_invalid']);
});
@ -337,7 +402,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '1024KB';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['half_estimated_mml_greater_than_mml']);
});
@ -350,7 +420,12 @@ describe('ML - validateModelMemoryLimit', () => {
// @ts-expect-error
job.analysis_limits.model_memory_limit = '6MB';
return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => {
return validateModelMemoryLimit(
getMockMlClusterClient(),
getMockMlClient(),
job,
duration
).then((messages) => {
const ids = messages.map((m) => m.id);
expect(ids).toEqual(['half_estimated_mml_greater_than_mml']);
});
@ -364,7 +439,8 @@ describe('ML - validateModelMemoryLimit', () => {
job.analysis_limits.model_memory_limit = '20MB';
return validateModelMemoryLimit(
getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '20mb' } }),
getMockMlClusterClient(),
getMockMlClient({ estimateModelMemory: { model_memory_estimate: '20mb' } }),
job,
duration
).then((messages) => {


@ -11,16 +11,17 @@ import { validateJobObject } from './validate_job_object';
import { calculateModelMemoryLimitProvider } from '../calculate_model_memory_limit';
import { ALLOWED_DATA_UNITS } from '../../../common/constants/validation';
import { MlInfoResponse } from '../../../common/types/ml_server_info';
import type { MlClient } from '../../lib/ml_client';
// The minimum value the backend expects is 1MByte
const MODEL_MEMORY_LIMIT_MINIMUM_BYTES = 1048576;
export async function validateModelMemoryLimit(
client: IScopedClusterClient,
mlClient: MlClient,
job: CombinedJob,
duration?: { start?: number; end?: number }
) {
const { asInternalUser } = client;
validateJobObject(job);
// retrieve the model memory limit specified by the user in the job config.
@ -52,12 +53,12 @@ export async function validateModelMemoryLimit(
// retrieve the max_model_memory_limit value from the server
// this will be unset unless the user has set this on their cluster
const { body } = await asInternalUser.ml.info<MlInfoResponse>();
const { body } = await mlClient.info<MlInfoResponse>();
const maxModelMemoryLimit = body.limits.max_model_memory_limit?.toUpperCase();
const effectiveMaxModelMemoryLimit = body.limits.effective_max_model_memory_limit?.toUpperCase();
if (runCalcModelMemoryTest) {
const { modelMemoryLimit } = await calculateModelMemoryLimitProvider(client)(
const { modelMemoryLimit } = await calculateModelMemoryLimitProvider(client, mlClient)(
job.analysis_config,
job.datafeed_config.indices.join(','),
job.datafeed_config.query,


@ -5,13 +5,12 @@
*/
import Boom from '@hapi/boom';
import { IScopedClusterClient } from 'kibana/server';
import { PARTITION_FIELDS } from '../../../common/constants/anomalies';
import { PartitionFieldsType } from '../../../common/types/anomalies';
import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns';
import { CriteriaField } from './results_service';
import { FieldConfig, FieldsConfig } from '../../routes/schemas/results_service_schema';
import { Job } from '../../../common/types/anomaly_detection_jobs';
import type { MlClient } from '../../lib/ml_client';
type SearchTerm =
| {
@ -128,7 +127,7 @@ function getFieldObject(fieldType: PartitionFieldsType, aggs: any) {
: {};
}
export const getPartitionFieldsValuesFactory = ({ asInternalUser }: IScopedClusterClient) =>
export const getPartitionFieldsValuesFactory = (mlClient: MlClient) =>
/**
* Gets the record of partition fields with possible values that fit the provided queries.
* @param jobId - Job ID
@ -146,7 +145,7 @@ export const getPartitionFieldsValuesFactory = ({ asInternalUser }: IScopedClust
latestMs: number,
fieldsConfig: FieldsConfig = {}
) {
const { body: jobsResponse } = await asInternalUser.ml.getJobs({ job_id: jobId });
const { body: jobsResponse } = await mlClient.getJobs({ job_id: jobId });
if (jobsResponse.count === 0 || jobsResponse.jobs === undefined) {
throw Boom.notFound(`Job with the id "${jobId}" not found`);
}
@ -220,11 +219,13 @@ export const getPartitionFieldsValuesFactory = ({ asInternalUser }: IScopedClust
},
};
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: requestBody,
});
const { body } = await mlClient.anomalySearch(
{
size: 0,
body: requestBody,
},
[jobId]
);
return PARTITION_FIELDS.reduce((acc, key) => {
return {


@ -6,11 +6,8 @@
import { sortBy, slice, get } from 'lodash';
import moment from 'moment';
import { SearchResponse } from 'elasticsearch';
import { IScopedClusterClient } from 'kibana/server';
import Boom from '@hapi/boom';
import { buildAnomalyTableItems } from './build_anomaly_table_items';
import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns';
import { ANOMALIES_TABLE_DEFAULT_QUERY_SIZE } from '../../../common/constants/search';
import { getPartitionFieldsValuesFactory } from './get_partition_fields_values';
import {
@ -21,6 +18,7 @@ import {
import { JOB_ID, PARTITION_FIELD_VALUE } from '../../../common/constants/anomalies';
import { GetStoppedPartitionResult } from '../../../common/types/results';
import { MlJobsResponse } from '../job_service/jobs';
import type { MlClient } from '../../lib/ml_client';
// Service for carrying out Elasticsearch queries to obtain data for the
// ML Results dashboards.
@ -38,8 +36,7 @@ interface Influencer {
fieldValue: any;
}
export function resultsServiceProvider(client: IScopedClusterClient) {
const { asInternalUser } = client;
export function resultsServiceProvider(mlClient: MlClient) {
// Obtains data for the anomalies table, aggregating anomalies by day or hour as requested.
// Return an Object with properties 'anomalies' and 'interval' (interval used to aggregate anomalies,
// one of day, hour or second. Note 'auto' can be provided as the aggregationInterval in the request,
@ -142,30 +139,32 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
});
}
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
size: maxRecords,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
const { body } = await mlClient.anomalySearch(
{
size: maxRecords,
body: {
query: {
bool: {
filter: [
{
query_string: {
query: 'result_type:record',
analyze_wildcard: false,
},
},
},
{
bool: {
must: boolCriteria,
{
bool: {
must: boolCriteria,
},
},
},
],
],
},
},
sort: [{ record_score: { order: 'desc' } }],
},
sort: [{ record_score: { order: 'desc' } }],
},
});
[]
);
const tableData: {
anomalies: AnomaliesTableRecord[];
@ -266,7 +265,6 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
const query = {
size: 0,
index: ML_RESULTS_INDEX_PATTERN,
body: {
query: {
bool: {
@ -295,7 +293,7 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
},
};
const { body } = await asInternalUser.search(query);
const { body } = await mlClient.anomalySearch(query, []);
const maxScore = get(body, ['aggregations', 'max_score', 'value'], null);
return { maxScore };
@ -333,32 +331,34 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
// Size of job terms agg, consistent with maximum number of jobs supported by Java endpoints.
const maxJobs = 10000;
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
query: {
bool: {
filter,
},
},
aggs: {
byJobId: {
terms: {
field: 'job_id',
size: maxJobs,
const { body } = await mlClient.anomalySearch(
{
size: 0,
body: {
query: {
bool: {
filter,
},
aggs: {
maxTimestamp: {
max: {
field: 'timestamp',
},
aggs: {
byJobId: {
terms: {
field: 'job_id',
size: maxJobs,
},
aggs: {
maxTimestamp: {
max: {
field: 'timestamp',
},
},
},
},
},
},
},
});
[]
);
const bucketsByJobId: Array<{ key: string; maxTimestamp: { value?: number } }> = get(
body,
@ -377,17 +377,19 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
// from the given index and job ID.
// Returned response consists of a list of examples against category ID.
async function getCategoryExamples(jobId: string, categoryIds: any, maxExamples: number) {
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
size: ANOMALIES_TABLE_DEFAULT_QUERY_SIZE, // Matches size of records in anomaly summary table.
body: {
query: {
bool: {
filter: [{ term: { job_id: jobId } }, { terms: { category_id: categoryIds } }],
const { body } = await mlClient.anomalySearch(
{
size: ANOMALIES_TABLE_DEFAULT_QUERY_SIZE, // Matches size of records in anomaly summary table.
body: {
query: {
bool: {
filter: [{ term: { job_id: jobId } }, { terms: { category_id: categoryIds } }],
},
},
},
},
});
[]
);
const examplesByCategoryId: { [key: string]: any } = {};
if (body.hits.total.value > 0) {
@ -411,17 +413,19 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
// Returned response contains four properties - categoryId, regex, examples
// and terms (space delimited String of the common tokens matched in values of the category).
async function getCategoryDefinition(jobId: string, categoryId: string) {
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
size: 1,
body: {
query: {
bool: {
filter: [{ term: { job_id: jobId } }, { term: { category_id: categoryId } }],
const { body } = await mlClient.anomalySearch<any>(
{
size: 1,
body: {
query: {
bool: {
filter: [{ term: { job_id: jobId } }, { term: { category_id: categoryId } }],
},
},
},
},
});
[]
);
const definition = { categoryId, terms: null, regex: null, examples: [] };
if (body.hits.total.value > 0) {
@ -451,23 +455,25 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
},
});
}
const { body } = await asInternalUser.search<SearchResponse<AnomalyCategorizerStatsDoc>>({
index: ML_RESULTS_INDEX_PATTERN,
body: {
query: {
bool: {
must: mustMatchClauses,
filter: [
{
term: {
job_id: jobId,
const { body } = await mlClient.anomalySearch<AnomalyCategorizerStatsDoc>(
{
body: {
query: {
bool: {
must: mustMatchClauses,
filter: [
{
term: {
job_id: jobId,
},
},
},
],
],
},
},
},
},
});
[]
);
return body ? body.hits.hits.map((r) => r._source) : [];
}
@ -480,7 +486,7 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
};
// first determine from job config if stop_on_warn is true
// if false return []
const { body } = await asInternalUser.ml.getJobs<MlJobsResponse>({
const { body } = await mlClient.getJobs<MlJobsResponse>({
job_id: jobIds.join(),
});
@ -538,25 +544,27 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
},
},
];
const { body: results } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
query: {
bool: {
must: mustMatchClauses,
filter: [
{
terms: {
job_id: jobIdsWithStopOnWarnSet,
const { body: results } = await mlClient.anomalySearch<any>(
{
size: 0,
body: {
query: {
bool: {
must: mustMatchClauses,
filter: [
{
terms: {
job_id: jobIdsWithStopOnWarnSet,
},
},
},
],
],
},
},
aggs,
},
aggs,
},
});
[]
);
if (fieldToBucket === JOB_ID) {
finalResults = {
jobs: results.aggregations?.unique_terms?.buckets.map(
@ -589,7 +597,7 @@ export function resultsServiceProvider(client: IScopedClusterClient) {
getCategoryExamples,
getLatestBucketTimestampByJob,
getMaxAnomalyScore,
getPartitionFieldsValues: getPartitionFieldsValuesFactory(client),
getPartitionFieldsValues: getPartitionFieldsValuesFactory(mlClient),
getCategorizerStats,
getCategoryStoppedPartitions,
};
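Throughout this results service the direct `asInternalUser.search` calls against `ML_RESULTS_INDEX_PATTERN` are replaced by `mlClient.anomalySearch(searchParams, jobIds)`, so results queries can be restricted to the jobs visible in the current space. A minimal sketch of what such a wrapper could look like is below; the `jobSavedObjectService` name and its `filterJobIdsForSpace` method are assumptions for illustration, not the implementation added in this commit.

```ts
import type { IScopedClusterClient } from 'kibana/server';
import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns';

interface JobSavedObjectService {
  // hypothetical helper: returns the subset of job IDs assigned to the current space
  filterJobIdsForSpace(jobIds: string[]): Promise<string[]>;
}

export function anomalySearchFactory(
  { asInternalUser }: IScopedClusterClient,
  jobSavedObjectService: JobSavedObjectService
) {
  // callers pass [] to mean "all jobs visible in this space"
  return async function anomalySearch<T = any>(
    searchParams: { size?: number; body?: any },
    jobIds: string[]
  ) {
    const allowedJobIds = await jobSavedObjectService.filterJobIdsForSpace(jobIds);
    if (jobIds.length > 0 && allowedJobIds.length === 0) {
      throw new Error(`None of the jobs [${jobIds.join(',')}] are available in the current space`);
    }
    // the results index is always the shared .ml-anomalies-* pattern; space awareness
    // comes from restricting which job_ids a caller may query
    return asInternalUser.search<T>({
      index: ML_RESULTS_INDEX_PATTERN,
      ...searchParams,
    });
  };
}
```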


@ -14,6 +14,8 @@ import {
PluginInitializerContext,
CapabilitiesStart,
IClusterClient,
SavedObjectsServiceStart,
SavedObjectsClientContract,
} from 'kibana/server';
import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/server';
import { PluginsSetup, RouteInitialization } from './types';
@ -37,17 +39,19 @@ import { indicesRoutes } from './routes/indices';
import { jobAuditMessagesRoutes } from './routes/job_audit_messages';
import { jobRoutes } from './routes/anomaly_detectors';
import { jobServiceRoutes } from './routes/job_service';
import { savedObjectsRoutes } from './routes/saved_objects';
import { jobValidationRoutes } from './routes/job_validation';
import { notificationRoutes } from './routes/notification_settings';
import { resultsServiceRoutes } from './routes/results_service';
import { systemRoutes } from './routes/system';
import { MlLicense } from '../common/license';
import { MlServerLicense } from './lib/license';
import { createSharedServices, SharedServices } from './shared_services';
import { getPluginPrivileges } from '../common/types/capabilities';
import { setupCapabilitiesSwitcher } from './lib/capabilities';
import { registerKibanaSettings } from './lib/register_settings';
import { trainedModelsRoutes } from './routes/trained_models';
import { setupSavedObjects } from './saved_objects';
import { RouteGuard } from './lib/route_guard';
export type MlPluginSetup = SharedServices;
export type MlPluginStart = void;
@ -55,14 +59,15 @@ export type MlPluginStart = void;
export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, PluginsSetup> {
private log: Logger;
private version: string;
private mlLicense: MlServerLicense;
private mlLicense: MlLicense;
private capabilities: CapabilitiesStart | null = null;
private clusterClient: IClusterClient | null = null;
private savedObjectsStart: SavedObjectsServiceStart | null = null;
constructor(ctx: PluginInitializerContext) {
this.log = ctx.logger.get();
this.version = ctx.env.packageInfo.branch;
this.mlLicense = new MlServerLicense();
this.mlLicense = new MlLicense();
}
public setup(coreSetup: CoreSetup, plugins: PluginsSetup): MlPluginSetup {
@ -113,9 +118,20 @@ export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, Plug
// initialize capabilities switcher to add license filter to ml capabilities
setupCapabilitiesSwitcher(coreSetup, plugins.licensing.license$, this.log);
setupSavedObjects(coreSetup.savedObjects);
const getMlSavedObjectsClient = (request: KibanaRequest): SavedObjectsClientContract | null => {
if (this.savedObjectsStart === null) {
return null;
}
return this.savedObjectsStart.getScopedClient(request, {
includedHiddenTypes: ['ml-job'],
});
};
const routeInit: RouteInitialization = {
router: coreSetup.http.createRouter(),
routeGuard: new RouteGuard(this.mlLicense, getMlSavedObjectsClient),
mlLicense: this.mlLicense,
};
@ -143,16 +159,17 @@ export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, Plug
notificationRoutes(routeInit);
resultsServiceRoutes(routeInit);
jobValidationRoutes(routeInit, this.version);
savedObjectsRoutes(routeInit);
systemRoutes(routeInit, {
spaces: plugins.spaces,
cloud: plugins.cloud,
resolveMlCapabilities,
});
trainedModelsRoutes(routeInit);
initMlServerLog({ log: this.log });
initMlTelemetry(coreSetup, plugins.usageCollection);
trainedModelsRoutes(routeInit);
return {
...createSharedServices(
this.mlLicense,
@ -167,6 +184,7 @@ export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, Plug
public start(coreStart: CoreStart): MlPluginStart {
this.capabilities = coreStart.capabilities;
this.clusterClient = coreStart.elasticsearch.client;
this.savedObjectsStart = coreStart.savedObjects;
}
public stop() {
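The route modules below now receive a `routeGuard` instead of calling `mlLicense.fullLicenseAPIGuard` directly. The sketch below shows the idea: the guard combines the existing license check with the per-request ML saved objects client created above, and hands both a scoped `client` and a space-aware `mlClient` to the handler. The constructor arguments mirror the `new RouteGuard(this.mlLicense, getMlSavedObjectsClient)` call in this file, but the internals, in particular how the `MlClient` is built, are assumptions rather than the code added in this commit.

```ts
import type {
  KibanaRequest,
  KibanaResponseFactory,
  RequestHandlerContext,
  SavedObjectsClientContract,
} from 'kibana/server';

// Assumed shapes, for illustration only.
interface LicenseLike {
  isFullLicense(): boolean;
}
type MlClientLike = Record<string, (...args: any[]) => Promise<any>>;

export class RouteGuardSketch {
  constructor(
    private license: LicenseLike,
    private getSavedObjectsClient: (request: KibanaRequest) => SavedObjectsClientContract | null,
    // hypothetical factory that wraps the ES ML APIs with job-space checks
    private getMlClient: (
      context: RequestHandlerContext,
      savedObjectsClient: SavedObjectsClientContract
    ) => MlClientLike
  ) {}

  fullLicenseAPIGuard(
    handler: (params: {
      client: RequestHandlerContext['core']['elasticsearch']['client'];
      mlClient: MlClientLike;
      request: KibanaRequest;
      response: KibanaResponseFactory;
      context: RequestHandlerContext;
    }) => Promise<any>
  ) {
    return async (
      context: RequestHandlerContext,
      request: KibanaRequest,
      response: KibanaResponseFactory
    ) => {
      if (!this.license.isFullLicense()) {
        return response.forbidden({ body: 'A full ML license is required' });
      }
      const savedObjectsClient = this.getSavedObjectsClient(request);
      if (savedObjectsClient === null) {
        return response.customError({ statusCode: 503, body: 'Saved objects client unavailable' });
      }
      return handler({
        client: context.core.elasticsearch.client,
        mlClient: this.getMlClient(context, savedObjectsClient),
        request,
        response,
        context,
      });
    };
  }
}
```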


@ -33,7 +33,7 @@ function getAnnotationsFeatureUnavailableErrorMessage() {
* Routes for annotations
*/
export function annotationRoutes(
{ router, mlLicense }: RouteInitialization,
{ router, routeGuard }: RouteInitialization,
securityPlugin?: SecurityPluginSetup
) {
/**
@ -58,7 +58,7 @@ export function annotationRoutes(
tags: ['access:ml:canGetAnnotations'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { getAnnotations } = annotationServiceProvider(client);
const resp = await getAnnotations(request.body);
@ -91,7 +91,7 @@ export function annotationRoutes(
tags: ['access:ml:canCreateAnnotation'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(client);
if (annotationsFeatureAvailable === false) {
@ -134,7 +134,7 @@ export function annotationRoutes(
tags: ['access:ml:canDeleteAnnotation'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(client);
if (annotationsFeatureAvailable === false) {


@ -22,10 +22,12 @@ import {
updateModelSnapshotSchema,
} from './schemas/anomaly_detectors_schema';
import { Job, JobStats } from '../../common/types/anomaly_detection_jobs';
/**
* Routes for the anomaly detectors
*/
export function jobRoutes({ router, mlLicense }: RouteInitialization) {
export function jobRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup AnomalyDetectors
*
@ -44,9 +46,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ response, client }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const { body } = await client.asInternalUser.ml.getJobs();
const { body } = await mlClient.getJobs<{ jobs: Job[] }>();
return response.ok({
body,
});
@ -75,10 +77,10 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { jobId } = request.params;
const { body } = await client.asInternalUser.ml.getJobs({ job_id: jobId });
const { body } = await mlClient.getJobs<{ jobs: Job[] }>({ job_id: jobId });
return response.ok({
body,
});
@ -106,9 +108,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const { body } = await client.asInternalUser.ml.getJobStats();
const { body } = await mlClient.getJobStats<{ jobs: JobStats[] }>();
return response.ok({
body,
});
@ -137,10 +139,10 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { jobId } = request.params;
const { body } = await client.asInternalUser.ml.getJobStats({ job_id: jobId });
const { body } = await mlClient.getJobStats({ job_id: jobId });
return response.ok({
body,
});
@ -173,13 +175,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { jobId } = request.params;
const { body } = await client.asInternalUser.ml.putJob({
const { body } = await mlClient.putJob({
job_id: jobId,
body: request.body,
});
return response.ok({
body,
});
@ -210,10 +213,10 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canUpdateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { jobId } = request.params;
const { body } = await client.asInternalUser.ml.updateJob({
const { body } = await mlClient.updateJob({
job_id: jobId,
body: request.body,
});
@ -245,10 +248,10 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canOpenJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { jobId } = request.params;
const { body } = await client.asInternalUser.ml.openJob({ job_id: jobId });
const { body } = await mlClient.openJob({ job_id: jobId });
return response.ok({
body,
});
@ -277,7 +280,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCloseJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const options: RequestParams.MlCloseJob = {
job_id: request.params.jobId,
@ -286,7 +289,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
if (force !== undefined) {
options.force = force;
}
const { body } = await client.asInternalUser.ml.closeJob(options);
const { body } = await mlClient.closeJob(options);
return response.ok({
body,
});
@ -315,7 +318,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const options: RequestParams.MlDeleteJob = {
job_id: request.params.jobId,
@ -325,7 +328,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
if (force !== undefined) {
options.force = force;
}
const { body } = await client.asInternalUser.ml.deleteJob(options);
const { body } = await mlClient.deleteJob(options);
return response.ok({
body,
});
@ -352,9 +355,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.validateDetector({ body: request.body });
const { body } = await mlClient.validateDetector({ body: request.body });
return response.ok({
body,
});
@ -385,11 +388,11 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canForecastJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const jobId = request.params.jobId;
const duration = request.body.duration;
const { body } = await client.asInternalUser.ml.forecast({
const { body } = await mlClient.forecast({
job_id: jobId,
duration,
});
@ -426,9 +429,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.getRecords({
const { body } = await mlClient.getRecords({
job_id: request.params.jobId,
body: request.body,
});
@ -465,9 +468,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.getBuckets({
const { body } = await mlClient.getBuckets({
job_id: request.params.jobId,
timestamp: request.params.timestamp,
body: request.body,
@ -505,9 +508,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.getOverallBuckets({
const { body } = await mlClient.getOverallBuckets({
job_id: request.params.jobId,
top_n: request.body.topN,
bucket_span: request.body.bucketSpan,
@ -542,9 +545,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.getCategories({
const { body } = await mlClient.getCategories({
job_id: request.params.jobId,
category_id: request.params.categoryId,
});
@ -576,9 +579,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.getModelSnapshots({
const { body } = await mlClient.getModelSnapshots({
job_id: request.params.jobId,
});
return response.ok({
@ -609,9 +612,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.getModelSnapshots({
const { body } = await mlClient.getModelSnapshots({
job_id: request.params.jobId,
snapshot_id: request.params.snapshotId,
});
@ -645,9 +648,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.updateModelSnapshot({
const { body } = await mlClient.updateModelSnapshot({
job_id: request.params.jobId,
snapshot_id: request.params.snapshotId,
body: request.body,
@ -680,9 +683,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.deleteModelSnapshot({
const { body } = await mlClient.deleteModelSnapshot({
job_id: request.params.jobId,
snapshot_id: request.params.snapshotId,
});
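Taken together, these handlers give a picture of the new client's surface: every `client.asInternalUser.ml.<method>` call becomes `mlClient.<method>` with the same parameters. The interface below is only inferred from the calls in this file; the real `MlClient` type from `../lib/ml_client` is not shown in this diff and also covers datafeeds, calendars, filters, data frame analytics and `anomalySearch`.

```ts
// Inferred sketch, not the actual MlClient definition.
type MlCall<T = any> = Promise<{ body: T }>;

interface MlClientSketch {
  getJobs<T = any>(params?: { job_id?: string }): MlCall<T>;
  getJobStats<T = any>(params?: { job_id?: string }): MlCall<T>;
  putJob(params: { job_id: string; body: object }): MlCall;
  updateJob(params: { job_id: string; body: object }): MlCall;
  openJob(params: { job_id: string }): MlCall;
  closeJob(params: { job_id: string; force?: boolean }): MlCall;
  deleteJob(params: { job_id: string; force?: boolean }): MlCall;
  validateDetector(params: { body: object }): MlCall;
  forecast(params: { job_id: string; duration?: string }): MlCall;
  getRecords(params: { job_id: string; body: object }): MlCall;
  getBuckets(params: { job_id: string; timestamp?: string; body: object }): MlCall;
  getCategories(params: { job_id: string; category_id?: string }): MlCall;
  getModelSnapshots(params: { job_id: string; snapshot_id?: string }): MlCall;
  updateModelSnapshot(params: { job_id: string; snapshot_id: string; body: object }): MlCall;
  deleteModelSnapshot(params: { job_id: string; snapshot_id: string }): MlCall;
  // ...plus getOverallBuckets and the other endpoints used elsewhere in this commit
}
```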


@ -138,6 +138,14 @@
"FieldsService",
"GetCardinalityOfFields",
"GetTimeFieldRange"
"GetTimeFieldRange",
"JobSavedObjects",
"SavedObjectsStatus",
"RepairJobSavedObjects",
"InitializeJobSavedObjects",
"AssignJobsToSpaces",
"RemoveJobsFromSpaces",
"JobsSpaces"
]
}


@ -4,43 +4,43 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import { calendarSchema, calendarIdSchema, calendarIdsSchema } from './schemas/calendars_schema';
import { CalendarManager, Calendar, FormCalendar } from '../models/calendar';
import type { MlClient } from '../lib/ml_client';
function getAllCalendars(client: IScopedClusterClient) {
const cal = new CalendarManager(client);
function getAllCalendars(mlClient: MlClient) {
const cal = new CalendarManager(mlClient);
return cal.getAllCalendars();
}
function getCalendar(client: IScopedClusterClient, calendarId: string) {
const cal = new CalendarManager(client);
function getCalendar(mlClient: MlClient, calendarId: string) {
const cal = new CalendarManager(mlClient);
return cal.getCalendar(calendarId);
}
function newCalendar(client: IScopedClusterClient, calendar: FormCalendar) {
const cal = new CalendarManager(client);
function newCalendar(mlClient: MlClient, calendar: FormCalendar) {
const cal = new CalendarManager(mlClient);
return cal.newCalendar(calendar);
}
function updateCalendar(client: IScopedClusterClient, calendarId: string, calendar: Calendar) {
const cal = new CalendarManager(client);
function updateCalendar(mlClient: MlClient, calendarId: string, calendar: Calendar) {
const cal = new CalendarManager(mlClient);
return cal.updateCalendar(calendarId, calendar);
}
function deleteCalendar(client: IScopedClusterClient, calendarId: string) {
const cal = new CalendarManager(client);
function deleteCalendar(mlClient: MlClient, calendarId: string) {
const cal = new CalendarManager(mlClient);
return cal.deleteCalendar(calendarId);
}
function getCalendarsByIds(client: IScopedClusterClient, calendarIds: string) {
const cal = new CalendarManager(client);
function getCalendarsByIds(mlClient: MlClient, calendarIds: string) {
const cal = new CalendarManager(mlClient);
return cal.getCalendarsByIds(calendarIds);
}
export function calendars({ router, mlLicense }: RouteInitialization) {
export function calendars({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup Calendars
*
@ -56,9 +56,9 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetCalendars'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const resp = await getAllCalendars(client);
const resp = await getAllCalendars(mlClient);
return response.ok({
body: resp,
@ -88,15 +88,15 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetCalendars'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
let returnValue;
try {
const calendarIds = request.params.calendarIds.split(',');
if (calendarIds.length === 1) {
returnValue = await getCalendar(client, calendarIds[0]);
returnValue = await getCalendar(mlClient, calendarIds[0]);
} else {
returnValue = await getCalendarsByIds(client, calendarIds);
returnValue = await getCalendarsByIds(mlClient, calendarIds);
}
return response.ok({
@ -127,10 +127,10 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateCalendar'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const body = request.body;
const resp = await newCalendar(client, body);
const resp = await newCalendar(mlClient, body);
return response.ok({
body: resp,
@ -162,11 +162,11 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateCalendar'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { calendarId } = request.params;
const body = request.body;
const resp = await updateCalendar(client, calendarId, body);
const resp = await updateCalendar(mlClient, calendarId, body);
return response.ok({
body: resp,
@ -196,10 +196,10 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteCalendar'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { calendarId } = request.params;
const resp = await deleteCalendar(client, calendarId);
const resp = await deleteCalendar(mlClient, calendarId);
return response.ok({
body: resp,


@ -35,7 +35,7 @@ function deleteDestIndexPatternById(context: RequestHandlerContext, indexPattern
/**
* Routes for the data frame analytics
*/
export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitialization) {
export function dataFrameAnalyticsRoutes({ router, mlLicense, routeGuard }: RouteInitialization) {
async function userCanDeleteIndex(
client: IScopedClusterClient,
destinationIndex: string
@ -76,9 +76,9 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({ size: 1000 });
const { body } = await mlClient.getDataFrameAnalytics({ size: 1000 });
return response.ok({
body,
});
@ -107,10 +107,10 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { analyticsId } = request.params;
const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({
const { body } = await mlClient.getDataFrameAnalytics({
id: analyticsId,
});
return response.ok({
@ -137,9 +137,9 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const { body } = await client.asInternalUser.ml.getDataFrameAnalyticsStats({ size: 1000 });
const { body } = await mlClient.getDataFrameAnalyticsStats({ size: 1000 });
return response.ok({
body,
});
@ -168,10 +168,10 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { analyticsId } = request.params;
const { body } = await client.asInternalUser.ml.getDataFrameAnalyticsStats({
const { body } = await mlClient.getDataFrameAnalyticsStats({
id: analyticsId,
});
return response.ok({
@ -205,10 +205,10 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canCreateDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { analyticsId } = request.params;
const { body } = await client.asInternalUser.ml.putDataFrameAnalytics(
const { body } = await mlClient.putDataFrameAnalytics(
{
id: analyticsId,
body: request.body,
@ -243,9 +243,9 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.evaluateDataFrame(
const { body } = await mlClient.evaluateDataFrame(
{
body: request.body,
},
@ -280,9 +280,9 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canCreateDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.explainDataFrameAnalytics(
const { body } = await mlClient.explainDataFrameAnalytics(
{
body: request.body,
},
@ -317,7 +317,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canDeleteDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, client, request, response, context }) => {
try {
const { analyticsId } = request.params;
const { deleteDestIndex, deleteDestIndexPattern } = request.query;
@ -330,7 +330,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
try {
// Check if analyticsId is valid and get destination index
const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({
const { body } = await mlClient.getDataFrameAnalytics({
id: analyticsId,
});
if (Array.isArray(body.data_frame_analytics) && body.data_frame_analytics.length > 0) {
@ -378,7 +378,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
// Delete the data frame analytics
try {
await client.asInternalUser.ml.deleteDataFrameAnalytics({
await mlClient.deleteDataFrameAnalytics({
id: analyticsId,
});
analyticsJobDeleted.success = true;
@ -418,10 +418,10 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canStartStopDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { analyticsId } = request.params;
const { body } = await client.asInternalUser.ml.startDataFrameAnalytics({
const { body } = await mlClient.startDataFrameAnalytics({
id: analyticsId,
});
return response.ok({
@ -454,12 +454,13 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canStartStopDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { body } = await client.asInternalUser.ml.stopDataFrameAnalytics({
const { body } = await mlClient.stopDataFrameAnalytics({
id: request.params.analyticsId,
force: request.query.force,
});
return response.ok({
body,
});
@ -489,10 +490,10 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canCreateDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { analyticsId } = request.params;
const { body } = await client.asInternalUser.ml.updateDataFrameAnalytics(
const { body } = await mlClient.updateDataFrameAnalytics(
{
id: analyticsId,
body: request.body,
@ -527,7 +528,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { analyticsId } = request.params;
const { getAnalyticsAuditMessages } = analyticsAuditMessagesProvider(client);
@ -561,10 +562,13 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, client, request, response }) => {
try {
const { analyticsId } = request.params;
const { getRegressionAnalyticsBaseline } = analyticsFeatureImportanceProvider(client);
const { getRegressionAnalyticsBaseline } = analyticsFeatureImportanceProvider(
client,
mlClient
);
const baseline = await getRegressionAnalyticsBaseline(analyticsId);
return response.ok({


@ -80,7 +80,7 @@ function getHistogramsForFields(
/**
* Routes for the index data visualizer.
*/
export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization) {
export function dataVisualizerRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup DataVisualizer
*
@ -104,7 +104,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
params: { indexPatternTitle },
@ -151,7 +151,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
params: { indexPatternTitle },
@ -216,7 +216,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
params: { indexPatternTitle },


@ -15,10 +15,12 @@ import {
} from './schemas/datafeeds_schema';
import { getAuthorizationHeader } from '../lib/request_authorization';
import { Datafeed, DatafeedStats } from '../../common/types/anomaly_detection_jobs';
/**
* Routes for datafeed service
*/
export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
export function dataFeedRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup DatafeedService
*
@ -34,10 +36,9 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetDatafeeds'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const { body } = await client.asInternalUser.ml.getDatafeeds();
const { body } = await mlClient.getDatafeeds<{ datafeeds: Datafeed[] }>();
return response.ok({
body,
});
@ -66,10 +67,10 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetDatafeeds'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const { body } = await client.asInternalUser.ml.getDatafeeds({ datafeed_id: datafeedId });
const { body } = await mlClient.getDatafeeds({ datafeed_id: datafeedId });
return response.ok({
body,
@ -95,10 +96,11 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetDatafeeds'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const { body } = await client.asInternalUser.ml.getDatafeedStats();
const { body } = await mlClient.getDatafeedStats<{
datafeeds: DatafeedStats[];
}>();
return response.ok({
body,
});
@ -127,10 +129,10 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetDatafeeds'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const { body } = await client.asInternalUser.ml.getDatafeedStats({
const { body } = await mlClient.getDatafeedStats({
datafeed_id: datafeedId,
});
@ -164,10 +166,10 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const { body } = await client.asInternalUser.ml.putDatafeed(
const { body } = await mlClient.putDatafeed(
{
datafeed_id: datafeedId,
body: request.body,
@ -205,10 +207,10 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canUpdateDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const { body } = await client.asInternalUser.ml.updateDatafeed(
const { body } = await mlClient.updateDatafeed(
{
datafeed_id: datafeedId,
body: request.body,
@ -246,17 +248,17 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const options: RequestParams.MlDeleteDatafeed = {
datafeed_id: request.params.jobId,
datafeed_id: request.params.datafeedId,
};
const force = request.query.force;
if (force !== undefined) {
options.force = force;
}
const { body } = await client.asInternalUser.ml.deleteDatafeed(options);
const { body } = await mlClient.deleteDatafeed(options);
return response.ok({
body,
@ -288,12 +290,12 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const { start, end } = request.body;
const { body } = await client.asInternalUser.ml.startDatafeed({
const { body } = await mlClient.startDatafeed({
datafeed_id: datafeedId,
start,
end,
@ -327,11 +329,11 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const { body } = await client.asInternalUser.ml.stopDatafeed({
const { body } = await mlClient.stopDatafeed({
datafeed_id: datafeedId,
});
@ -363,10 +365,10 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canPreviewDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const { body } = await client.asInternalUser.ml.previewDatafeed(
const { body } = await mlClient.previewDatafeed(
{
datafeed_id: datafeedId,
},


@ -28,7 +28,7 @@ function getTimeFieldRange(client: IScopedClusterClient, payload: any) {
/**
* Routes for fields service
*/
export function fieldsService({ router, mlLicense }: RouteInitialization) {
export function fieldsService({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup FieldsService
*
@ -50,7 +50,7 @@ export function fieldsService({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canAccessML'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getCardinalityOfFields(client, request.body);
@ -85,7 +85,7 @@ export function fieldsService({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getTimeFieldRange(client, request.body);


@ -27,9 +27,10 @@ import {
importFileBodySchema,
importFileQuerySchema,
} from './schemas/file_data_visualizer_schema';
import type { MlClient } from '../lib/ml_client';
function analyzeFiles(client: IScopedClusterClient, data: InputData, overrides: InputOverrides) {
const { analyzeFile } = fileDataVisualizerProvider(client);
function analyzeFiles(mlClient: MlClient, data: InputData, overrides: InputOverrides) {
const { analyzeFile } = fileDataVisualizerProvider(mlClient);
return analyzeFile(data, overrides);
}
@ -49,7 +50,7 @@ function importData(
/**
* Routes for the file data visualizer.
*/
export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitialization) {
export function fileDataVisualizerRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup FileDataVisualizer
*
@ -74,9 +75,9 @@ export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canFindFileStructure'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const result = await analyzeFiles(client, request.body, request.query);
const result = await analyzeFiles(mlClient, request.body, request.query);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
@ -109,7 +110,7 @@ export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canFindFileStructure'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { id } = request.query;
const { index, data, settings, mappings, ingestPipeline } = request.body;


@ -4,45 +4,45 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import { createFilterSchema, filterIdSchema, updateFilterSchema } from './schemas/filters_schema';
import { FilterManager, FormFilter } from '../models/filter';
import type { MlClient } from '../lib/ml_client';
// TODO - add function for returning a list of just the filter IDs.
// TODO - add function for returning a list of filter IDs plus item count.
function getAllFilters(client: IScopedClusterClient) {
const mgr = new FilterManager(client);
function getAllFilters(mlClient: MlClient) {
const mgr = new FilterManager(mlClient);
return mgr.getAllFilters();
}
function getAllFilterStats(client: IScopedClusterClient) {
const mgr = new FilterManager(client);
function getAllFilterStats(mlClient: MlClient) {
const mgr = new FilterManager(mlClient);
return mgr.getAllFilterStats();
}
function getFilter(client: IScopedClusterClient, filterId: string) {
const mgr = new FilterManager(client);
function getFilter(mlClient: MlClient, filterId: string) {
const mgr = new FilterManager(mlClient);
return mgr.getFilter(filterId);
}
function newFilter(client: IScopedClusterClient, filter: FormFilter) {
const mgr = new FilterManager(client);
function newFilter(mlClient: MlClient, filter: FormFilter) {
const mgr = new FilterManager(mlClient);
return mgr.newFilter(filter);
}
function updateFilter(client: IScopedClusterClient, filterId: string, filter: FormFilter) {
const mgr = new FilterManager(client);
function updateFilter(mlClient: MlClient, filterId: string, filter: FormFilter) {
const mgr = new FilterManager(mlClient);
return mgr.updateFilter(filterId, filter);
}
function deleteFilter(client: IScopedClusterClient, filterId: string) {
const mgr = new FilterManager(client);
function deleteFilter(mlClient: MlClient, filterId: string) {
const mgr = new FilterManager(mlClient);
return mgr.deleteFilter(filterId);
}
export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
export function filtersRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup Filters
*
@ -61,9 +61,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetFilters'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const resp = await getAllFilters(client);
const resp = await getAllFilters(mlClient);
return response.ok({
body: resp,
@ -96,9 +96,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetFilters'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const resp = await getFilter(client, request.params.filterId);
const resp = await getFilter(mlClient, request.params.filterId);
return response.ok({
body: resp,
});
@ -130,10 +130,10 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateFilter'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const body = request.body;
const resp = await newFilter(client, body);
const resp = await newFilter(mlClient, body);
return response.ok({
body: resp,
@ -168,11 +168,11 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateFilter'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { filterId } = request.params;
const body = request.body;
const resp = await updateFilter(client, filterId, body);
const resp = await updateFilter(mlClient, filterId, body);
return response.ok({
body: resp,
@ -202,10 +202,10 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteFilter'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { filterId } = request.params;
const resp = await deleteFilter(client, filterId);
const resp = await deleteFilter(mlClient, filterId);
return response.ok({
body: resp,
@ -235,9 +235,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetFilters'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const resp = await getAllFilterStats(client);
const resp = await getAllFilterStats(mlClient);
return response.ok({
body: resp,

View file

@ -11,7 +11,7 @@ import { indicesSchema } from './schemas/indices_schema';
/**
* Indices routes.
*/
export function indicesRoutes({ router, mlLicense }: RouteInitialization) {
export function indicesRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup Indices
*
@ -31,7 +31,7 @@ export function indicesRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canAccessML'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
body: { index, fields: requestFields },

View file

@ -15,7 +15,7 @@ import {
/**
* Routes for job audit messages
*/
export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitialization) {
export function jobAuditMessagesRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup JobAuditMessages
*
@ -37,20 +37,22 @@ export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitializatio
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { getJobAuditMessages } = jobAuditMessagesProvider(client);
const { jobId } = request.params;
const { from } = request.query;
const resp = await getJobAuditMessages(jobId, from);
routeGuard.fullLicenseAPIGuard(
async ({ client, mlClient, request, response, jobSavedObjectService }) => {
try {
const { getJobAuditMessages } = jobAuditMessagesProvider(client, mlClient);
const { jobId } = request.params;
const { from } = request.query;
const resp = await getJobAuditMessages(jobSavedObjectService, jobId, from);
return response.ok({
body: resp,
});
} catch (e) {
return response.customError(wrapError(e));
return response.ok({
body: resp,
});
} catch (e) {
return response.customError(wrapError(e));
}
}
})
)
);
/**
@ -72,18 +74,20 @@ export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitializatio
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { getJobAuditMessages } = jobAuditMessagesProvider(client);
const { from } = request.query;
const resp = await getJobAuditMessages(undefined, from);
routeGuard.fullLicenseAPIGuard(
async ({ client, mlClient, request, response, jobSavedObjectService }) => {
try {
const { getJobAuditMessages } = jobAuditMessagesProvider(client, mlClient);
const { from } = request.query;
const resp = await getJobAuditMessages(jobSavedObjectService, undefined, from);
return response.ok({
body: resp,
});
} catch (e) {
return response.customError(wrapError(e));
return response.ok({
body: resp,
});
} catch (e) {
return response.customError(wrapError(e));
}
}
})
)
);
}

View file

@ -28,7 +28,7 @@ import { categorizationExamplesProvider } from '../models/job_service/new_job';
/**
* Routes for job service
*/
export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
export function jobServiceRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup JobService
*
@ -48,9 +48,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { forceStartDatafeeds } = jobServiceProvider(client);
const { forceStartDatafeeds } = jobServiceProvider(client, mlClient);
const { datafeedIds, start, end } = request.body;
const resp = await forceStartDatafeeds(datafeedIds, start, end);
@ -82,9 +82,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { stopDatafeeds } = jobServiceProvider(client);
const { stopDatafeeds } = jobServiceProvider(client, mlClient);
const { datafeedIds } = request.body;
const resp = await stopDatafeeds(datafeedIds);
@ -116,9 +116,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { deleteJobs } = jobServiceProvider(client);
const { deleteJobs } = jobServiceProvider(client, mlClient);
const { jobIds } = request.body;
const resp = await deleteJobs(jobIds);
@ -150,9 +150,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCloseJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { closeJobs } = jobServiceProvider(client);
const { closeJobs } = jobServiceProvider(client, mlClient);
const { jobIds } = request.body;
const resp = await closeJobs(jobIds);
@ -184,9 +184,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCloseJob', 'access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { forceStopAndCloseJob } = jobServiceProvider(client);
const { forceStopAndCloseJob } = jobServiceProvider(client, mlClient);
const { jobId } = request.body;
const resp = await forceStopAndCloseJob(jobId);
@ -223,9 +223,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { jobsSummary } = jobServiceProvider(client);
const { jobsSummary } = jobServiceProvider(client, mlClient);
const { jobIds } = request.body;
const resp = await jobsSummary(jobIds);
@ -257,9 +257,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, response }) => {
try {
const { jobsWithTimerange } = jobServiceProvider(client);
const { jobsWithTimerange } = jobServiceProvider(client, mlClient);
const resp = await jobsWithTimerange();
return response.ok({
@ -290,9 +290,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { createFullJobsList } = jobServiceProvider(client);
const { createFullJobsList } = jobServiceProvider(client, mlClient);
const { jobIds } = request.body;
const resp = await createFullJobsList(jobIds);
@ -320,9 +320,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, response }) => {
try {
const { getAllGroups } = jobServiceProvider(client);
const { getAllGroups } = jobServiceProvider(client, mlClient);
const resp = await getAllGroups();
return response.ok({
@ -353,9 +353,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canUpdateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { updateGroups } = jobServiceProvider(client);
const { updateGroups } = jobServiceProvider(client, mlClient);
const { jobs } = request.body;
const resp = await updateGroups(jobs);
@ -383,9 +383,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, response }) => {
try {
const { deletingJobTasks } = jobServiceProvider(client);
const { deletingJobTasks } = jobServiceProvider(client, mlClient);
const resp = await deletingJobTasks();
return response.ok({
@ -416,9 +416,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { jobsExist } = jobServiceProvider(client);
const { jobsExist } = jobServiceProvider(client, mlClient);
const { jobIds } = request.body;
const resp = await jobsExist(jobIds);
@ -449,12 +449,12 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response, context }) => {
try {
const { indexPattern } = request.params;
const isRollup = request.query.rollup === 'true';
const savedObjectsClient = context.core.savedObjects.client;
const { newJobCaps } = jobServiceProvider(client);
const { newJobCaps } = jobServiceProvider(client, mlClient);
const resp = await newJobCaps(indexPattern, isRollup, savedObjectsClient);
return response.ok({
@ -485,7 +485,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const {
indexPatternTitle,
@ -499,7 +499,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
splitFieldValue,
} = request.body;
const { newJobLineChart } = jobServiceProvider(client);
const { newJobLineChart } = jobServiceProvider(client, mlClient);
const resp = await newJobLineChart(
indexPatternTitle,
timeField,
@ -540,7 +540,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const {
indexPatternTitle,
@ -553,7 +553,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
splitFieldName,
} = request.body;
const { newJobPopulationChart } = jobServiceProvider(client);
const { newJobPopulationChart } = jobServiceProvider(client, mlClient);
const resp = await newJobPopulationChart(
indexPatternTitle,
timeField,
@ -589,9 +589,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, response }) => {
try {
const { getAllJobAndGroupIds } = jobServiceProvider(client);
const { getAllJobAndGroupIds } = jobServiceProvider(client, mlClient);
const resp = await getAllJobAndGroupIds();
return response.ok({
@ -622,9 +622,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { getLookBackProgress } = jobServiceProvider(client);
const { getLookBackProgress } = jobServiceProvider(client, mlClient);
const { jobId, start, end } = request.body;
const resp = await getLookBackProgress(jobId, start, end);
@ -656,7 +656,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { validateCategoryExamples } = categorizationExamplesProvider(client);
const {
@ -709,9 +709,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { topCategories } = jobServiceProvider(client);
const { topCategories } = jobServiceProvider(client, mlClient);
const { jobId, count } = request.body;
const resp = await topCategories(jobId, count);
@ -743,9 +743,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob', 'access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { revertModelSnapshot } = jobServiceProvider(client);
const { revertModelSnapshot } = jobServiceProvider(client, mlClient);
const {
jobId,
snapshotId,

View file

@ -19,20 +19,25 @@ import {
import { estimateBucketSpanFactory } from '../models/bucket_span_estimator';
import { calculateModelMemoryLimitProvider } from '../models/calculate_model_memory_limit';
import { validateJob, validateCardinality } from '../models/job_validation';
import type { MlClient } from '../lib/ml_client';
type CalculateModelMemoryLimitPayload = TypeOf<typeof modelMemoryLimitSchema>;
/**
* Routes for job validation
*/
export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, version: string) {
export function jobValidationRoutes(
{ router, mlLicense, routeGuard }: RouteInitialization,
version: string
) {
function calculateModelMemoryLimit(
client: IScopedClusterClient,
mlClient: MlClient,
payload: CalculateModelMemoryLimitPayload
) {
const { analysisConfig, indexPattern, query, timeFieldName, earliestMs, latestMs } = payload;
return calculateModelMemoryLimitProvider(client)(
return calculateModelMemoryLimitProvider(client, mlClient)(
analysisConfig as AnalysisConfig,
indexPattern,
query,
@ -61,7 +66,7 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization,
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
let errorResp;
const resp = await estimateBucketSpanFactory(client)(request.body)
@ -109,9 +114,9 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization,
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const resp = await calculateModelMemoryLimit(client, request.body);
const resp = await calculateModelMemoryLimit(client, mlClient, request.body);
return response.ok({
body: resp,
@ -141,7 +146,7 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization,
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await validateCardinality(client, request.body);
@ -173,11 +178,12 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization,
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
// version corresponds to the version used in documentation links.
const resp = await validateJob(
client,
mlClient,
request.body,
version,
mlLicense.isSecurityEnabled() === false

View file

@ -17,24 +17,27 @@ import {
setupModuleBodySchema,
} from './schemas/modules';
import { RouteInitialization } from '../types';
import type { MlClient } from '../lib/ml_client';
function recognize(
client: IScopedClusterClient,
mlClient: MlClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest,
indexPatternTitle: string
) {
const dr = new DataRecognizer(client, savedObjectsClient, request);
const dr = new DataRecognizer(client, mlClient, savedObjectsClient, request);
return dr.findMatches(indexPatternTitle);
}
function getModule(
client: IScopedClusterClient,
mlClient: MlClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest,
moduleId: string
) {
const dr = new DataRecognizer(client, savedObjectsClient, request);
const dr = new DataRecognizer(client, mlClient, savedObjectsClient, request);
if (moduleId === undefined) {
return dr.listModules();
} else {
@ -44,6 +47,7 @@ function getModule(
function setup(
client: IScopedClusterClient,
mlClient: MlClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest,
moduleId: string,
@ -59,7 +63,7 @@ function setup(
datafeedOverrides?: DatafeedOverride | DatafeedOverride[],
estimateModelMemory?: boolean
) {
const dr = new DataRecognizer(client, savedObjectsClient, request);
const dr = new DataRecognizer(client, mlClient, savedObjectsClient, request);
return dr.setup(
moduleId,
prefix,
@ -78,18 +82,19 @@ function setup(
function dataRecognizerJobsExist(
client: IScopedClusterClient,
mlClient: MlClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest,
moduleId: string
) {
const dr = new DataRecognizer(client, savedObjectsClient, request);
const dr = new DataRecognizer(client, mlClient, savedObjectsClient, request);
return dr.dataRecognizerJobsExist(moduleId);
}
/**
* Recognizer routes.
*/
export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
export function dataRecognizer({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup Modules
*
@ -127,11 +132,12 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response, context }) => {
try {
const { indexPatternTitle } = request.params;
const results = await recognize(
client,
mlClient,
context.core.savedObjects.client,
request,
indexPatternTitle
@ -262,7 +268,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response, context }) => {
try {
let { moduleId } = request.params;
if (moduleId === '') {
@ -272,6 +278,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
}
const results = await getModule(
client,
mlClient,
context.core.savedObjects.client,
request,
moduleId
@ -435,7 +442,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response, context }) => {
try {
const { moduleId } = request.params;
@ -455,6 +462,8 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
const result = await setup(
client,
mlClient,
context.core.savedObjects.client,
request,
moduleId,
@ -540,11 +549,13 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response, context }) => {
try {
const { moduleId } = request.params;
const result = await dataRecognizerJobsExist(
client,
mlClient,
context.core.savedObjects.client,
request,
moduleId

View file

@ -10,7 +10,7 @@ import { RouteInitialization } from '../types';
/**
* Routes for notification settings
*/
export function notificationRoutes({ router, mlLicense }: RouteInitialization) {
export function notificationRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup NotificationSettings
*
@ -26,7 +26,7 @@ export function notificationRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canAccessML'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const { body } = await client.asCurrentUser.cluster.getSettings({
include_defaults: true,

View file

@ -4,8 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { IScopedClusterClient } from 'kibana/server';
import { schema } from '@kbn/config-schema';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import {
@ -14,17 +12,18 @@ import {
categoryExamplesSchema,
maxAnomalyScoreSchema,
partitionFieldValuesSchema,
anomalySearchSchema,
} from './schemas/results_service_schema';
import { resultsServiceProvider } from '../models/results_service';
import { ML_RESULTS_INDEX_PATTERN } from '../../common/constants/index_patterns';
import { jobIdSchema } from './schemas/anomaly_detectors_schema';
import {
getCategorizerStatsSchema,
getCategorizerStoppedPartitionsSchema,
} from './schemas/results_service_schema';
import type { MlClient } from '../lib/ml_client';
function getAnomaliesTableData(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
function getAnomaliesTableData(mlClient: MlClient, payload: any) {
const rs = resultsServiceProvider(mlClient);
const {
jobIds,
criteriaFields,
@ -53,25 +52,25 @@ function getAnomaliesTableData(client: IScopedClusterClient, payload: any) {
);
}
function getCategoryDefinition(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
function getCategoryDefinition(mlClient: MlClient, payload: any) {
const rs = resultsServiceProvider(mlClient);
return rs.getCategoryDefinition(payload.jobId, payload.categoryId);
}
function getCategoryExamples(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
function getCategoryExamples(mlClient: MlClient, payload: any) {
const rs = resultsServiceProvider(mlClient);
const { jobId, categoryIds, maxExamples } = payload;
return rs.getCategoryExamples(jobId, categoryIds, maxExamples);
}
function getMaxAnomalyScore(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
function getMaxAnomalyScore(mlClient: MlClient, payload: any) {
const rs = resultsServiceProvider(mlClient);
const { jobIds, earliestMs, latestMs } = payload;
return rs.getMaxAnomalyScore(jobIds, earliestMs, latestMs);
}
function getPartitionFieldsValues(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
function getPartitionFieldsValues(mlClient: MlClient, payload: any) {
const rs = resultsServiceProvider(mlClient);
const { jobId, searchTerm, criteriaFields, earliestMs, latestMs, fieldsConfig } = payload;
return rs.getPartitionFieldsValues(
jobId,
@ -83,23 +82,23 @@ function getPartitionFieldsValues(client: IScopedClusterClient, payload: any) {
);
}
function getCategorizerStats(client: IScopedClusterClient, params: any, query: any) {
function getCategorizerStats(mlClient: MlClient, params: any, query: any) {
const { jobId } = params;
const { partitionByValue } = query;
const rs = resultsServiceProvider(client);
const rs = resultsServiceProvider(mlClient);
return rs.getCategorizerStats(jobId, partitionByValue);
}
function getCategoryStoppedPartitions(client: IScopedClusterClient, payload: any) {
function getCategoryStoppedPartitions(mlClient: MlClient, payload: any) {
const { jobIds, fieldToBucket } = payload;
const rs = resultsServiceProvider(client);
const rs = resultsServiceProvider(mlClient);
return rs.getCategoryStoppedPartitions(jobIds, fieldToBucket);
}
/**
* Routes for results service
*/
export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization) {
export function resultsServiceRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup ResultsService
*
@ -119,9 +118,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const resp = await getAnomaliesTableData(client, request.body);
const resp = await getAnomaliesTableData(mlClient, request.body);
return response.ok({
body: resp,
@ -151,9 +150,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const resp = await getCategoryDefinition(client, request.body);
const resp = await getCategoryDefinition(mlClient, request.body);
return response.ok({
body: resp,
@ -183,9 +182,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const resp = await getMaxAnomalyScore(client, request.body);
const resp = await getMaxAnomalyScore(mlClient, request.body);
return response.ok({
body: resp,
@ -215,9 +214,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const resp = await getCategoryExamples(client, request.body);
const resp = await getCategoryExamples(mlClient, request.body);
return response.ok({
body: resp,
@ -247,9 +246,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const resp = await getPartitionFieldsValues(client, request.body);
const resp = await getPartitionFieldsValues(mlClient, request.body);
return response.ok({
body: resp,
@ -270,18 +269,16 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
{
path: '/api/ml/results/anomaly_search',
validate: {
body: schema.maybe(schema.any()),
body: anomalySearchSchema,
},
options: {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
const { body } = await client.asInternalUser.search({
...request.body,
index: ML_RESULTS_INDEX_PATTERN,
});
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { jobIds, query } = request.body;
const { body } = await mlClient.anomalySearch(query, jobIds);
return response.ok({
body,
});
@ -311,9 +308,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const resp = await getCategorizerStats(client, request.params, request.query);
const resp = await getCategorizerStats(mlClient, request.params, request.query);
return response.ok({
body: resp,
});
@ -341,9 +338,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const resp = await getCategoryStoppedPartitions(client, request.body);
const resp = await getCategoryStoppedPartitions(mlClient, request.body);
return response.ok({
body: resp,
});

View file

@ -0,0 +1,223 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import { checksFactory } from '../saved_objects';
import { jobsAndSpaces, repairJobObjects } from './schemas/saved_objects';
/**
* Routes for job saved object management
*/
export function savedObjectsRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup JobSavedObjects
*
* @api {get} /api/ml/saved_objects/status Get job saved object status
* @apiName SavedObjectsStatus
* @apiDescription Lists all jobs and saved objects, showing the relationship status between them
*
*/
router.get(
{
path: '/api/ml/saved_objects/status',
validate: false,
options: {
tags: ['access:ml:canGetJobs'],
},
},
routeGuard.fullLicenseAPIGuard(async ({ client, response, jobSavedObjectService }) => {
try {
const { checkStatus } = checksFactory(client, jobSavedObjectService);
const status = await checkStatus();
return response.ok({
body: status,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);
/**
* @apiGroup JobSavedObjects
*
* @api {get} /api/ml/saved_objects/repair Repair job saved objects
* @apiName RepairJobSavedObjects
* @apiDescription Create saved objects for jobs which are missing them.
* Delete saved objects for jobs which no longer exist.
* Update missing datafeed ids in saved objects for datafeeds which exist.
* Remove datafeed ids for datafeeds which no longer exist.
*
*/
router.get(
{
path: '/api/ml/saved_objects/repair',
validate: {
query: repairJobObjects,
},
options: {
tags: ['access:ml:canCreateJob', 'access:ml:canCreateDataFrameAnalytics'],
},
},
routeGuard.fullLicenseAPIGuard(async ({ client, request, response, jobSavedObjectService }) => {
try {
const { simulate } = request.query;
const { repairJobs } = checksFactory(client, jobSavedObjectService);
const savedObjects = await repairJobs(simulate);
return response.ok({
body: savedObjects,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);
/**
* @apiGroup JobSavedObjects
*
* @api {get} /api/ml/saved_objects/initialize Create job saved objects for all jobs
* @apiName InitializeJobSavedObjects
* @apiDescription Create saved objects for jobs which are missing them.
*
*/
router.get(
{
path: '/api/ml/saved_objects/initialize',
validate: {
query: repairJobObjects,
},
options: {
tags: ['access:ml:canCreateJob', 'access:ml:canCreateDataFrameAnalytics'],
},
},
routeGuard.fullLicenseAPIGuard(async ({ client, request, response, jobSavedObjectService }) => {
try {
const { simulate } = request.query;
const { initSavedObjects } = checksFactory(client, jobSavedObjectService);
const savedObjects = await initSavedObjects(simulate);
return response.ok({
body: savedObjects,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);
/**
* @apiGroup JobSavedObjects
*
* @api {post} /api/ml/saved_objects/assign_job_to_space Assign jobs to spaces
* @apiName AssignJobsToSpaces
* @apiDescription Add a list of spaces to a list of jobs
*
* @apiSchema (body) jobsAndSpaces
*/
router.post(
{
path: '/api/ml/saved_objects/assign_job_to_space',
validate: {
body: jobsAndSpaces,
},
options: {
tags: ['access:ml:canCreateJob', 'access:ml:canCreateDataFrameAnalytics'],
},
},
routeGuard.fullLicenseAPIGuard(async ({ request, response, jobSavedObjectService }) => {
try {
const { jobType, jobIds, spaces } = request.body;
const body = await jobSavedObjectService.assignJobsToSpaces(jobType, jobIds, spaces);
return response.ok({
body,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);
/**
* @apiGroup JobSavedObjects
*
* @api {post} /api/ml/saved_objects/remove_job_from_space Remove jobs from spaces
* @apiName RemoveJobsFromSpaces
* @apiDescription Remove a list of spaces from a list of jobs
*
* @apiSchema (body) jobsAndSpaces
*/
router.post(
{
path: '/api/ml/saved_objects/remove_job_from_space',
validate: {
body: jobsAndSpaces,
},
options: {
tags: ['access:ml:canCreateJob', 'access:ml:canCreateDataFrameAnalytics'],
},
},
routeGuard.fullLicenseAPIGuard(async ({ request, response, jobSavedObjectService }) => {
try {
const { jobType, jobIds, spaces } = request.body;
const body = await jobSavedObjectService.removeJobsFromSpaces(jobType, jobIds, spaces);
return response.ok({
body,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);
/**
* @apiGroup JobSavedObjects
*
* @api {get} /api/ml/saved_objects/jobs_spaces All spaces in all jobs
* @apiName JobsSpaces
* @apiDescription List all jobs and their spaces
*
*/
router.get(
{
path: '/api/ml/saved_objects/jobs_spaces',
validate: false,
options: {
tags: ['access:ml:canGetJobs'],
},
},
routeGuard.fullLicenseAPIGuard(async ({ response, jobSavedObjectService, client }) => {
try {
const { checkStatus } = checksFactory(client, jobSavedObjectService);
const allStatuses = Object.values((await checkStatus()).savedObjects).flat();
const body = allStatuses
.filter((s) => s.checks.jobExists)
.reduce((acc, cur) => {
const type = cur.type;
if (acc[type] === undefined) {
acc[type] = {};
}
acc[type][cur.jobId] = cur.namespaces;
return acc;
}, {} as { [id: string]: { [id: string]: string[] | undefined } });
return response.ok({
body,
});
} catch (e) {
return response.customError(wrapError(e));
}
})
);
}
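The jobs_spaces handler above folds the saved object statuses into a map keyed by job type and then by job id, with the namespaces array as the value. A minimal sketch of the response shape it produces; the job ids and space names are placeholders, not values from this commit:

// Illustrative shape of the /api/ml/saved_objects/jobs_spaces response body.
const exampleJobsSpacesResponse: { [type: string]: { [jobId: string]: string[] | undefined } } = {
  'anomaly-detector': {
    'my-anomaly-job': ['default', 'marketing'],
  },
  'data-frame-analytics': {
    'my-dfa-job': ['default'],
  },
};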

View file

@ -30,6 +30,7 @@ const detectorSchema = schema.object({
use_null: schema.maybe(schema.boolean()),
/** Custom rules */
custom_rules: customRulesSchema,
detector_index: schema.maybe(schema.number()),
});
const customUrlSchema = {

View file

@ -11,6 +11,7 @@ export const calendarSchema = schema.object({
calendarId: schema.string(),
job_ids: schema.arrayOf(schema.maybe(schema.string())),
description: schema.maybe(schema.string()),
total_job_count: schema.maybe(schema.number()),
events: schema.arrayOf(
schema.maybe(
schema.object({

View file

@ -45,6 +45,11 @@ export const categoryExamplesSchema = schema.object({
maxExamples: schema.number(),
});
export const anomalySearchSchema = schema.object({
jobIds: schema.arrayOf(schema.string()),
query: schema.any(),
});
const fieldConfig = schema.maybe(
schema.object({
applyTimeRange: schema.maybe(schema.boolean()),

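The anomaly_search route now validates its body with anomalySearchSchema instead of accepting an arbitrary payload, and passes jobIds through to mlClient.anomalySearch. A minimal illustrative body; the job id is a placeholder and the query contents simply need to match whatever anomalySearch accepts, since the schema declares query as any:

// Sketch of a request body conforming to anomalySearchSchema (placeholder values).
const exampleAnomalySearchBody = {
  jobIds: ['my-anomaly-job'],
  query: {
    bool: {
      filter: [{ range: { timestamp: { gte: 'now-1d' } } }],
    },
  },
};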
View file

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { schema } from '@kbn/config-schema';
export const jobsAndSpaces = schema.object({
jobType: schema.string(),
jobIds: schema.arrayOf(schema.string()),
spaces: schema.arrayOf(schema.string()),
});
export const repairJobObjects = schema.object({ simulate: schema.maybe(schema.boolean()) });
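These schemas validate the space-assignment and repair/initialize endpoints registered in the saved objects routes. Illustrative payloads, with placeholder job ids and space names:

// Body accepted by jobsAndSpaces (assign_job_to_space / remove_job_from_space).
const exampleJobsAndSpaces = {
  jobType: 'anomaly-detector',
  jobIds: ['my-anomaly-job'],
  spaces: ['default', 'marketing'],
};

// Query accepted by repairJobObjects (repair / initialize).
const exampleRepairQuery = { simulate: true };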

View file

@ -18,7 +18,7 @@ import { RouteInitialization, SystemRouteDeps } from '../types';
* System routes
*/
export function systemRoutes(
{ router, mlLicense }: RouteInitialization,
{ router, mlLicense, routeGuard }: RouteInitialization,
{ spaces, cloud, resolveMlCapabilities }: SystemRouteDeps
) {
async function getNodeCount(client: IScopedClusterClient) {
@ -57,12 +57,12 @@ export function systemRoutes(
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ mlClient, client, request, response }) => {
try {
const { asCurrentUser, asInternalUser } = client;
const { asCurrentUser } = client;
let upgradeInProgress = false;
try {
const { body } = await asInternalUser.ml.info();
const { body } = await mlClient.info();
// if ml indices are currently being migrated, upgrade_mode will be set to true
// pass this back with the privileges to allow for the disabling of UI controls.
upgradeInProgress = body.upgrade_mode === true;
@ -115,7 +115,7 @@ export function systemRoutes(
path: '/api/ml/ml_capabilities',
validate: false,
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
// if spaces is disabled force isMlEnabledInSpace to be true
const { isMlEnabledInSpace } =
@ -129,7 +129,7 @@ export function systemRoutes(
}
const { getCapabilities } = capabilitiesProvider(
client,
mlClient,
mlCapabilities,
mlLicense,
isMlEnabledInSpace
@ -159,7 +159,7 @@ export function systemRoutes(
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ client, response }) => {
try {
return response.ok({
body: await getNodeCount(client),
@ -185,9 +185,9 @@ export function systemRoutes(
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ mlClient, response }) => {
try {
const { body } = await client.asInternalUser.ml.info();
const { body } = await mlClient.info();
const cloudId = cloud && cloud.cloudId;
return response.ok({
body: { ...body, cloudId },
@ -216,7 +216,7 @@ export function systemRoutes(
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { body } = await client.asCurrentUser.search(request.body);
return response.ok({
@ -244,7 +244,7 @@ export function systemRoutes(
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { index } = request.body;

View file

@ -14,7 +14,7 @@ import {
import { modelsProvider } from '../models/data_frame_analytics';
import { InferenceConfigResponse } from '../../common/types/trained_models';
export function trainedModelsRoutes({ router, mlLicense }: RouteInitialization) {
export function trainedModelsRoutes({ router, routeGuard }: RouteInitialization) {
/**
* @apiGroup Inference
*
@ -33,11 +33,11 @@ export function trainedModelsRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { modelId } = request.params;
const { with_pipelines: withPipelines, ...query } = request.query;
const { body } = await client.asInternalUser.ml.getTrainedModels<InferenceConfigResponse>({
const { body } = await mlClient.getTrainedModels<InferenceConfigResponse>({
size: 1000,
...query,
...(modelId ? { model_id: modelId } : {}),
@ -84,10 +84,10 @@ export function trainedModelsRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
try {
const { modelId } = request.params;
const { body } = await client.asInternalUser.ml.getTrainedModelsStats({
const { body } = await mlClient.getTrainedModelsStats({
...(modelId ? { model_id: modelId } : {}),
});
return response.ok({
@ -116,7 +116,7 @@ export function trainedModelsRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { modelId } = request.params;
const result = await modelsProvider(client).getModelsPipelines(modelId.split(','));
@ -146,10 +146,10 @@ export function trainedModelsRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canDeleteDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
routeGuard.fullLicenseAPIGuard(async ({ mlClient, request, response }) => {
try {
const { modelId } = request.params;
const { body } = await client.asInternalUser.ml.deleteTrainedModel({
const { body } = await mlClient.deleteTrainedModel({
model_id: modelId,
});
return response.ok({

View file

@ -0,0 +1,392 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import Boom from '@hapi/boom';
import { IScopedClusterClient } from 'kibana/server';
import { SearchResponse } from 'elasticsearch';
import type { JobObject, JobSavedObjectService } from './service';
import { ML_SAVED_OBJECT_TYPE } from './saved_objects';
import { JobType } from '../../common/types/saved_objects';
import { Job } from '../../common/types/anomaly_detection_jobs';
import { Datafeed } from '../../common/types/anomaly_detection_jobs';
import { DataFrameAnalyticsConfig } from '../../common/types/data_frame_analytics';
interface JobSavedObjectStatus {
jobId: string;
type: JobType;
datafeedId?: string | null;
namespaces: string[] | undefined;
checks: {
jobExists: boolean;
datafeedExists?: boolean;
};
}
interface JobStatus {
jobId: string;
datafeedId?: string | null;
checks: {
savedObjectExits: boolean;
};
}
interface SavedObjectJob {
[ML_SAVED_OBJECT_TYPE]: {
job_id: string;
type: JobType;
};
namespaces: string[];
}
interface StatusResponse {
savedObjects: {
[type in JobType]: JobSavedObjectStatus[];
};
jobs: {
[type in JobType]: JobStatus[];
};
}
export function checksFactory(
client: IScopedClusterClient,
jobSavedObjectService: JobSavedObjectService
) {
async function checkStatus(): Promise<StatusResponse> {
const jobObjects = await jobSavedObjectService.getAllJobObjects(undefined, false);
// load all jobs and datafeeds directly from ES; these are not space aware
const { body: adJobs } = await client.asInternalUser.ml.getJobs<{ jobs: Job[] }>();
const { body: datafeeds } = await client.asInternalUser.ml.getDatafeeds<{
datafeeds: Datafeed[];
}>();
const { body: dfaJobs } = await client.asInternalUser.ml.getDataFrameAnalytics<{
data_frame_analytics: DataFrameAnalyticsConfig[];
}>();
const savedObjectsStatus: JobSavedObjectStatus[] = jobObjects.map(
({ attributes, namespaces }) => {
const type: JobType = attributes.type;
const jobId = attributes.job_id;
const datafeedId = type === 'anomaly-detector' ? attributes.datafeed_id : undefined;
let jobExists = false;
let datafeedExists: boolean | undefined;
if (type === 'anomaly-detector') {
jobExists = adJobs.jobs.some((j) => j.job_id === jobId);
datafeedExists = datafeeds.datafeeds.some((d) => d.job_id === jobId);
} else {
jobExists = dfaJobs.data_frame_analytics.some((j) => j.id === jobId);
}
return {
jobId,
type,
datafeedId,
namespaces,
checks: {
jobExists,
datafeedExists,
},
};
}
);
const nonSpaceSavedObjects = await _loadAllJobSavedObjects();
const nonSpaceADObjectIds = new Set(
nonSpaceSavedObjects
.filter(({ type }) => type === 'anomaly-detector')
.map(({ jobId }) => jobId)
);
const nonSpaceDFAObjectIds = new Set(
nonSpaceSavedObjects
.filter(({ type }) => type === 'data-frame-analytics')
.map(({ jobId }) => jobId)
);
const adObjectIds = new Set(
savedObjectsStatus.filter(({ type }) => type === 'anomaly-detector').map(({ jobId }) => jobId)
);
const dfaObjectIds = new Set(
savedObjectsStatus
.filter(({ type }) => type === 'data-frame-analytics')
.map(({ jobId }) => jobId)
);
const anomalyDetectors = adJobs.jobs
.filter(({ job_id: jobId }) => {
// only list jobs which are in the current space (adObjectIds)
// or are not in any spaces (nonSpaceADObjectIds)
return adObjectIds.has(jobId) === true || nonSpaceADObjectIds.has(jobId) === false;
})
.map(({ job_id: jobId }) => {
const datafeedId = datafeeds.datafeeds.find((df) => df.job_id === jobId)?.datafeed_id;
return {
jobId,
datafeedId: datafeedId ?? null,
checks: {
savedObjectExits: nonSpaceADObjectIds.has(jobId),
},
};
});
const dataFrameAnalytics = dfaJobs.data_frame_analytics
.filter(({ id: jobId }) => {
// only list jobs which are in the current space (dfaObjectIds)
// or are not in any spaces (nonSpaceDFAObjectIds)
return dfaObjectIds.has(jobId) === true || nonSpaceDFAObjectIds.has(jobId) === false;
})
.map(({ id: jobId }) => {
return {
jobId,
datafeedId: null,
checks: {
savedObjectExits: nonSpaceDFAObjectIds.has(jobId),
},
};
});
return {
savedObjects: {
'anomaly-detector': savedObjectsStatus.filter(({ type }) => type === 'anomaly-detector'),
'data-frame-analytics': savedObjectsStatus.filter(
({ type }) => type === 'data-frame-analytics'
),
},
jobs: {
'anomaly-detector': anomalyDetectors,
'data-frame-analytics': dataFrameAnalytics,
},
};
}
async function repairJobs(simulate: boolean = false) {
type Result = Record<string, { success: boolean; error?: any }>;
const results: {
savedObjectsCreated: Result;
savedObjectsDeleted: Result;
datafeedsAdded: Result;
datafeedsRemoved: Result;
} = {
savedObjectsCreated: {},
savedObjectsDeleted: {},
datafeedsAdded: {},
datafeedsRemoved: {},
};
const { body: datafeeds } = await client.asInternalUser.ml.getDatafeeds<{
datafeeds: Datafeed[];
}>();
const tasks: Array<() => Promise<void>> = [];
const status = await checkStatus();
for (const job of status.jobs['anomaly-detector']) {
if (job.checks.savedObjectExits === false) {
if (simulate === true) {
results.savedObjectsCreated[job.jobId] = { success: true };
} else {
// create AD saved objects for jobs which are missing them
const jobId = job.jobId;
const datafeedId = job.datafeedId;
tasks.push(async () => {
try {
await jobSavedObjectService.createAnomalyDetectionJob(jobId, datafeedId ?? undefined);
results.savedObjectsCreated[job.jobId] = { success: true };
} catch (error) {
results.savedObjectsCreated[job.jobId] = {
success: false,
error: error.body ?? error,
};
}
});
}
}
}
for (const job of status.jobs['data-frame-analytics']) {
if (job.checks.savedObjectExits === false) {
if (simulate === true) {
results.savedObjectsCreated[job.jobId] = { success: true };
} else {
// create DFA saved objects for jobs which are missing them
const jobId = job.jobId;
tasks.push(async () => {
try {
await jobSavedObjectService.createDataFrameAnalyticsJob(jobId);
results.savedObjectsCreated[job.jobId] = { success: true };
} catch (error) {
results.savedObjectsCreated[job.jobId] = {
success: false,
error: error.body ?? error,
};
}
});
}
}
}
for (const job of status.savedObjects['anomaly-detector']) {
if (job.checks.jobExists === false) {
if (simulate === true) {
results.savedObjectsDeleted[job.jobId] = { success: true };
} else {
// Delete AD saved objects for jobs which no longer exist
const jobId = job.jobId;
tasks.push(async () => {
try {
await jobSavedObjectService.deleteAnomalyDetectionJob(jobId);
results.savedObjectsDeleted[job.jobId] = { success: true };
} catch (error) {
results.savedObjectsDeleted[job.jobId] = {
success: false,
error: error.body ?? error,
};
}
});
}
}
}
for (const job of status.savedObjects['data-frame-analytics']) {
if (job.checks.jobExists === false) {
if (simulate === true) {
results.savedObjectsDeleted[job.jobId] = { success: true };
} else {
// Delete DFA saved objects for jobs which no longer exist
const jobId = job.jobId;
tasks.push(async () => {
try {
await jobSavedObjectService.deleteDataFrameAnalyticsJob(jobId);
results.savedObjectsDeleted[job.jobId] = { success: true };
} catch (error) {
results.savedObjectsDeleted[job.jobId] = {
success: false,
error: error.body ?? error,
};
}
});
}
}
}
for (const job of status.savedObjects['anomaly-detector']) {
if (job.checks.datafeedExists === true && job.datafeedId === null) {
// add datafeed id for jobs where the datafeed exists but the id is missing from the saved object
if (simulate === true) {
results.datafeedsAdded[job.jobId] = { success: true };
} else {
const df = datafeeds.datafeeds.find((d) => d.job_id === job.jobId);
const jobId = job.jobId;
const datafeedId = df?.datafeed_id;
tasks.push(async () => {
try {
if (datafeedId !== undefined) {
await jobSavedObjectService.addDatafeed(datafeedId, jobId);
}
results.datafeedsAdded[job.jobId] = { success: true };
} catch (error) {
results.datafeedsAdded[job.jobId] = { success: false, error };
}
});
}
} else if (
job.checks.jobExists === true &&
job.checks.datafeedExists === false &&
job.datafeedId !== null &&
job.datafeedId !== undefined
) {
// remove datafeed id for jobs where the datafeed no longer exists but the id is populated in the saved object
if (simulate === true) {
results.datafeedsRemoved[job.jobId] = { success: true };
} else {
const datafeedId = job.datafeedId;
tasks.push(async () => {
try {
await jobSavedObjectService.deleteDatafeed(datafeedId);
results.datafeedsRemoved[job.jobId] = { success: true };
} catch (error) {
results.datafeedsRemoved[job.jobId] = { success: false, error: error.body ?? error };
}
});
}
}
}
await Promise.allSettled(tasks.map((t) => t()));
return results;
}
async function initSavedObjects(simulate: boolean = false, namespaces: string[] = ['*']) {
const results: { jobs: Array<{ id: string; type: string }>; success: boolean; error?: any } = {
jobs: [],
success: true,
};
const status = await checkStatus();
const jobs: JobObject[] = [];
const types: JobType[] = ['anomaly-detector', 'data-frame-analytics'];
types.forEach((type) => {
status.jobs[type].forEach((job) => {
if (job.checks.savedObjectExits === false) {
if (simulate === true) {
results.jobs.push({ id: job.jobId, type });
} else {
jobs.push({
job_id: job.jobId,
datafeed_id: job.datafeedId ?? null,
type,
});
}
}
});
});
try {
const createResults = await jobSavedObjectService.bulkCreateJobs(jobs, namespaces);
createResults.saved_objects.forEach(({ attributes }) => {
results.jobs.push({
id: attributes.job_id,
type: attributes.type,
});
});
} catch (error) {
results.success = false;
results.error = Boom.boomify(error).output;
}
return results;
}
async function _loadAllJobSavedObjects() {
const { body } = await client.asInternalUser.search<SearchResponse<SavedObjectJob>>({
index: '.kibana*',
size: 1000,
_source: ['ml-job.job_id', 'ml-job.type', 'namespaces'],
body: {
query: {
bool: {
filter: [
{
term: {
type: 'ml-job',
},
},
],
},
},
},
});
return body.hits.hits.map(({ _source }) => {
const { job_id: jobId, type } = _source[ML_SAVED_OBJECT_TYPE];
return {
jobId,
type,
spaces: _source.namespaces,
};
});
}
return { checkStatus, repairJobs, initSavedObjects };
}
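checkStatus, repairJobs and initSavedObjects are consumed by the saved objects routes above. A minimal usage sketch, assuming the scoped cluster client and job saved object service are taken from a route handler as in those routes; the result buckets mirror the shape built in repairJobs:

// Sketch only: `client` and `jobSavedObjectService` come from the route handler.
const { checkStatus, repairJobs, initSavedObjects } = checksFactory(client, jobSavedObjectService);

// Dry run: report what would be created or deleted without writing anything.
const report = await repairJobs(true);
// report.savedObjectsCreated / savedObjectsDeleted / datafeedsAdded / datafeedsRemoved
// each map a job id to { success: boolean; error?: any }.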

View file

@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { setupSavedObjects } from './saved_objects';
export { JobObject, JobSavedObjectService, jobSavedObjectServiceFactory } from './service';
export { checksFactory } from './checks';

View file

@ -0,0 +1,25 @@
{
"job": {
"properties": {
"job_id": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"datafeed_id": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"type": {
"type": "keyword"
}
}
}
}
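The attributes of an ml-job saved object described by this mapping line up with the JobObject interface used by the saved object service. An illustrative document, with placeholder ids:

// Illustrative ml-job saved object attributes.
const exampleMlJobAttributes = {
  job_id: 'my-anomaly-job',
  datafeed_id: 'datafeed-my-anomaly-job',
  type: 'anomaly-detector',
};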

View file

@ -0,0 +1,12 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { SavedObjectMigrationMap } from '../../../../../src/core/server';
export const migrations: SavedObjectMigrationMap = {
'7.9.0': (doc) => doc,
'7.10.0': (doc) => doc,
};

View file

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { SavedObjectsServiceSetup } from 'kibana/server';
import mappings from './mappings.json';
import { migrations } from './migrations';
export const ML_SAVED_OBJECT_TYPE = 'ml-job';
export function setupSavedObjects(savedObjects: SavedObjectsServiceSetup) {
savedObjects.registerType({
name: ML_SAVED_OBJECT_TYPE,
hidden: false,
namespaceType: 'multiple',
migrations,
mappings: mappings.job,
});
}

View file

@@ -0,0 +1,299 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import RE2 from 're2';
import { SavedObjectsClientContract, SavedObjectsFindOptions } from 'kibana/server';
import { ML_SAVED_OBJECT_TYPE } from './saved_objects';
import { JobType } from '../../common/types/saved_objects';
import { MLJobNotFound } from '../lib/ml_client';
export interface JobObject {
job_id: string;
datafeed_id: string | null;
type: JobType;
}
type JobObjectFilter = { [k in keyof JobObject]?: string };
export type JobSavedObjectService = ReturnType<typeof jobSavedObjectServiceFactory>;
export function jobSavedObjectServiceFactory(savedObjectsClient: SavedObjectsClientContract) {
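// Finds ml-job saved objects, optionally filtered by job type and by job id or datafeed id.
// Only the current space is searched unless currentSpaceOnly is false, in which case all
// spaces ('*') are included.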
async function _getJobObjects(
jobType?: JobType,
jobId?: string,
datafeedId?: string,
currentSpaceOnly: boolean = true
) {
const filterObject: JobObjectFilter = {};
if (jobType !== undefined) {
filterObject.type = jobType;
}
if (jobId !== undefined) {
filterObject.job_id = jobId;
} else if (datafeedId !== undefined) {
filterObject.datafeed_id = datafeedId;
}
const { filter, searchFields } = createSavedObjectFilter(filterObject);
const options: SavedObjectsFindOptions = {
type: ML_SAVED_OBJECT_TYPE,
perPage: 10000,
...(currentSpaceOnly === true ? {} : { namespaces: ['*'] }),
searchFields,
filter,
};
const jobs = await savedObjectsClient.find<JobObject>(options);
return jobs.saved_objects;
}
async function _createJob(jobType: JobType, jobId: string, datafeedId?: string) {
try {
await _deleteJob(jobType, jobId);
} catch (error) {
// fail silently
// the job object may or may not already exist; it is recreated below either way
}
await savedObjectsClient.create<JobObject>(ML_SAVED_OBJECT_TYPE, {
job_id: jobId,
datafeed_id: datafeedId ?? null,
type: jobType,
});
}
async function _bulkCreateJobs(jobs: JobObject[], namespaces?: string[]) {
return await savedObjectsClient.bulkCreate<JobObject>(
jobs.map((j) => ({
type: ML_SAVED_OBJECT_TYPE,
attributes: j,
initialNamespaces: namespaces,
}))
);
}
async function _deleteJob(jobType: JobType, jobId: string) {
const jobs = await _getJobObjects(jobType, jobId);
const job = jobs[0];
if (job === undefined) {
throw new MLJobNotFound('job not found');
}
await savedObjectsClient.delete(ML_SAVED_OBJECT_TYPE, job.id);
}
async function createAnomalyDetectionJob(jobId: string, datafeedId?: string) {
await _createJob('anomaly-detector', jobId, datafeedId);
}
async function deleteAnomalyDetectionJob(jobId: string) {
await _deleteJob('anomaly-detector', jobId);
}
async function createDataFrameAnalyticsJob(jobId: string) {
await _createJob('data-frame-analytics', jobId);
}
async function deleteDataFrameAnalyticsJob(jobId: string) {
await _deleteJob('data-frame-analytics', jobId);
}
async function bulkCreateJobs(jobs: JobObject[], namespaces?: string[]) {
return await _bulkCreateJobs(jobs, namespaces);
}
async function getAllJobObjects(jobType?: JobType, currentSpaceOnly: boolean = true) {
return await _getJobObjects(jobType, undefined, undefined, currentSpaceOnly);
}
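// Stores the datafeed id on the saved object of its parent anomaly detection job;
// deleteDatafeed below clears it again.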
async function addDatafeed(datafeedId: string, jobId: string) {
const jobs = await _getJobObjects('anomaly-detector', jobId);
const job = jobs[0];
if (job === undefined) {
throw new MLJobNotFound(`job '${jobId}' not found`);
}
const jobObject = job.attributes;
jobObject.datafeed_id = datafeedId;
await savedObjectsClient.update<JobObject>(ML_SAVED_OBJECT_TYPE, job.id, jobObject);
}
async function deleteDatafeed(datafeedId: string) {
const jobs = await _getJobObjects('anomaly-detector', undefined, datafeedId);
const job = jobs[0];
if (job === undefined) {
throw new MLJobNotFound(`'${datafeedId}' not found`);
}
const jobObject = job.attributes;
jobObject.datafeed_id = null;
await savedObjectsClient.update<JobObject>(ML_SAVED_OBJECT_TYPE, job.id, jobObject);
}
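// Returns the job ids or datafeed ids of all saved objects of the given type which are
// visible in the current space.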
async function getIds(jobType: JobType, idType: keyof JobObject) {
const jobs = await _getJobObjects(jobType);
return jobs.map((o) => o.attributes[idType]);
}
async function filterJobObjectsForSpace<T>(
jobType: JobType,
list: T[],
field: keyof T,
key: keyof JobObject
): Promise<T[]> {
if (list.length === 0) {
return [];
}
const jobIds = await getIds(jobType, key);
return list.filter((j) => jobIds.includes((j[field] as unknown) as string));
}
async function filterJobsForSpace<T>(jobType: JobType, list: T[], field: keyof T): Promise<T[]> {
return filterJobObjectsForSpace<T>(jobType, list, field, 'job_id');
}
async function filterDatafeedsForSpace<T>(
jobType: JobType,
list: T[],
field: keyof T
): Promise<T[]> {
return filterJobObjectsForSpace<T>(jobType, list, field, 'datafeed_id');
}
async function filterJobObjectIdsForSpace(
jobType: JobType,
ids: string[],
key: keyof JobObject,
allowWildcards: boolean = false
): Promise<string[]> {
if (ids.length === 0) {
return [];
}
const jobIds = await getIds(jobType, key);
// check to see if any of the ids supplied contain a wildcard
if (allowWildcards === false || ids.join().match('\\*') === null) {
// wildcards are not allowed or no wildcards could be found
return ids.filter((id) => jobIds.includes(id));
}
// if any of the ids contain a wildcard, check each one.
return ids.filter((id) => {
if (id.match('\\*') === null) {
return jobIds.includes(id);
}
// convert the wildcard pattern into a regular expression, replacing every '*'
const regex = new RE2(id.replace(/\*/g, '.*'));
return jobIds.some((jId) => typeof jId === 'string' && regex.exec(jId));
});
}
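// Hypothetical example: filterJobIdsForSpace('anomaly-detector', ['farequote*'], true)
// would return only the ids matching the wildcard which have a saved object in the
// current space ('farequote*' is an illustrative pattern, not a real job id).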
async function filterJobIdsForSpace(
jobType: JobType,
ids: string[],
allowWildcards: boolean = false
): Promise<string[]> {
return filterJobObjectIdsForSpace(jobType, ids, 'job_id', allowWildcards);
}
async function filterDatafeedIdsForSpace(
ids: string[],
allowWildcards: boolean = false
): Promise<string[]> {
return filterJobObjectIdsForSpace('anomaly-detector', ids, 'datafeed_id', allowWildcards);
}
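// Shares the saved object of each supplied job id with the given spaces. Ids with no
// matching saved object are reported back as individual 404-style errors (see
// createError below) rather than failing the whole request.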
async function assignJobsToSpaces(jobType: JobType, jobIds: string[], spaces: string[]) {
const results: Record<string, { success: boolean; error?: any }> = {};
const jobs = await _getJobObjects(jobType);
for (const id of jobIds) {
const job = jobs.find((j) => j.attributes.job_id === id);
if (job === undefined) {
results[id] = {
success: false,
error: createError(id, 'job_id'),
};
} else {
try {
await savedObjectsClient.addToNamespaces(ML_SAVED_OBJECT_TYPE, job.id, spaces);
results[id] = {
success: true,
};
} catch (error) {
results[id] = {
success: false,
error,
};
}
}
}
return results;
}
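// Removes the supplied job ids' saved objects from the given spaces. Ids with no
// matching saved object are silently skipped.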
async function removeJobsFromSpaces(jobType: JobType, jobIds: string[], spaces: string[]) {
const results: Record<string, { success: boolean; error?: any }> = {};
const jobs = await _getJobObjects(jobType);
for (const job of jobs) {
if (jobIds.includes(job.attributes.job_id)) {
try {
await savedObjectsClient.deleteFromNamespaces(ML_SAVED_OBJECT_TYPE, job.id, spaces);
results[job.attributes.job_id] = {
success: true,
};
} catch (error) {
results[job.attributes.job_id] = {
success: false,
error,
};
}
}
}
return results;
}
return {
getAllJobObjects,
createAnomalyDetectionJob,
createDataFrameAnalyticsJob,
deleteAnomalyDetectionJob,
deleteDataFrameAnalyticsJob,
addDatafeed,
deleteDatafeed,
filterJobsForSpace,
filterJobIdsForSpace,
filterDatafeedsForSpace,
filterDatafeedIdsForSpace,
assignJobsToSpaces,
removeJobsFromSpaces,
bulkCreateJobs,
};
}
export function createError(id: string, key: keyof JobObject) {
let reason = `'${id}' not found`;
if (key === 'job_id') {
reason = `No known job with id '${id}'`;
} else if (key === 'datafeed_id') {
reason = `No known datafeed with id '${id}'`;
}
return {
error: {
reason,
},
status: 404,
};
}
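// Builds a KQL filter string and the matching searchFields list from the supplied attributes,
// e.g. { job_id: 'my_job' } becomes `ml-job.attributes.job_id: "my_job"` ('my_job' is a
// hypothetical id).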
function createSavedObjectFilter(filterObject: JobObjectFilter) {
const searchFields: string[] = [];
const filter = Object.entries(filterObject)
.map(([k, v]) => {
searchFields.push(k);
return `${ML_SAVED_OBJECT_TYPE}.attributes.${k}: "${v}"`;
})
.join(' AND ');
return { filter, searchFields };
}

View file

@@ -4,13 +4,13 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { MlServerLicense } from '../../lib/license';
import { MlLicense } from '../../../common/license';
import { InsufficientFullLicenseError, InsufficientBasicLicenseError } from './errors';
export type LicenseCheck = () => void;
export function licenseChecks(
mlLicense: MlServerLicense
mlLicense: MlLicense
): { isFullLicense: LicenseCheck; isMinimumLicense: LicenseCheck } {
return {
isFullLicense() {

Some files were not shown because too many files have changed in this diff.