[ML] Switching to new es client (#74965)

* [ML] Switching to new es client

* further conversions

* fixing tests

* updating responses

* test commit

* refactoring shared services to remove the context parameter

* removing last scoped clients

* removing ml js client

* updating file data viz errors

* fixing jest tests

* fixing types after merge with master

* error response changes

* adding default sizes to some requests

* adding custom error types for license checks

* tidying up shared function checks

* removing test data

* removing need for DummyKibanaRequest

* updating comment

* fixing functional api tests

* removing comments

* fixing types after master merge

* throw error rather than return it

* removing placeholder error

* changes based on review comments

* fixing types after merge with master

* fixing missing return

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
James Gowdy 2020-09-02 14:07:57 +01:00 committed by GitHub
parent 946e9f0914
commit 4762cf56f5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
106 changed files with 1579 additions and 2574 deletions

View file

@ -73,10 +73,9 @@ export function registerTransactionDurationAnomalyAlertType({
return;
}
const alertParams = params as TypeOf<typeof paramsSchema>;
const mlClient = services.getLegacyScopedClusterClient(ml.mlClient);
const request = { params: 'DummyKibanaRequest' } as KibanaRequest;
const { mlAnomalySearch } = ml.mlSystemProvider(mlClient, request);
const anomalyDetectors = ml.anomalyDetectorsProvider(mlClient, request);
const request = {} as KibanaRequest;
const { mlAnomalySearch } = ml.mlSystemProvider(request);
const anomalyDetectors = ml.anomalyDetectorsProvider(request);
const mlJobIds = await getMLJobIds(
anomalyDetectors,

View file

@ -122,13 +122,10 @@ function getMlSetup(
if (!ml) {
return;
}
const mlClient = ml.mlClient.asScoped(request);
const mlSystem = ml.mlSystemProvider(mlClient, request);
return {
mlClient,
mlSystem,
modules: ml.modulesProvider(mlClient, request, savedObjectsClient),
anomalyDetectors: ml.anomalyDetectorsProvider(mlClient, request),
mlAnomalySearch: mlSystem.mlAnomalySearch,
mlSystem: ml.mlSystemProvider(request),
anomalyDetectors: ml.anomalyDetectorsProvider(request),
modules: ml.modulesProvider(request, savedObjectsClient),
};
}

View file

@ -152,9 +152,8 @@ export class InfraServerPlugin {
core.http.registerRouteHandlerContext(
'infra',
(context, request): InfraRequestHandlerContext => {
const mlSystem = context.ml && plugins.ml?.mlSystemProvider(context.ml?.mlClient, request);
const mlAnomalyDetectors =
context.ml && plugins.ml?.anomalyDetectorsProvider(context.ml?.mlClient, request);
const mlSystem = plugins.ml?.mlSystemProvider(request);
const mlAnomalyDetectors = plugins.ml?.anomalyDetectorsProvider(request);
const spaceId = plugins.spaces?.spacesService.getSpaceId(request) || 'default';
return {

View file

@ -93,11 +93,11 @@ function title(statuses: Statuses) {
}
}
function ImportError(error: any, key: number) {
const ImportError: FC<{ error: any }> = ({ error }) => {
const errorObj = toString(error);
return (
<React.Fragment>
<p key={key}>{errorObj.msg}</p>
<>
<p>{errorObj.msg}</p>
{errorObj.more !== undefined && (
<EuiAccordion
@ -113,9 +113,9 @@ function ImportError(error: any, key: number) {
{errorObj.more}
</EuiAccordion>
)}
</React.Fragment>
</>
);
}
};
function toString(error: any): ImportError {
if (typeof error === 'string') {
@ -127,11 +127,11 @@ function toString(error: any): ImportError {
return { msg: error.msg };
} else if (error.error !== undefined) {
if (typeof error.error === 'object') {
if (error.error.msg !== undefined) {
if (error.error.reason !== undefined) {
// this will catch a bulk ingest failure
const errorObj: ImportError = { msg: error.error.msg };
if (error.error.body !== undefined) {
errorObj.more = error.error.response;
const errorObj: ImportError = { msg: error.error.reason };
if (error.error.root_cause !== undefined) {
errorObj.more = JSON.stringify(error.error.root_cause);
}
return errorObj;
}

View file

@ -71,9 +71,7 @@ export class ChartLoader {
splitFieldName,
splitFieldValue
);
if (resp.error !== undefined) {
throw resp.error;
}
return resp.results;
}
return {};
@ -105,9 +103,7 @@ export class ChartLoader {
aggFieldPairNames,
splitFieldName
);
if (resp.error !== undefined) {
throw resp.error;
}
return resp.results;
}
return {};

View file

@ -1,60 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { elasticsearchJsPlugin } from './elasticsearch_ml';
interface Endpoint {
fmt: string;
}
interface ClientAction {
urls?: Endpoint[];
url: Endpoint;
}
describe('ML - Endpoints', () => {
// Check all paths in the ML elasticsearchJsPlugin start with a leading forward slash
// so they work if Kibana is run behind a reverse proxy
const PATH_START: string = '/';
const urls: string[] = [];
// Stub objects
const Client = {
prototype: {},
};
const components = {
clientAction: {
factory(obj: ClientAction) {
// add each endpoint URL to a list
if (obj.urls) {
obj.urls.forEach((url) => {
urls.push(url.fmt);
});
}
if (obj.url) {
urls.push(obj.url.fmt);
}
},
namespaceFactory() {
return {
prototype: {},
};
},
},
};
// Stub elasticsearchJsPlugin
elasticsearchJsPlugin(Client, null, components);
describe('paths', () => {
it(`should start with ${PATH_START}`, () => {
urls.forEach((url) => {
expect(url[0]).toEqual(PATH_START);
});
});
});
});

View file

@ -1,929 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export const elasticsearchJsPlugin = (Client: any, config: any, components: any) => {
const ca = components.clientAction.factory;
Client.prototype.ml = components.clientAction.namespaceFactory();
const ml = Client.prototype.ml.prototype;
/**
* Perform a [ml.authenticate](Retrieve details about the currently authenticated user) request
*
* @param {Object} params - An object with parameters used to carry out this action
*/
ml.jobs = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>',
req: {
jobId: {
type: 'list',
},
},
},
{
fmt: '/_ml/anomaly_detectors/',
},
],
method: 'GET',
});
ml.jobStats = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/_stats',
req: {
jobId: {
type: 'list',
},
},
},
{
fmt: '/_ml/anomaly_detectors/_stats',
},
],
method: 'GET',
});
ml.addJob = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>',
req: {
jobId: {
type: 'string',
},
},
},
],
needBody: true,
method: 'PUT',
});
ml.openJob = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/_open',
req: {
jobId: {
type: 'string',
},
},
},
],
method: 'POST',
});
ml.closeJob = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/_close?force=<%=force%>',
req: {
jobId: {
type: 'string',
},
force: {
type: 'boolean',
},
},
},
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/_close',
req: {
jobId: {
type: 'string',
},
},
},
],
method: 'POST',
});
// Currently the endpoint uses a default size of 100 unless a size is supplied.
// So until paging is supported in the UI, explicitly supply a size of 1000
// to match the max number of docs that the endpoint can return.
ml.getDataFrameAnalytics = ca({
urls: [
{
fmt: '/_ml/data_frame/analytics/<%=analyticsId%>',
req: {
analyticsId: {
type: 'string',
},
},
},
{
fmt: '/_ml/data_frame/analytics/_all?size=1000',
},
],
method: 'GET',
});
ml.getDataFrameAnalyticsStats = ca({
urls: [
{
fmt: '/_ml/data_frame/analytics/<%=analyticsId%>/_stats',
req: {
analyticsId: {
type: 'string',
},
},
},
{
// Currently the endpoint uses a default size of 100 unless a size is supplied.
// So until paging is supported in the UI, explicitly supply a size of 1000
// to match the max number of docs that the endpoint can return.
fmt: '/_ml/data_frame/analytics/_all/_stats?size=1000',
},
],
method: 'GET',
});
ml.createDataFrameAnalytics = ca({
urls: [
{
fmt: '/_ml/data_frame/analytics/<%=analyticsId%>',
req: {
analyticsId: {
type: 'string',
},
},
},
],
needBody: true,
method: 'PUT',
});
ml.evaluateDataFrameAnalytics = ca({
urls: [
{
fmt: '/_ml/data_frame/_evaluate',
},
],
needBody: true,
method: 'POST',
});
ml.explainDataFrameAnalytics = ca({
urls: [
{
fmt: '/_ml/data_frame/analytics/_explain',
},
],
needBody: true,
method: 'POST',
});
ml.deleteDataFrameAnalytics = ca({
urls: [
{
fmt: '/_ml/data_frame/analytics/<%=analyticsId%>',
req: {
analyticsId: {
type: 'string',
},
},
},
],
method: 'DELETE',
});
ml.startDataFrameAnalytics = ca({
urls: [
{
fmt: '/_ml/data_frame/analytics/<%=analyticsId%>/_start',
req: {
analyticsId: {
type: 'string',
},
},
},
],
method: 'POST',
});
ml.stopDataFrameAnalytics = ca({
urls: [
{
fmt: '/_ml/data_frame/analytics/<%=analyticsId%>/_stop?&force=<%=force%>',
req: {
analyticsId: {
type: 'string',
},
force: {
type: 'boolean',
},
},
},
],
method: 'POST',
});
ml.updateDataFrameAnalytics = ca({
urls: [
{
fmt: '/_ml/data_frame/analytics/<%=analyticsId%>/_update',
req: {
analyticsId: {
type: 'string',
},
},
},
],
needBody: true,
method: 'POST',
});
ml.deleteJob = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>?&force=<%=force%>&wait_for_completion=false',
req: {
jobId: {
type: 'string',
},
force: {
type: 'boolean',
},
},
},
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>?&wait_for_completion=false',
req: {
jobId: {
type: 'string',
},
},
},
],
method: 'DELETE',
});
ml.updateJob = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/_update',
req: {
jobId: {
type: 'string',
},
},
},
],
needBody: true,
method: 'POST',
});
ml.datafeeds = ca({
urls: [
{
fmt: '/_ml/datafeeds/<%=datafeedId%>',
req: {
datafeedId: {
type: 'list',
},
},
},
{
fmt: '/_ml/datafeeds/',
},
],
method: 'GET',
});
ml.datafeedStats = ca({
urls: [
{
fmt: '/_ml/datafeeds/<%=datafeedId%>/_stats',
req: {
datafeedId: {
type: 'list',
},
},
},
{
fmt: '/_ml/datafeeds/_stats',
},
],
method: 'GET',
});
ml.addDatafeed = ca({
urls: [
{
fmt: '/_ml/datafeeds/<%=datafeedId%>',
req: {
datafeedId: {
type: 'string',
},
},
},
],
needBody: true,
method: 'PUT',
});
ml.updateDatafeed = ca({
urls: [
{
fmt: '/_ml/datafeeds/<%=datafeedId%>/_update',
req: {
datafeedId: {
type: 'string',
},
},
},
],
needBody: true,
method: 'POST',
});
ml.deleteDatafeed = ca({
urls: [
{
fmt: '/_ml/datafeeds/<%=datafeedId%>?force=<%=force%>',
req: {
datafeedId: {
type: 'string',
},
force: {
type: 'boolean',
},
},
},
{
fmt: '/_ml/datafeeds/<%=datafeedId%>',
req: {
datafeedId: {
type: 'string',
},
},
},
],
method: 'DELETE',
});
ml.startDatafeed = ca({
urls: [
{
fmt: '/_ml/datafeeds/<%=datafeedId%>/_start?&start=<%=start%>&end=<%=end%>',
req: {
datafeedId: {
type: 'string',
},
start: {
type: 'string',
},
end: {
type: 'string',
},
},
},
{
fmt: '/_ml/datafeeds/<%=datafeedId%>/_start?&start=<%=start%>',
req: {
datafeedId: {
type: 'string',
},
start: {
type: 'string',
},
},
},
{
fmt: '/_ml/datafeeds/<%=datafeedId%>/_start',
req: {
datafeedId: {
type: 'string',
},
},
},
],
method: 'POST',
});
ml.stopDatafeed = ca({
urls: [
{
fmt: '/_ml/datafeeds/<%=datafeedId%>/_stop?force=<%=force%>',
req: {
datafeedId: {
type: 'string',
},
force: {
type: 'boolean',
},
},
},
{
fmt: '/_ml/datafeeds/<%=datafeedId%>/_stop',
req: {
datafeedId: {
type: 'string',
},
},
},
],
method: 'POST',
});
ml.validateDetector = ca({
url: {
fmt: '/_ml/anomaly_detectors/_validate/detector',
},
needBody: true,
method: 'POST',
});
ml.estimateModelMemory = ca({
url: {
fmt: '/_ml/anomaly_detectors/_estimate_model_memory',
},
needBody: true,
method: 'POST',
});
ml.datafeedPreview = ca({
url: {
fmt: '/_ml/datafeeds/<%=datafeedId%>/_preview',
req: {
datafeedId: {
type: 'string',
},
},
},
method: 'GET',
});
ml.forecast = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/_forecast?&duration=<%=duration%>',
req: {
jobId: {
type: 'string',
},
duration: {
type: 'string',
},
},
},
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/_forecast',
req: {
jobId: {
type: 'string',
},
},
},
],
method: 'POST',
});
ml.records = ca({
url: {
fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/records',
req: {
jobId: {
type: 'string',
},
},
},
method: 'POST',
});
ml.buckets = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/buckets',
req: {
jobId: {
type: 'string',
},
},
},
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/buckets/<%=timestamp%>',
req: {
jobId: {
type: 'string',
},
timestamp: {
type: 'string',
},
},
},
],
method: 'POST',
});
ml.overallBuckets = ca({
url: {
fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/overall_buckets',
req: {
jobId: {
type: 'string',
},
},
},
method: 'POST',
});
ml.privilegeCheck = ca({
url: {
fmt: '/_security/user/_has_privileges',
},
needBody: true,
method: 'POST',
});
// Currently the endpoint uses a default size of 100 unless a size is supplied. So until paging is supported in the UI, explicitly supply a size of 1000
ml.calendars = ca({
urls: [
{
fmt: '/_ml/calendars/<%=calendarId%>',
req: {
calendarId: {
type: 'string',
},
},
},
{
fmt: '/_ml/calendars?size=1000',
},
],
method: 'GET',
});
ml.deleteCalendar = ca({
url: {
fmt: '/_ml/calendars/<%=calendarId%>',
req: {
calendarId: {
type: 'string',
},
},
},
method: 'DELETE',
});
ml.addCalendar = ca({
url: {
fmt: '/_ml/calendars/<%=calendarId%>',
req: {
calendarId: {
type: 'string',
},
},
},
needBody: true,
method: 'PUT',
});
ml.addJobToCalendar = ca({
url: {
fmt: '/_ml/calendars/<%=calendarId%>/jobs/<%=jobId%>',
req: {
calendarId: {
type: 'string',
},
jobId: {
type: 'string',
},
},
},
method: 'PUT',
});
ml.removeJobFromCalendar = ca({
url: {
fmt: '/_ml/calendars/<%=calendarId%>/jobs/<%=jobId%>',
req: {
calendarId: {
type: 'string',
},
jobId: {
type: 'string',
},
},
},
method: 'DELETE',
});
ml.events = ca({
urls: [
{
fmt: '/_ml/calendars/<%=calendarId%>/events',
req: {
calendarId: {
type: 'string',
},
},
},
{
fmt: '/_ml/calendars/<%=calendarId%>/events?&job_id=<%=jobId%>',
req: {
calendarId: {
type: 'string',
},
jobId: {
type: 'string',
},
},
},
{
fmt: '/_ml/calendars/<%=calendarId%>/events?&after=<%=start%>&before=<%=end%>',
req: {
calendarId: {
type: 'string',
},
start: {
type: 'string',
},
end: {
type: 'string',
},
},
},
{
fmt:
'/_ml/calendars/<%=calendarId%>/events?&after=<%=start%>&before=<%=end%>&job_id=<%=jobId%>',
req: {
calendarId: {
type: 'string',
},
start: {
type: 'string',
},
end: {
type: 'string',
},
jobId: {
type: 'string',
},
},
},
],
method: 'GET',
});
ml.addEvent = ca({
url: {
fmt: '/_ml/calendars/<%=calendarId%>/events',
req: {
calendarId: {
type: 'string',
},
},
},
needBody: true,
method: 'POST',
});
ml.deleteEvent = ca({
url: {
fmt: '/_ml/calendars/<%=calendarId%>/events/<%=eventId%>',
req: {
calendarId: {
type: 'string',
},
eventId: {
type: 'string',
},
},
},
method: 'DELETE',
});
// Currently the endpoint uses a default size of 100 unless a size is supplied. So until paging is supported in the UI, explicitly supply a size of 1000
ml.filters = ca({
urls: [
{
fmt: '/_ml/filters/<%=filterId%>',
req: {
filterId: {
type: 'string',
},
},
},
{
fmt: '/_ml/filters?size=1000',
},
],
method: 'GET',
});
ml.addFilter = ca({
url: {
fmt: '/_ml/filters/<%=filterId%>',
req: {
filterId: {
type: 'string',
},
},
},
needBody: true,
method: 'PUT',
});
ml.updateFilter = ca({
urls: [
{
fmt: '/_ml/filters/<%=filterId%>/_update',
req: {
filterId: {
type: 'string',
},
},
},
],
needBody: true,
method: 'POST',
});
ml.deleteFilter = ca({
url: {
fmt: '/_ml/filters/<%=filterId%>',
req: {
filterId: {
type: 'string',
},
},
},
method: 'DELETE',
});
ml.info = ca({
url: {
fmt: '/_ml/info',
},
method: 'GET',
});
ml.fileStructure = ca({
urls: [
{
fmt:
'/_ml/find_file_structure?&explain=true&charset=<%=charset%>&format=<%=format%>&has_header_row=<%=has_header_row%>&column_names=<%=column_names%>&delimiter=<%=delimiter%>&quote=<%=quote%>&should_trim_fields=<%=should_trim_fields%>&grok_pattern=<%=grok_pattern%>&timestamp_field=<%=timestamp_field%>&timestamp_format=<%=timestamp_format%>&lines_to_sample=<%=lines_to_sample%>',
req: {
charset: {
type: 'string',
},
format: {
type: 'string',
},
has_header_row: {
type: 'string',
},
column_names: {
type: 'string',
},
delimiter: {
type: 'string',
},
quote: {
type: 'string',
},
should_trim_fields: {
type: 'string',
},
grok_pattern: {
type: 'string',
},
timestamp_field: {
type: 'string',
},
timestamp_format: {
type: 'string',
},
lines_to_sample: {
type: 'string',
},
},
},
{
fmt: '/_ml/find_file_structure?&explain=true',
},
],
needBody: true,
method: 'POST',
});
ml.rollupIndexCapabilities = ca({
urls: [
{
fmt: '/<%=indexPattern%>/_rollup/data',
req: {
indexPattern: {
type: 'string',
},
},
},
],
method: 'GET',
});
ml.categories = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/categories/<%=categoryId%>',
req: {
jobId: {
type: 'string',
},
categoryId: {
type: 'string',
},
},
},
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/results/categories',
req: {
jobId: {
type: 'string',
},
},
},
],
method: 'GET',
});
ml.modelSnapshots = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots/<%=snapshotId%>',
req: {
jobId: {
type: 'string',
},
snapshotId: {
type: 'string',
},
},
},
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots',
req: {
jobId: {
type: 'string',
},
},
},
],
method: 'GET',
});
ml.updateModelSnapshot = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots/<%=snapshotId%>/_update',
req: {
jobId: {
type: 'string',
},
snapshotId: {
type: 'string',
},
},
},
],
method: 'POST',
needBody: true,
});
ml.deleteModelSnapshot = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots/<%=snapshotId%>',
req: {
jobId: {
type: 'string',
},
snapshotId: {
type: 'string',
},
},
},
],
method: 'DELETE',
});
ml.revertModelSnapshot = ca({
urls: [
{
fmt: '/_ml/anomaly_detectors/<%=jobId%>/model_snapshots/<%=snapshotId%>/_revert',
req: {
jobId: {
type: 'string',
},
snapshotId: {
type: 'string',
},
},
},
],
method: 'POST',
});
};

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { getAdminCapabilities, getUserCapabilities } from './__mocks__/ml_capabilities';
import { capabilitiesProvider } from './check_capabilities';
import { MlLicense } from '../../../common/license';
@ -24,16 +24,28 @@ const mlIsEnabled = async () => true;
const mlIsNotEnabled = async () => false;
const mlClusterClientNonUpgrade = ({
callAsInternalUser: async () => ({
upgrade_mode: false,
}),
} as unknown) as ILegacyScopedClusterClient;
asInternalUser: {
ml: {
info: async () => ({
body: {
upgrade_mode: false,
},
}),
},
},
} as unknown) as IScopedClusterClient;
const mlClusterClientUpgrade = ({
callAsInternalUser: async () => ({
upgrade_mode: true,
}),
} as unknown) as ILegacyScopedClusterClient;
asInternalUser: {
ml: {
info: async () => ({
body: {
upgrade_mode: true,
},
}),
},
},
} as unknown) as IScopedClusterClient;
describe('check_capabilities', () => {
describe('getCapabilities() - right number of capabilities', () => {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server';
import { IScopedClusterClient, KibanaRequest } from 'kibana/server';
import { mlLog } from '../../client/log';
import {
MlCapabilities,
@ -22,12 +22,12 @@ import {
} from './errors';
export function capabilitiesProvider(
mlClusterClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
capabilities: MlCapabilities,
mlLicense: MlLicense,
isMlEnabledInSpace: () => Promise<boolean>
) {
const { isUpgradeInProgress } = upgradeCheckProvider(mlClusterClient);
const { isUpgradeInProgress } = upgradeCheckProvider(client);
async function getCapabilities(): Promise<MlCapabilitiesResponse> {
const upgradeInProgress = await isUpgradeInProgress();
const isPlatinumOrTrialLicense = mlLicense.isFullLicense();

View file

@ -4,17 +4,17 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { mlLog } from '../../client/log';
export function upgradeCheckProvider({ callAsInternalUser }: ILegacyScopedClusterClient) {
export function upgradeCheckProvider({ asInternalUser }: IScopedClusterClient) {
async function isUpgradeInProgress(): Promise<boolean> {
let upgradeInProgress = false;
try {
const info = await callAsInternalUser('ml.info');
const { body } = await asInternalUser.ml.info();
// if ml indices are currently being migrated, upgrade_mode will be set to true
// pass this back with the privileges to allow for the disabling of UI controls.
upgradeInProgress = info.upgrade_mode === true;
upgradeInProgress = body.upgrade_mode === true;
} catch (error) {
// if the ml.info check fails, it could be due to the user having insufficient privileges
// most likely they do not have the ml_user role and therefore will be blocked from using

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { mlLog } from '../../client/log';
import {
@ -17,18 +17,16 @@ import {
// - ML_ANNOTATIONS_INDEX_PATTERN index is present
// - ML_ANNOTATIONS_INDEX_ALIAS_READ alias is present
// - ML_ANNOTATIONS_INDEX_ALIAS_WRITE alias is present
export async function isAnnotationsFeatureAvailable({
callAsInternalUser,
}: ILegacyScopedClusterClient) {
export async function isAnnotationsFeatureAvailable({ asInternalUser }: IScopedClusterClient) {
try {
const indexParams = { index: ML_ANNOTATIONS_INDEX_PATTERN };
const annotationsIndexExists = await callAsInternalUser('indices.exists', indexParams);
const { body: annotationsIndexExists } = await asInternalUser.indices.exists(indexParams);
if (!annotationsIndexExists) {
return false;
}
const annotationsReadAliasExists = await callAsInternalUser('indices.existsAlias', {
const { body: annotationsReadAliasExists } = await asInternalUser.indices.existsAlias({
index: ML_ANNOTATIONS_INDEX_ALIAS_READ,
name: ML_ANNOTATIONS_INDEX_ALIAS_READ,
});
@ -37,7 +35,7 @@ export async function isAnnotationsFeatureAvailable({
return false;
}
const annotationsWriteAliasExists = await callAsInternalUser('indices.existsAlias', {
const { body: annotationsWriteAliasExists } = await asInternalUser.indices.existsAlias({
index: ML_ANNOTATIONS_INDEX_ALIAS_WRITE,
name: ML_ANNOTATIONS_INDEX_ALIAS_WRITE,
});

View file

@ -7,7 +7,6 @@ import {
KibanaRequest,
KibanaResponseFactory,
RequestHandlerContext,
ILegacyScopedClusterClient,
IScopedClusterClient,
RequestHandler,
} from 'kibana/server';
@ -15,7 +14,6 @@ import {
import { MlLicense } from '../../../common/license';
type Handler = (handlerParams: {
legacyClient: ILegacyScopedClusterClient;
client: IScopedClusterClient;
request: KibanaRequest<any, any, any, any>;
response: KibanaResponseFactory;
@ -42,7 +40,6 @@ function guard(check: () => boolean, handler: Handler) {
}
return handler({
legacyClient: context.ml!.mlClient,
client: context.core.elasticsearch.client,
request,
response,

View file

@ -22,19 +22,15 @@ describe('annotation_service', () => {
let mlClusterClientSpy = {} as any;
beforeEach(() => {
const callAs = jest.fn((action: string) => {
switch (action) {
case 'delete':
case 'index':
return Promise.resolve(acknowledgedResponseMock);
case 'search':
return Promise.resolve(getAnnotationsResponseMock);
}
});
const callAs = {
delete: jest.fn(() => Promise.resolve({ body: acknowledgedResponseMock })),
index: jest.fn(() => Promise.resolve({ body: acknowledgedResponseMock })),
search: jest.fn(() => Promise.resolve({ body: getAnnotationsResponseMock })),
};
mlClusterClientSpy = {
callAsCurrentUser: callAs,
callAsInternalUser: callAs,
asCurrentUser: callAs,
asInternalUser: callAs,
};
});
@ -52,8 +48,7 @@ describe('annotation_service', () => {
const response = await deleteAnnotation(annotationMockId);
expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('delete');
expect(mockFunct.callAsInternalUser.mock.calls[0][1]).toEqual(deleteParamsMock);
expect(mockFunct.asInternalUser.delete.mock.calls[0][0]).toStrictEqual(deleteParamsMock);
expect(response).toBe(acknowledgedResponseMock);
done();
});
@ -73,8 +68,9 @@ describe('annotation_service', () => {
const response: GetResponse = await getAnnotations(indexAnnotationArgsMock);
expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('search');
expect(mockFunct.callAsInternalUser.mock.calls[0][1]).toEqual(getAnnotationsRequestMock);
expect(mockFunct.asInternalUser.search.mock.calls[0][0]).toStrictEqual(
getAnnotationsRequestMock
);
expect(Object.keys(response.annotations)).toHaveLength(1);
expect(response.annotations[jobIdMock]).toHaveLength(2);
expect(isAnnotations(response.annotations[jobIdMock])).toBeTruthy();
@ -89,9 +85,9 @@ describe('annotation_service', () => {
};
const mlClusterClientSpyError: any = {
callAsInternalUser: jest.fn(() => {
return Promise.resolve(mockEsError);
}),
asInternalUser: {
search: jest.fn(() => Promise.resolve({ body: mockEsError })),
},
};
const { getAnnotations } = annotationServiceProvider(mlClusterClientSpyError);
@ -124,10 +120,8 @@ describe('annotation_service', () => {
const response = await indexAnnotation(annotationMock, usernameMock);
expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('index');
// test if the annotation has been correctly augmented
const indexParamsCheck = mockFunct.callAsInternalUser.mock.calls[0][1];
const indexParamsCheck = mockFunct.asInternalUser.index.mock.calls[0][0];
const annotation = indexParamsCheck.body;
expect(annotation.create_username).toBe(usernameMock);
expect(annotation.modified_username).toBe(usernameMock);
@ -154,10 +148,8 @@ describe('annotation_service', () => {
const response = await indexAnnotation(annotationMock, usernameMock);
expect(mockFunct.callAsInternalUser.mock.calls[0][0]).toBe('index');
// test if the annotation has been correctly augmented
const indexParamsCheck = mockFunct.callAsInternalUser.mock.calls[0][1];
const indexParamsCheck = mockFunct.asInternalUser.index.mock.calls[0][0];
const annotation = indexParamsCheck.body;
expect(annotation.create_username).toBe(usernameMock);
expect(annotation.modified_username).toBe(usernameMock);
@ -196,9 +188,8 @@ describe('annotation_service', () => {
await indexAnnotation(annotation, modifiedUsernameMock);
expect(mockFunct.callAsInternalUser.mock.calls[1][0]).toBe('index');
// test if the annotation has been correctly updated
const indexParamsCheck = mockFunct.callAsInternalUser.mock.calls[1][1];
const indexParamsCheck = mockFunct.asInternalUser.index.mock.calls[0][0];
const modifiedAnnotation = indexParamsCheck.body;
expect(modifiedAnnotation.annotation).toBe(modifiedAnnotationText);
expect(modifiedAnnotation.create_username).toBe(originalUsernameMock);

View file

@ -7,7 +7,7 @@
import Boom from 'boom';
import each from 'lodash/each';
import get from 'lodash/get';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { ANNOTATION_EVENT_USER, ANNOTATION_TYPE } from '../../../common/constants/annotations';
import { PARTITION_FIELDS } from '../../../common/constants/anomalies';
@ -67,17 +67,17 @@ export interface GetResponse {
export interface IndexParams {
index: string;
body: Annotation;
refresh?: string;
refresh: boolean | 'wait_for' | undefined;
id?: string;
}
export interface DeleteParams {
index: string;
refresh?: string;
refresh: boolean | 'wait_for' | undefined;
id: string;
}
export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterClient) {
export function annotationProvider({ asInternalUser }: IScopedClusterClient) {
async function indexAnnotation(annotation: Annotation, username: string) {
if (isAnnotation(annotation) === false) {
// No need to translate, this will not be exposed in the UI.
@ -104,7 +104,8 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
delete params.body.key;
}
return await callAsInternalUser('index', params);
const { body } = await asInternalUser.index(params);
return body;
}
async function getAnnotations({
@ -287,14 +288,14 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
};
try {
const resp = await callAsInternalUser('search', params);
const { body } = await asInternalUser.search(params);
if (resp.error !== undefined && resp.message !== undefined) {
if (body.error !== undefined && body.message !== undefined) {
// No need to translate, this will not be exposed in the UI.
throw new Error(`Annotations couldn't be retrieved from Elasticsearch.`);
}
const docs: Annotations = get(resp, ['hits', 'hits'], []).map((d: EsResult) => {
const docs: Annotations = get(body, ['hits', 'hits'], []).map((d: EsResult) => {
// get the original source document and the document id, we need it
// to identify the annotation when editing/deleting it.
// if original `event` is undefined then substitute with 'user` by default
@ -306,7 +307,7 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
} as Annotation;
});
const aggregations = get(resp, ['aggregations'], {}) as EsAggregationResult;
const aggregations = get(body, ['aggregations'], {}) as EsAggregationResult;
if (fields) {
obj.aggregations = aggregations;
}
@ -330,13 +331,14 @@ export function annotationProvider({ callAsInternalUser }: ILegacyScopedClusterC
}
async function deleteAnnotation(id: string) {
const param: DeleteParams = {
const params: DeleteParams = {
index: ML_ANNOTATIONS_INDEX_ALIAS_WRITE,
id,
refresh: 'wait_for',
};
return await callAsInternalUser('delete', param);
const { body } = await asInternalUser.delete(params);
return body;
}
return {

View file

@ -4,11 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { annotationProvider } from './annotation';
export function annotationServiceProvider(mlClusterClient: ILegacyScopedClusterClient) {
export function annotationServiceProvider(client: IScopedClusterClient) {
return {
...annotationProvider(mlClusterClient),
...annotationProvider(client),
};
}

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { ES_AGGREGATION } from '../../../common/constants/aggregation_types';
export interface BucketSpanEstimatorData {
@ -21,6 +21,6 @@ export interface BucketSpanEstimatorData {
}
export function estimateBucketSpanFactory({
callAsCurrentUser,
callAsInternalUser,
}: ILegacyScopedClusterClient): (config: BucketSpanEstimatorData) => Promise<any>;
asCurrentUser,
asInternalUser,
}: IScopedClusterClient): (config: BucketSpanEstimatorData) => Promise<any>;

View file

@ -16,10 +16,10 @@ import { INTERVALS } from './intervals';
import { singleSeriesCheckerFactory } from './single_series_checker';
import { polledDataCheckerFactory } from './polled_data_checker';
export function estimateBucketSpanFactory(mlClusterClient) {
const { callAsCurrentUser, callAsInternalUser } = mlClusterClient;
const PolledDataChecker = polledDataCheckerFactory(mlClusterClient);
const SingleSeriesChecker = singleSeriesCheckerFactory(mlClusterClient);
export function estimateBucketSpanFactory(client) {
const { asCurrentUser, asInternalUser } = client;
const PolledDataChecker = polledDataCheckerFactory(client);
const SingleSeriesChecker = singleSeriesCheckerFactory(client);
class BucketSpanEstimator {
constructor(
@ -246,21 +246,22 @@ export function estimateBucketSpanFactory(mlClusterClient) {
const getFieldCardinality = function (index, field) {
return new Promise((resolve, reject) => {
callAsCurrentUser('search', {
index,
size: 0,
body: {
aggs: {
field_count: {
cardinality: {
field,
asCurrentUser
.search({
index,
size: 0,
body: {
aggs: {
field_count: {
cardinality: {
field,
},
},
},
},
},
})
.then((resp) => {
const value = get(resp, ['aggregations', 'field_count', 'value'], 0);
})
.then(({ body }) => {
const value = get(body, ['aggregations', 'field_count', 'value'], 0);
resolve(value);
})
.catch((resp) => {
@ -278,28 +279,29 @@ export function estimateBucketSpanFactory(mlClusterClient) {
getFieldCardinality(index, field)
.then((value) => {
const numPartitions = Math.floor(value / NUM_PARTITIONS) || 1;
callAsCurrentUser('search', {
index,
size: 0,
body: {
query,
aggs: {
fields_bucket_counts: {
terms: {
field,
include: {
partition: 0,
num_partitions: numPartitions,
asCurrentUser
.search({
index,
size: 0,
body: {
query,
aggs: {
fields_bucket_counts: {
terms: {
field,
include: {
partition: 0,
num_partitions: numPartitions,
},
},
},
},
},
},
})
.then((partitionResp) => {
})
.then(({ body }) => {
// eslint-disable-next-line camelcase
if (partitionResp.aggregations?.fields_bucket_counts?.buckets !== undefined) {
const buckets = partitionResp.aggregations.fields_bucket_counts.buckets;
if (body.aggregations?.fields_bucket_counts?.buckets !== undefined) {
const buckets = body.aggregations.fields_bucket_counts.buckets;
fieldValues = buckets.map((b) => b.key);
}
resolve(fieldValues);
@ -338,21 +340,21 @@ export function estimateBucketSpanFactory(mlClusterClient) {
return new Promise((resolve, reject) => {
// fetch the `search.max_buckets` cluster setting so we're able to
// adjust aggregations to not exceed that limit.
callAsInternalUser('cluster.getSettings', {
flatSettings: true,
includeDefaults: true,
filterPath: '*.*max_buckets',
})
.then((settings) => {
if (typeof settings !== 'object') {
asInternalUser.cluster
.getSettings({
flat_settings: true,
include_defaults: true,
filter_path: '*.*max_buckets',
})
.then(({ body }) => {
if (typeof body !== 'object') {
reject('Unable to retrieve cluster settings');
}
// search.max_buckets could exist in default, persistent or transient cluster settings
const maxBucketsSetting = (settings.defaults ||
settings.persistent ||
settings.transient ||
{})['search.max_buckets'];
const maxBucketsSetting = (body.defaults || body.persistent || body.transient || {})[
'search.max_buckets'
];
if (maxBucketsSetting === undefined) {
reject('Unable to retrieve cluster setting search.max_buckets');

View file

@ -4,22 +4,21 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { ES_AGGREGATION } from '../../../common/constants/aggregation_types';
import { estimateBucketSpanFactory, BucketSpanEstimatorData } from './bucket_span_estimator';
const callAs = () => {
return new Promise((resolve) => {
resolve({});
}) as Promise<any>;
const callAs = {
search: () => Promise.resolve({ body: {} }),
cluster: { getSettings: () => Promise.resolve({ body: {} }) },
};
const mlClusterClient: ILegacyScopedClusterClient = {
callAsCurrentUser: callAs,
callAsInternalUser: callAs,
};
const mlClusterClient = ({
asCurrentUser: callAs,
asInternalUser: callAs,
} as unknown) as IScopedClusterClient;
// mock configuration to be passed to the estimator
const formConfig: BucketSpanEstimatorData = {

View file

@ -12,7 +12,7 @@
import get from 'lodash/get';
export function polledDataCheckerFactory({ callAsCurrentUser }) {
export function polledDataCheckerFactory({ asCurrentUser }) {
class PolledDataChecker {
constructor(index, timeField, duration, query) {
this.index = index;
@ -65,14 +65,15 @@ export function polledDataCheckerFactory({ callAsCurrentUser }) {
return search;
}
performSearch(intervalMs) {
const body = this.createSearch(intervalMs);
async performSearch(intervalMs) {
const searchBody = this.createSearch(intervalMs);
return callAsCurrentUser('search', {
const { body } = await asCurrentUser.search({
index: this.index,
size: 0,
body,
body: searchBody,
});
return body;
}
// test that the coefficient of variation of time difference between non-empty buckets is small

View file

@ -13,7 +13,7 @@
import { mlLog } from '../../client/log';
import { INTERVALS, LONG_INTERVALS } from './intervals';
export function singleSeriesCheckerFactory({ callAsCurrentUser }) {
export function singleSeriesCheckerFactory({ asCurrentUser }) {
const REF_DATA_INTERVAL = { name: '1h', ms: 3600000 };
class SingleSeriesChecker {
@ -184,14 +184,15 @@ export function singleSeriesCheckerFactory({ callAsCurrentUser }) {
return search;
}
performSearch(intervalMs) {
const body = this.createSearch(intervalMs);
async performSearch(intervalMs) {
const searchBody = this.createSearch(intervalMs);
return callAsCurrentUser('search', {
const { body } = await asCurrentUser.search({
index: this.index,
size: 0,
body,
body: searchBody,
});
return body;
}
getFullBuckets(buckets) {

View file

@ -5,13 +5,13 @@
*/
import numeral from '@elastic/numeral';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { MLCATEGORY } from '../../../common/constants/field_types';
import { AnalysisConfig } from '../../../common/types/anomaly_detection_jobs';
import { fieldsServiceProvider } from '../fields_service';
import { MlInfoResponse } from '../../../common/types/ml_server_info';
interface ModelMemoryEstimationResult {
export interface ModelMemoryEstimationResult {
/**
* Result model memory limit
*/
@ -29,15 +29,15 @@ interface ModelMemoryEstimationResult {
/**
* Response of the _estimate_model_memory endpoint.
*/
export interface ModelMemoryEstimate {
export interface ModelMemoryEstimateResponse {
model_memory_estimate: string;
}
/**
* Retrieves overall and max bucket cardinalities.
*/
const cardinalityCheckProvider = (mlClusterClient: ILegacyScopedClusterClient) => {
const fieldsService = fieldsServiceProvider(mlClusterClient);
const cardinalityCheckProvider = (client: IScopedClusterClient) => {
const fieldsService = fieldsServiceProvider(client);
return async (
analysisConfig: AnalysisConfig,
@ -123,9 +123,9 @@ const cardinalityCheckProvider = (mlClusterClient: ILegacyScopedClusterClient) =
};
};
export function calculateModelMemoryLimitProvider(mlClusterClient: ILegacyScopedClusterClient) {
const { callAsInternalUser } = mlClusterClient;
const getCardinalities = cardinalityCheckProvider(mlClusterClient);
export function calculateModelMemoryLimitProvider(client: IScopedClusterClient) {
const { asInternalUser } = client;
const getCardinalities = cardinalityCheckProvider(client);
/**
* Retrieves an estimated size of the model memory limit used in the job config
@ -141,7 +141,7 @@ export function calculateModelMemoryLimitProvider(mlClusterClient: ILegacyScoped
latestMs: number,
allowMMLGreaterThanMax = false
): Promise<ModelMemoryEstimationResult> {
const info = (await callAsInternalUser('ml.info')) as MlInfoResponse;
const { body: info } = await asInternalUser.ml.info<MlInfoResponse>();
const maxModelMemoryLimit = info.limits.max_model_memory_limit?.toUpperCase();
const effectiveMaxModelMemoryLimit = info.limits.effective_max_model_memory_limit?.toUpperCase();
@ -154,13 +154,14 @@ export function calculateModelMemoryLimitProvider(mlClusterClient: ILegacyScoped
latestMs
);
const estimatedModelMemoryLimit = ((await callAsInternalUser('ml.estimateModelMemory', {
const { body } = await asInternalUser.ml.estimateModelMemory<ModelMemoryEstimateResponse>({
body: {
analysis_config: analysisConfig,
overall_cardinality: overallCardinality,
max_bucket_cardinality: maxBucketCardinality,
},
})) as ModelMemoryEstimate).model_memory_estimate.toUpperCase();
});
const estimatedModelMemoryLimit = body.model_memory_estimate.toUpperCase();
let modelMemoryLimit = estimatedModelMemoryLimit;
let mmlCappedAtMax = false;

View file

@ -5,7 +5,7 @@
*/
import { difference } from 'lodash';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { EventManager, CalendarEvent } from './event_manager';
interface BasicCalendar {
@ -23,30 +23,30 @@ export interface FormCalendar extends BasicCalendar {
}
export class CalendarManager {
private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser'];
private _asInternalUser: IScopedClusterClient['asInternalUser'];
private _eventManager: EventManager;
constructor(mlClusterClient: ILegacyScopedClusterClient) {
this._callAsInternalUser = mlClusterClient.callAsInternalUser;
this._eventManager = new EventManager(mlClusterClient);
constructor(client: IScopedClusterClient) {
this._asInternalUser = client.asInternalUser;
this._eventManager = new EventManager(client);
}
async getCalendar(calendarId: string) {
const resp = await this._callAsInternalUser('ml.calendars', {
calendarId,
const { body } = await this._asInternalUser.ml.getCalendars({
calendar_id: calendarId,
});
const calendars = resp.calendars;
const calendars = body.calendars;
const calendar = calendars[0]; // Endpoint throws a 404 if calendar is not found.
calendar.events = await this._eventManager.getCalendarEvents(calendarId);
return calendar;
}
async getAllCalendars() {
const calendarsResp = await this._callAsInternalUser('ml.calendars');
const { body } = await this._asInternalUser.ml.getCalendars({ size: 1000 });
const events: CalendarEvent[] = await this._eventManager.getAllEvents();
const calendars: Calendar[] = calendarsResp.calendars;
const calendars: Calendar[] = body.calendars;
calendars.forEach((cal) => (cal.events = []));
// loop events and combine with related calendars
@ -71,8 +71,8 @@ export class CalendarManager {
async newCalendar(calendar: FormCalendar) {
const { calendarId, events, ...newCalendar } = calendar;
await this._callAsInternalUser('ml.addCalendar', {
calendarId,
await this._asInternalUser.ml.putCalendar({
calendar_id: calendarId,
body: newCalendar,
});
@ -106,17 +106,17 @@ export class CalendarManager {
// add all new jobs
if (jobsToAdd.length) {
await this._callAsInternalUser('ml.addJobToCalendar', {
calendarId,
jobId: jobsToAdd.join(','),
await this._asInternalUser.ml.putCalendarJob({
calendar_id: calendarId,
job_id: jobsToAdd.join(','),
});
}
// remove all removed jobs
if (jobsToRemove.length) {
await this._callAsInternalUser('ml.removeJobFromCalendar', {
calendarId,
jobId: jobsToRemove.join(','),
await this._asInternalUser.ml.deleteCalendarJob({
calendar_id: calendarId,
job_id: jobsToRemove.join(','),
});
}
@ -137,6 +137,7 @@ export class CalendarManager {
}
async deleteCalendar(calendarId: string) {
return this._callAsInternalUser('ml.deleteCalendar', { calendarId });
const { body } = await this._asInternalUser.ml.deleteCalendar({ calendar_id: calendarId });
return body;
}
}

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { GLOBAL_CALENDAR } from '../../../common/constants/calendars';
export interface CalendarEvent {
@ -16,39 +16,42 @@ export interface CalendarEvent {
}
export class EventManager {
private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser'];
constructor({ callAsInternalUser }: ILegacyScopedClusterClient) {
this._callAsInternalUser = callAsInternalUser;
private _asInternalUser: IScopedClusterClient['asInternalUser'];
constructor({ asInternalUser }: IScopedClusterClient) {
this._asInternalUser = asInternalUser;
}
async getCalendarEvents(calendarId: string) {
const resp = await this._callAsInternalUser('ml.events', { calendarId });
const { body } = await this._asInternalUser.ml.getCalendarEvents({ calendar_id: calendarId });
return resp.events;
return body.events;
}
// jobId is optional
async getAllEvents(jobId?: string) {
const calendarId = GLOBAL_CALENDAR;
const resp = await this._callAsInternalUser('ml.events', {
calendarId,
jobId,
const { body } = await this._asInternalUser.ml.getCalendarEvents({
calendar_id: calendarId,
job_id: jobId,
});
return resp.events;
return body.events;
}
async addEvents(calendarId: string, events: CalendarEvent[]) {
const body = { events };
return await this._callAsInternalUser('ml.addEvent', {
calendarId,
return await this._asInternalUser.ml.postCalendarEvents({
calendar_id: calendarId,
body,
});
}
async deleteEvent(calendarId: string, eventId: string) {
return this._callAsInternalUser('ml.deleteEvent', { calendarId, eventId });
return this._asInternalUser.ml.deleteCalendarEvent({
calendar_id: calendarId,
event_id: eventId,
});
}
isEqual(ev1: CalendarEvent, ev2: CalendarEvent) {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { ML_NOTIFICATION_INDEX_PATTERN } from '../../../common/constants/index_patterns';
import { JobMessage } from '../../../common/types/audit_message';
@ -23,7 +23,7 @@ interface BoolQuery {
bool: { [key: string]: any };
}
export function analyticsAuditMessagesProvider({ callAsInternalUser }: ILegacyScopedClusterClient) {
export function analyticsAuditMessagesProvider({ asInternalUser }: IScopedClusterClient) {
// search for audit messages,
// analyticsId is optional. without it, all analytics will be listed.
async function getAnalyticsAuditMessages(analyticsId: string) {
@ -68,27 +68,23 @@ export function analyticsAuditMessagesProvider({ callAsInternalUser }: ILegacySc
});
}
try {
const resp = await callAsInternalUser('search', {
index: ML_NOTIFICATION_INDEX_PATTERN,
ignore_unavailable: true,
rest_total_hits_as_int: true,
size: SIZE,
body: {
sort: [{ timestamp: { order: 'desc' } }, { job_id: { order: 'asc' } }],
query,
},
});
const { body } = await asInternalUser.search({
index: ML_NOTIFICATION_INDEX_PATTERN,
ignore_unavailable: true,
rest_total_hits_as_int: true,
size: SIZE,
body: {
sort: [{ timestamp: { order: 'desc' } }, { job_id: { order: 'asc' } }],
query,
},
});
let messages = [];
if (resp.hits.total !== 0) {
messages = resp.hits.hits.map((hit: Message) => hit._source);
messages.reverse();
}
return messages;
} catch (e) {
throw e;
let messages = [];
if (body.hits.total !== 0) {
messages = body.hits.hits.map((hit: Message) => hit._source);
messages.reverse();
}
return messages;
}
return {

View file

@ -4,13 +4,20 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SavedObjectsClientContract, KibanaRequest } from 'kibana/server';
import { SavedObjectsClientContract, KibanaRequest, IScopedClusterClient } from 'kibana/server';
import { Module } from '../../../common/types/modules';
import { DataRecognizer } from '../data_recognizer';
const callAs = () => Promise.resolve({ body: {} });
const mlClusterClient = ({
asCurrentUser: callAs,
asInternalUser: callAs,
} as unknown) as IScopedClusterClient;
describe('ML - data recognizer', () => {
const dr = new DataRecognizer(
{ callAsCurrentUser: jest.fn(), callAsInternalUser: jest.fn() },
mlClusterClient,
({
find: jest.fn(),
bulkCreate: jest.fn(),

View file

@ -7,15 +7,11 @@
import fs from 'fs';
import Boom from 'boom';
import numeral from '@elastic/numeral';
import {
KibanaRequest,
ILegacyScopedClusterClient,
SavedObjectsClientContract,
} from 'kibana/server';
import { KibanaRequest, IScopedClusterClient, SavedObjectsClientContract } from 'kibana/server';
import moment from 'moment';
import { IndexPatternAttributes } from 'src/plugins/data/server';
import { merge } from 'lodash';
import { AnalysisLimits, CombinedJobWithStats } from '../../../common/types/anomaly_detection_jobs';
import { AnalysisLimits } from '../../../common/types/anomaly_detection_jobs';
import { getAuthorizationHeader } from '../../lib/request_authorization';
import { MlInfoResponse } from '../../../common/types/ml_server_info';
import {
@ -46,6 +42,7 @@ import { fieldsServiceProvider } from '../fields_service';
import { jobServiceProvider } from '../job_service';
import { resultsServiceProvider } from '../results_service';
import { JobExistResult, JobStat } from '../../../common/types/data_recognizer';
import { MlJobsStatsResponse } from '../job_service/jobs';
const ML_DIR = 'ml';
const KIBANA_DIR = 'kibana';
@ -74,10 +71,6 @@ interface RawModuleConfig {
};
}
interface MlJobStats {
jobs: CombinedJobWithStats[];
}
interface Config {
dirName: any;
json: RawModuleConfig;
@ -111,9 +104,9 @@ interface SaveResults {
}
export class DataRecognizer {
private _callAsCurrentUser: ILegacyScopedClusterClient['callAsCurrentUser'];
private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser'];
private _mlClusterClient: ILegacyScopedClusterClient;
private _asCurrentUser: IScopedClusterClient['asCurrentUser'];
private _asInternalUser: IScopedClusterClient['asInternalUser'];
private _client: IScopedClusterClient;
private _authorizationHeader: object;
private _modulesDir = `${__dirname}/modules`;
private _indexPatternName: string = '';
@ -124,13 +117,13 @@ export class DataRecognizer {
jobsForModelMemoryEstimation: Array<{ job: ModuleJob; query: any }> = [];
constructor(
mlClusterClient: ILegacyScopedClusterClient,
mlClusterClient: IScopedClusterClient,
private savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest
) {
this._mlClusterClient = mlClusterClient;
this._callAsCurrentUser = mlClusterClient.callAsCurrentUser;
this._callAsInternalUser = mlClusterClient.callAsInternalUser;
this._client = mlClusterClient;
this._asCurrentUser = mlClusterClient.asCurrentUser;
this._asInternalUser = mlClusterClient.asInternalUser;
this._authorizationHeader = getAuthorizationHeader(request);
}
@ -249,18 +242,18 @@ export class DataRecognizer {
const index = indexPattern;
const size = 0;
const body = {
const searchBody = {
query: moduleConfig.query,
};
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
rest_total_hits_as_int: true,
size,
body,
body: searchBody,
});
return resp.hits.total !== 0;
return body.hits.total !== 0;
}
async listModules() {
@ -518,7 +511,7 @@ export class DataRecognizer {
// Add a wildcard at the front of each of the job IDs in the module,
// as a prefix may have been supplied when creating the jobs in the module.
const jobIds = module.jobs.map((job) => `*${job.id}`);
const { jobsExist } = jobServiceProvider(this._mlClusterClient);
const { jobsExist } = jobServiceProvider(this._client);
const jobInfo = await jobsExist(jobIds);
// Check if the value for any of the jobs is false.
@ -527,16 +520,16 @@ export class DataRecognizer {
if (doJobsExist === true) {
// Get the IDs of the jobs created from the module, and their earliest / latest timestamps.
const jobStats: MlJobStats = await this._callAsInternalUser('ml.jobStats', {
jobId: jobIds,
const { body } = await this._asInternalUser.ml.getJobStats<MlJobsStatsResponse>({
job_id: jobIds.join(),
});
const jobStatsJobs: JobStat[] = [];
if (jobStats.jobs && jobStats.jobs.length > 0) {
const foundJobIds = jobStats.jobs.map((job) => job.job_id);
const { getLatestBucketTimestampByJob } = resultsServiceProvider(this._mlClusterClient);
if (body.jobs && body.jobs.length > 0) {
const foundJobIds = body.jobs.map((job) => job.job_id);
const { getLatestBucketTimestampByJob } = resultsServiceProvider(this._client);
const latestBucketTimestampsByJob = await getLatestBucketTimestampByJob(foundJobIds);
jobStats.jobs.forEach((job) => {
body.jobs.forEach((job) => {
const jobStat = {
id: job.job_id,
} as JobStat;
@ -704,16 +697,15 @@ export class DataRecognizer {
job.id = jobId;
await this.saveJob(job);
return { id: jobId, success: true };
} catch (error) {
return { id: jobId, success: false, error };
} catch ({ body }) {
return { id: jobId, success: false, error: body };
}
})
);
}
async saveJob(job: ModuleJob) {
const { id: jobId, config: body } = job;
return this._callAsInternalUser('ml.addJob', { jobId, body });
return this._asInternalUser.ml.putJob({ job_id: job.id, body: job.config });
}
// save the datafeeds.
@ -725,20 +717,21 @@ export class DataRecognizer {
try {
await this.saveDatafeed(datafeed);
return { id: datafeed.id, success: true, started: false };
} catch (error) {
return { id: datafeed.id, success: false, started: false, error };
} catch ({ body }) {
return { id: datafeed.id, success: false, started: false, error: body };
}
})
);
}
async saveDatafeed(datafeed: ModuleDataFeed) {
const { id: datafeedId, config: body } = datafeed;
return this._callAsInternalUser('ml.addDatafeed', {
datafeedId,
body,
...this._authorizationHeader,
});
return this._asInternalUser.ml.putDatafeed(
{
datafeed_id: datafeed.id,
body: datafeed.config,
},
this._authorizationHeader
);
}
async startDatafeeds(
@ -761,10 +754,10 @@ export class DataRecognizer {
const result = { started: false } as DatafeedResponse;
let opened = false;
try {
const openResult = await this._callAsInternalUser('ml.openJob', {
jobId: datafeed.config.job_id,
const { body } = await this._asInternalUser.ml.openJob({
job_id: datafeed.config.job_id,
});
opened = openResult.opened;
opened = body.opened;
} catch (error) {
// if the job is already open, a 409 will be returned.
if (error.statusCode === 409) {
@ -772,27 +765,27 @@ export class DataRecognizer {
} else {
opened = false;
result.started = false;
result.error = error;
result.error = error.body;
}
}
if (opened) {
try {
const duration: { start: number; end?: number } = { start: 0 };
const duration: { start: string; end?: string } = { start: '0' };
if (start !== undefined) {
duration.start = start;
duration.start = (start as unknown) as string;
}
if (end !== undefined) {
duration.end = end;
duration.end = (end as unknown) as string;
}
await this._callAsInternalUser('ml.startDatafeed', {
datafeedId: datafeed.id,
await this._asInternalUser.ml.startDatafeed({
datafeed_id: datafeed.id,
...duration,
});
result.started = true;
} catch (error) {
} catch ({ body }) {
result.started = false;
result.error = error;
result.error = body;
}
}
return result;
@ -995,7 +988,7 @@ export class DataRecognizer {
timeField: string,
query?: any
): Promise<{ start: number; end: number }> {
const fieldsService = fieldsServiceProvider(this._mlClusterClient);
const fieldsService = fieldsServiceProvider(this._client);
const timeFieldRange = await fieldsService.getTimeFieldRange(
this._indexPatternName,
@ -1025,7 +1018,7 @@ export class DataRecognizer {
if (estimateMML && this.jobsForModelMemoryEstimation.length > 0) {
try {
const calculateModelMemoryLimit = calculateModelMemoryLimitProvider(this._mlClusterClient);
const calculateModelMemoryLimit = calculateModelMemoryLimitProvider(this._client);
// Checks if all jobs in the module have the same time field configured
const firstJobTimeField = this.jobsForModelMemoryEstimation[0].job.config.data_description
@ -1074,11 +1067,13 @@ export class DataRecognizer {
job.config.analysis_limits.model_memory_limit = modelMemoryLimit;
}
} catch (error) {
mlLog.warn(`Data recognizer could not estimate model memory limit ${error}`);
mlLog.warn(`Data recognizer could not estimate model memory limit ${error.body}`);
}
}
const { limits } = (await this._callAsInternalUser('ml.info')) as MlInfoResponse;
const {
body: { limits },
} = await this._asInternalUser.ml.info<MlInfoResponse>();
const maxMml = limits.max_model_memory_limit;
if (!maxMml) {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import get from 'lodash/get';
import each from 'lodash/each';
import last from 'lodash/last';
@ -183,7 +183,7 @@ type BatchStats =
| FieldExamples;
const getAggIntervals = async (
{ callAsCurrentUser }: ILegacyScopedClusterClient,
{ asCurrentUser }: IScopedClusterClient,
indexPatternTitle: string,
query: any,
fields: HistogramField[],
@ -207,7 +207,7 @@ const getAggIntervals = async (
return aggs;
}, {} as Record<string, object>);
const respStats = await callAsCurrentUser('search', {
const { body } = await asCurrentUser.search({
index: indexPatternTitle,
size: 0,
body: {
@ -218,8 +218,7 @@ const getAggIntervals = async (
});
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const aggregations =
aggsPath.length > 0 ? get(respStats.aggregations, aggsPath) : respStats.aggregations;
const aggregations = aggsPath.length > 0 ? get(body.aggregations, aggsPath) : body.aggregations;
return Object.keys(aggregations).reduce((p, aggName) => {
const stats = [aggregations[aggName].min, aggregations[aggName].max];
@ -241,15 +240,15 @@ const getAggIntervals = async (
// export for re-use by transforms plugin
export const getHistogramsForFields = async (
mlClusterClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
indexPatternTitle: string,
query: any,
fields: HistogramField[],
samplerShardSize: number
) => {
const { callAsCurrentUser } = mlClusterClient;
const { asCurrentUser } = client;
const aggIntervals = await getAggIntervals(
mlClusterClient,
client,
indexPatternTitle,
query,
fields,
@ -291,7 +290,7 @@ export const getHistogramsForFields = async (
return [];
}
const respChartsData = await callAsCurrentUser('search', {
const { body } = await asCurrentUser.search({
index: indexPatternTitle,
size: 0,
body: {
@ -302,8 +301,7 @@ export const getHistogramsForFields = async (
});
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const aggregations =
aggsPath.length > 0 ? get(respChartsData.aggregations, aggsPath) : respChartsData.aggregations;
const aggregations = aggsPath.length > 0 ? get(body.aggregations, aggsPath) : body.aggregations;
const chartsData: ChartData[] = fields.map(
(field): ChartData => {
@ -350,12 +348,12 @@ export const getHistogramsForFields = async (
};
export class DataVisualizer {
private _mlClusterClient: ILegacyScopedClusterClient;
private _callAsCurrentUser: ILegacyScopedClusterClient['callAsCurrentUser'];
private _client: IScopedClusterClient;
private _asCurrentUser: IScopedClusterClient['asCurrentUser'];
constructor(mlClusterClient: ILegacyScopedClusterClient) {
this._callAsCurrentUser = mlClusterClient.callAsCurrentUser;
this._mlClusterClient = mlClusterClient;
constructor(client: IScopedClusterClient) {
this._asCurrentUser = client.asCurrentUser;
this._client = client;
}
// Obtains overall stats on the fields in the supplied index pattern, returning an object
@ -451,7 +449,7 @@ export class DataVisualizer {
samplerShardSize: number
): Promise<any> {
return await getHistogramsForFields(
this._mlClusterClient,
this._client,
indexPatternTitle,
query,
fields,
@ -621,7 +619,7 @@ export class DataVisualizer {
};
});
const body = {
const searchBody = {
query: {
bool: {
filter: filterCriteria,
@ -630,14 +628,14 @@ export class DataVisualizer {
aggs: buildSamplerAggregation(aggs, samplerShardSize),
};
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
rest_total_hits_as_int: true,
size,
body,
body: searchBody,
});
const aggregations = resp.aggregations;
const totalCount = get(resp, ['hits', 'total'], 0);
const aggregations = body.aggregations;
const totalCount = get(body, ['hits', 'total'], 0);
const stats = {
totalCount,
aggregatableExistsFields: [] as FieldData[],
@ -688,7 +686,7 @@ export class DataVisualizer {
const size = 0;
const filterCriteria = buildBaseFilterCriteria(timeFieldName, earliestMs, latestMs, query);
const body = {
const searchBody = {
query: {
bool: {
filter: filterCriteria,
@ -697,13 +695,13 @@ export class DataVisualizer {
};
filterCriteria.push({ exists: { field } });
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
rest_total_hits_as_int: true,
size,
body,
body: searchBody,
});
return resp.hits.total > 0;
return body.hits.total > 0;
}
async getDocumentCountStats(
@ -730,7 +728,7 @@ export class DataVisualizer {
},
};
const body = {
const searchBody = {
query: {
bool: {
filter: filterCriteria,
@ -739,15 +737,15 @@ export class DataVisualizer {
aggs,
};
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
size,
body,
body: searchBody,
});
const buckets: { [key: string]: number } = {};
const dataByTimeBucket: Array<{ key: string; doc_count: number }> = get(
resp,
body,
['aggregations', 'eventRate', 'buckets'],
[]
);
@ -833,7 +831,7 @@ export class DataVisualizer {
}
});
const body = {
const searchBody = {
query: {
bool: {
filter: filterCriteria,
@ -842,12 +840,12 @@ export class DataVisualizer {
aggs: buildSamplerAggregation(aggs, samplerShardSize),
};
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
size,
body,
body: searchBody,
});
const aggregations = resp.aggregations;
const aggregations = body.aggregations;
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const batchStats: NumericFieldStats[] = [];
fields.forEach((field, i) => {
@ -954,7 +952,7 @@ export class DataVisualizer {
}
});
const body = {
const searchBody = {
query: {
bool: {
filter: filterCriteria,
@ -963,12 +961,12 @@ export class DataVisualizer {
aggs: buildSamplerAggregation(aggs, samplerShardSize),
};
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
size,
body,
body: searchBody,
});
const aggregations = resp.aggregations;
const aggregations = body.aggregations;
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const batchStats: StringFieldStats[] = [];
fields.forEach((field, i) => {
@ -1028,7 +1026,7 @@ export class DataVisualizer {
};
});
const body = {
const searchBody = {
query: {
bool: {
filter: filterCriteria,
@ -1037,12 +1035,12 @@ export class DataVisualizer {
aggs: buildSamplerAggregation(aggs, samplerShardSize),
};
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
size,
body,
body: searchBody,
});
const aggregations = resp.aggregations;
const aggregations = body.aggregations;
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const batchStats: DateFieldStats[] = [];
fields.forEach((field, i) => {
@ -1095,7 +1093,7 @@ export class DataVisualizer {
};
});
const body = {
const searchBody = {
query: {
bool: {
filter: filterCriteria,
@ -1104,12 +1102,12 @@ export class DataVisualizer {
aggs: buildSamplerAggregation(aggs, samplerShardSize),
};
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
size,
body,
body: searchBody,
});
const aggregations = resp.aggregations;
const aggregations = body.aggregations;
const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize);
const batchStats: BooleanFieldStats[] = [];
fields.forEach((field, i) => {
@ -1157,7 +1155,7 @@ export class DataVisualizer {
exists: { field },
});
const body = {
const searchBody = {
_source: field,
query: {
bool: {
@ -1166,18 +1164,18 @@ export class DataVisualizer {
},
};
const resp = await this._callAsCurrentUser('search', {
const { body } = await this._asCurrentUser.search({
index,
rest_total_hits_as_int: true,
size,
body,
body: searchBody,
});
const stats = {
fieldName: field,
examples: [] as any[],
};
if (resp.hits.total !== 0) {
const hits = resp.hits.hits;
if (body.hits.total !== 0) {
const hits = body.hits.hits;
for (let i = 0; i < hits.length; i++) {
// Look in the _source for the field value.
// If the field is not in the _source (as will happen if the

View file

@ -5,7 +5,7 @@
*/
import Boom from 'boom';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { duration } from 'moment';
import { parseInterval } from '../../../common/util/parse_interval';
import { initCardinalityFieldsCache } from './fields_aggs_cache';
@ -14,7 +14,7 @@ import { initCardinalityFieldsCache } from './fields_aggs_cache';
* Service for carrying out queries to obtain data
* specific to fields in Elasticsearch indices.
*/
export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) {
export function fieldsServiceProvider({ asCurrentUser }: IScopedClusterClient) {
const fieldsAggsCache = initCardinalityFieldsCache();
/**
@ -37,13 +37,13 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste
index: string | string[],
fieldNames: string[]
): Promise<string[]> {
const fieldCapsResp = await callAsCurrentUser('fieldCaps', {
const { body } = await asCurrentUser.fieldCaps({
index,
fields: fieldNames,
});
const aggregatableFields: string[] = [];
fieldNames.forEach((fieldName) => {
const fieldInfo = fieldCapsResp.fields[fieldName];
const fieldInfo = body.fields[fieldName];
const typeKeys = fieldInfo !== undefined ? Object.keys(fieldInfo) : [];
if (typeKeys.length > 0) {
const fieldType = typeKeys[0];
@ -130,12 +130,12 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste
aggs,
};
const aggregations = (
await callAsCurrentUser('search', {
index,
body,
})
)?.aggregations;
const {
body: { aggregations },
} = await asCurrentUser.search({
index,
body,
});
if (!aggregations) {
return {};
@ -170,7 +170,9 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste
}> {
const obj = { success: true, start: { epoch: 0, string: '' }, end: { epoch: 0, string: '' } };
const resp = await callAsCurrentUser('search', {
const {
body: { aggregations },
} = await asCurrentUser.search({
index,
size: 0,
body: {
@ -190,12 +192,12 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste
},
});
if (resp.aggregations && resp.aggregations.earliest && resp.aggregations.latest) {
obj.start.epoch = resp.aggregations.earliest.value;
obj.start.string = resp.aggregations.earliest.value_as_string;
if (aggregations && aggregations.earliest && aggregations.latest) {
obj.start.epoch = aggregations.earliest.value;
obj.start.string = aggregations.earliest.value_as_string;
obj.end.epoch = resp.aggregations.latest.value;
obj.end.string = resp.aggregations.latest.value_as_string;
obj.end.epoch = aggregations.latest.value;
obj.end.string = aggregations.latest.value_as_string;
}
return obj;
}
@ -338,12 +340,12 @@ export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedCluste
},
};
const aggregations = (
await callAsCurrentUser('search', {
index,
body,
})
)?.aggregations;
const {
body: { aggregations },
} = await asCurrentUser.search({
index,
body,
});
if (!aggregations) {
return cachedValues;

View file

@ -4,18 +4,20 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import {
AnalysisResult,
FormattedOverrides,
InputOverrides,
FindFileStructureResponse,
} from '../../../common/types/file_datavisualizer';
export type InputData = any[];
export function fileDataVisualizerProvider({ callAsInternalUser }: ILegacyScopedClusterClient) {
async function analyzeFile(data: any, overrides: any): Promise<AnalysisResult> {
const results = await callAsInternalUser('ml.fileStructure', {
export function fileDataVisualizerProvider({ asInternalUser }: IScopedClusterClient) {
async function analyzeFile(data: InputData, overrides: InputOverrides): Promise<AnalysisResult> {
overrides.explain = overrides.explain === undefined ? 'true' : overrides.explain;
const { body } = await asInternalUser.ml.findFileStructure<FindFileStructureResponse>({
body: data,
...overrides,
});
@ -24,7 +26,7 @@ export function fileDataVisualizerProvider({ callAsInternalUser }: ILegacyScoped
return {
...(hasOverrides && { overrides: reducedOverrides }),
results,
results: body,
};
}

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { INDEX_META_DATA_CREATED_BY } from '../../../common/constants/file_datavisualizer';
import {
ImportResponse,
@ -15,7 +15,7 @@ import {
} from '../../../common/types/file_datavisualizer';
import { InputData } from './file_data_visualizer';
export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) {
export function importDataProvider({ asCurrentUser }: IScopedClusterClient) {
async function importData(
id: string,
index: string,
@ -40,9 +40,9 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl
// create the pipeline if one has been supplied
if (pipelineId !== undefined) {
const success = await createPipeline(pipelineId, pipeline);
if (success.acknowledged !== true) {
throw success;
const resp = await createPipeline(pipelineId, pipeline);
if (resp.acknowledged !== true) {
throw resp;
}
}
createdPipelineId = pipelineId;
@ -80,7 +80,7 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl
id,
index: createdIndex,
pipelineId: createdPipelineId,
error: error.error !== undefined ? error.error : error,
error: error.body !== undefined ? error.body : error,
docCount,
ingestError: error.ingestError,
failures: error.failures || [],
@ -102,7 +102,7 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl
body.settings = settings;
}
await callAsCurrentUser('indices.create', { index, body });
await asCurrentUser.indices.create({ index, body });
}
async function indexData(index: string, pipelineId: string, data: InputData) {
@ -118,7 +118,7 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl
settings.pipeline = pipelineId;
}
const resp = await callAsCurrentUser('bulk', settings);
const { body: resp } = await asCurrentUser.bulk(settings);
if (resp.errors) {
throw resp;
} else {
@ -151,7 +151,8 @@ export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterCl
}
async function createPipeline(id: string, pipeline: any) {
return await callAsCurrentUser('ingest.putPipeline', { id, body: pipeline });
const { body } = await asCurrentUser.ingest.putPipeline({ id, body: pipeline });
return body;
}
function getFailures(items: any[], data: InputData): ImportFailure[] {

View file

@ -5,7 +5,7 @@
*/
import Boom from 'boom';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { DetectorRule, DetectorRuleScope } from '../../../common/types/detector_rules';
@ -58,26 +58,26 @@ interface PartialJob {
}
export class FilterManager {
private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser'];
constructor({ callAsInternalUser }: ILegacyScopedClusterClient) {
this._callAsInternalUser = callAsInternalUser;
private _asInternalUser: IScopedClusterClient['asInternalUser'];
constructor({ asInternalUser }: IScopedClusterClient) {
this._asInternalUser = asInternalUser;
}
async getFilter(filterId: string) {
try {
const [JOBS, FILTERS] = [0, 1];
const results = await Promise.all([
this._callAsInternalUser('ml.jobs'),
this._callAsInternalUser('ml.filters', { filterId }),
this._asInternalUser.ml.getJobs(),
this._asInternalUser.ml.getFilters({ filter_id: filterId }),
]);
if (results[FILTERS] && results[FILTERS].filters.length) {
if (results[FILTERS] && results[FILTERS].body.filters.length) {
let filtersInUse: FiltersInUse = {};
if (results[JOBS] && results[JOBS].jobs) {
filtersInUse = this.buildFiltersInUse(results[JOBS].jobs);
if (results[JOBS] && results[JOBS].body.jobs) {
filtersInUse = this.buildFiltersInUse(results[JOBS].body.jobs);
}
const filter = results[FILTERS].filters[0];
const filter = results[FILTERS].body.filters[0];
filter.used_by = filtersInUse[filter.filter_id];
return filter;
} else {
@ -90,8 +90,8 @@ export class FilterManager {
async getAllFilters() {
try {
const filtersResp = await this._callAsInternalUser('ml.filters');
return filtersResp.filters;
const { body } = await this._asInternalUser.ml.getFilters({ size: 1000 });
return body.filters;
} catch (error) {
throw Boom.badRequest(error);
}
@ -101,14 +101,14 @@ export class FilterManager {
try {
const [JOBS, FILTERS] = [0, 1];
const results = await Promise.all([
this._callAsInternalUser('ml.jobs'),
this._callAsInternalUser('ml.filters'),
this._asInternalUser.ml.getJobs(),
this._asInternalUser.ml.getFilters({ size: 1000 }),
]);
// Build a map of filter_ids against jobs and detectors using that filter.
let filtersInUse: FiltersInUse = {};
if (results[JOBS] && results[JOBS].jobs) {
filtersInUse = this.buildFiltersInUse(results[JOBS].jobs);
if (results[JOBS] && results[JOBS].body.jobs) {
filtersInUse = this.buildFiltersInUse(results[JOBS].body.jobs);
}
// For each filter, return just
@ -117,8 +117,8 @@ export class FilterManager {
// item_count
// jobs using the filter
const filterStats: FilterStats[] = [];
if (results[FILTERS] && results[FILTERS].filters) {
results[FILTERS].filters.forEach((filter: Filter) => {
if (results[FILTERS] && results[FILTERS].body.filters) {
results[FILTERS].body.filters.forEach((filter: Filter) => {
const stats: FilterStats = {
filter_id: filter.filter_id,
description: filter.description,
@ -139,7 +139,8 @@ export class FilterManager {
const { filterId, ...body } = filter;
try {
// Returns the newly created filter.
return await this._callAsInternalUser('ml.addFilter', { filterId, body });
const { body: resp } = await this._asInternalUser.ml.putFilter({ filter_id: filterId, body });
return resp;
} catch (error) {
throw Boom.badRequest(error);
}
@ -159,17 +160,19 @@ export class FilterManager {
}
// Returns the newly updated filter.
return await this._callAsInternalUser('ml.updateFilter', {
filterId,
const { body: resp } = await this._asInternalUser.ml.updateFilter({
filter_id: filterId,
body,
});
return resp;
} catch (error) {
throw Boom.badRequest(error);
}
}
async deleteFilter(filterId: string) {
return this._callAsInternalUser('ml.deleteFilter', { filterId });
const { body } = await this._asInternalUser.ml.deleteFilter({ filter_id: filterId });
return body;
}
buildFiltersInUse(jobsList: PartialJob[]) {

View file

@ -4,10 +4,10 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
export function jobAuditMessagesProvider(
mlClusterClient: ILegacyScopedClusterClient
client: IScopedClusterClient
): {
getJobAuditMessages: (jobId?: string, from?: string) => any;
getAuditMessagesSummary: (jobIds?: string[]) => any;

View file

@ -34,14 +34,14 @@ const anomalyDetectorTypeFilter = {
},
};
export function jobAuditMessagesProvider({ callAsInternalUser }) {
export function jobAuditMessagesProvider({ asInternalUser }) {
// search for audit messages,
// jobId is optional. without it, all jobs will be listed.
// from is optional and should be a string formatted in ES time units. e.g. 12h, 1d, 7d
async function getJobAuditMessages(jobId, from) {
let gte = null;
if (jobId !== undefined && from === undefined) {
const jobs = await callAsInternalUser('ml.jobs', { jobId });
const jobs = await asInternalUser.ml.getJobs({ job_id: jobId });
if (jobs.count > 0 && jobs.jobs !== undefined) {
gte = moment(jobs.jobs[0].create_time).valueOf();
}
@ -99,26 +99,22 @@ export function jobAuditMessagesProvider({ callAsInternalUser }) {
});
}
try {
const resp = await callAsInternalUser('search', {
index: ML_NOTIFICATION_INDEX_PATTERN,
ignore_unavailable: true,
rest_total_hits_as_int: true,
size: SIZE,
body: {
sort: [{ timestamp: { order: 'desc' } }, { job_id: { order: 'asc' } }],
query,
},
});
const { body } = await asInternalUser.search({
index: ML_NOTIFICATION_INDEX_PATTERN,
ignore_unavailable: true,
rest_total_hits_as_int: true,
size: SIZE,
body: {
sort: [{ timestamp: { order: 'desc' } }, { job_id: { order: 'asc' } }],
query,
},
});
let messages = [];
if (resp.hits.total !== 0) {
messages = resp.hits.hits.map((hit) => hit._source);
}
return messages;
} catch (e) {
throw e;
let messages = [];
if (body.hits.total !== 0) {
messages = body.hits.hits.map((hit) => hit._source);
}
return messages;
}
// search highest, most recent audit messages for all jobs for the last 24hrs.
@ -128,65 +124,63 @@ export function jobAuditMessagesProvider({ callAsInternalUser }) {
const maxBuckets = 10000;
let levelsPerJobAggSize = maxBuckets;
try {
const query = {
bool: {
filter: [
{
range: {
timestamp: {
gte: 'now-1d',
},
const query = {
bool: {
filter: [
{
range: {
timestamp: {
gte: 'now-1d',
},
},
anomalyDetectorTypeFilter,
],
},
};
// If the jobIds arg is supplied, add a query filter
// to only include those jobIds in the aggregations.
if (Array.isArray(jobIds) && jobIds.length > 0) {
query.bool.filter.push({
terms: {
job_id: jobIds,
},
});
levelsPerJobAggSize = jobIds.length;
}
anomalyDetectorTypeFilter,
],
},
};
const resp = await callAsInternalUser('search', {
index: ML_NOTIFICATION_INDEX_PATTERN,
ignore_unavailable: true,
rest_total_hits_as_int: true,
size: 0,
body: {
query,
aggs: {
levelsPerJob: {
terms: {
field: 'job_id',
size: levelsPerJobAggSize,
},
aggs: {
levels: {
terms: {
field: 'level',
},
aggs: {
latestMessage: {
terms: {
field: 'message.raw',
size: 1,
order: {
latestMessage: 'desc',
},
// If the jobIds arg is supplied, add a query filter
// to only include those jobIds in the aggregations.
if (Array.isArray(jobIds) && jobIds.length > 0) {
query.bool.filter.push({
terms: {
job_id: jobIds,
},
});
levelsPerJobAggSize = jobIds.length;
}
const { body } = await asInternalUser.search({
index: ML_NOTIFICATION_INDEX_PATTERN,
ignore_unavailable: true,
rest_total_hits_as_int: true,
size: 0,
body: {
query,
aggs: {
levelsPerJob: {
terms: {
field: 'job_id',
size: levelsPerJobAggSize,
},
aggs: {
levels: {
terms: {
field: 'level',
},
aggs: {
latestMessage: {
terms: {
field: 'message.raw',
size: 1,
order: {
latestMessage: 'desc',
},
aggs: {
latestMessage: {
max: {
field: 'timestamp',
},
},
aggs: {
latestMessage: {
max: {
field: 'timestamp',
},
},
},
@ -196,67 +190,65 @@ export function jobAuditMessagesProvider({ callAsInternalUser }) {
},
},
},
});
},
});
let messagesPerJob = [];
const jobMessages = [];
if (
resp.hits.total !== 0 &&
resp.aggregations &&
resp.aggregations.levelsPerJob &&
resp.aggregations.levelsPerJob.buckets &&
resp.aggregations.levelsPerJob.buckets.length
) {
messagesPerJob = resp.aggregations.levelsPerJob.buckets;
}
messagesPerJob.forEach((job) => {
// ignore system messages (id==='')
if (job.key !== '' && job.levels && job.levels.buckets && job.levels.buckets.length) {
let highestLevel = 0;
let highestLevelText = '';
let msgTime = 0;
job.levels.buckets.forEach((level) => {
const label = level.key;
// note the highest message level
if (LEVEL[label] > highestLevel) {
highestLevel = LEVEL[label];
if (
level.latestMessage &&
level.latestMessage.buckets &&
level.latestMessage.buckets.length
) {
level.latestMessage.buckets.forEach((msg) => {
// there should only be one result here.
highestLevelText = msg.key;
// note the time in ms for the highest level
// so we can filter them out later if they're earlier than the
// job's create time.
if (msg.latestMessage && msg.latestMessage.value_as_string) {
const time = moment(msg.latestMessage.value_as_string);
msgTime = time.valueOf();
}
});
}
}
});
if (msgTime !== 0 && highestLevel !== 0) {
jobMessages.push({
job_id: job.key,
highestLevelText,
highestLevel: levelToText(highestLevel),
msgTime,
});
}
}
});
return jobMessages;
} catch (e) {
throw e;
let messagesPerJob = [];
const jobMessages = [];
if (
body.hits.total !== 0 &&
body.aggregations &&
body.aggregations.levelsPerJob &&
body.aggregations.levelsPerJob.buckets &&
body.aggregations.levelsPerJob.buckets.length
) {
messagesPerJob = body.aggregations.levelsPerJob.buckets;
}
messagesPerJob.forEach((job) => {
// ignore system messages (id==='')
if (job.key !== '' && job.levels && job.levels.buckets && job.levels.buckets.length) {
let highestLevel = 0;
let highestLevelText = '';
let msgTime = 0;
job.levels.buckets.forEach((level) => {
const label = level.key;
// note the highest message level
if (LEVEL[label] > highestLevel) {
highestLevel = LEVEL[label];
if (
level.latestMessage &&
level.latestMessage.buckets &&
level.latestMessage.buckets.length
) {
level.latestMessage.buckets.forEach((msg) => {
// there should only be one result here.
highestLevelText = msg.key;
// note the time in ms for the highest level
// so we can filter them out later if they're earlier than the
// job's create time.
if (msg.latestMessage && msg.latestMessage.value_as_string) {
const time = moment(msg.latestMessage.value_as_string);
msgTime = time.valueOf();
}
});
}
}
});
if (msgTime !== 0 && highestLevel !== 0) {
jobMessages.push({
job_id: job.key,
highestLevelText,
highestLevel: levelToText(highestLevel),
msgTime,
});
}
}
});
return jobMessages;
}
function levelToText(level) {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { i18n } from '@kbn/i18n';
import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states';
import { fillResultsWithTimeouts, isRequestTimeout } from './error_utils';
@ -26,7 +26,7 @@ interface Results {
};
}
export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterClient) {
export function datafeedsProvider({ asInternalUser }: IScopedClusterClient) {
async function forceStartDatafeeds(datafeedIds: string[], start?: number, end?: number) {
const jobIds = await getJobIdsByDatafeedId();
const doStartsCalled = datafeedIds.reduce((acc, cur) => {
@ -42,8 +42,8 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl
try {
await startDatafeed(datafeedId, start, end);
return { started: true };
} catch (error) {
return { started: false, error };
} catch ({ body }) {
return { started: false, error: body };
}
} else {
return { started: true };
@ -66,7 +66,7 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl
results[datafeedId] = await doStart(datafeedId);
return fillResultsWithTimeouts(results, datafeedId, datafeedIds, JOB_STATE.OPENED);
}
results[datafeedId] = { started: false, error };
results[datafeedId] = { started: false, error: error.body };
}
} else {
results[datafeedId] = {
@ -84,8 +84,8 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl
async function openJob(jobId: string) {
let opened = false;
try {
const resp = await callAsInternalUser('ml.openJob', { jobId });
opened = resp.opened;
const { body } = await asInternalUser.ml.openJob({ job_id: jobId });
opened = body.opened;
} catch (error) {
if (error.statusCode === 409) {
opened = true;
@ -97,7 +97,11 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl
}
async function startDatafeed(datafeedId: string, start?: number, end?: number) {
return callAsInternalUser('ml.startDatafeed', { datafeedId, start, end });
return asInternalUser.ml.startDatafeed({
datafeed_id: datafeedId,
start: (start as unknown) as string,
end: (end as unknown) as string,
});
}
async function stopDatafeeds(datafeedIds: string[]) {
@ -105,7 +109,12 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl
for (const datafeedId of datafeedIds) {
try {
results[datafeedId] = await callAsInternalUser('ml.stopDatafeed', { datafeedId });
const { body } = await asInternalUser.ml.stopDatafeed<{
started: boolean;
}>({
datafeed_id: datafeedId,
});
results[datafeedId] = body;
} catch (error) {
if (isRequestTimeout(error)) {
return fillResultsWithTimeouts(results, datafeedId, datafeedIds, DATAFEED_STATE.STOPPED);
@ -117,11 +126,17 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl
}
async function forceDeleteDatafeed(datafeedId: string) {
return callAsInternalUser('ml.deleteDatafeed', { datafeedId, force: true });
const { body } = await asInternalUser.ml.deleteDatafeed({
datafeed_id: datafeedId,
force: true,
});
return body;
}
async function getDatafeedIdsByJobId() {
const { datafeeds } = (await callAsInternalUser('ml.datafeeds')) as MlDatafeedsResponse;
const {
body: { datafeeds },
} = await asInternalUser.ml.getDatafeeds<MlDatafeedsResponse>();
return datafeeds.reduce((acc, cur) => {
acc[cur.job_id] = cur.datafeed_id;
return acc;
@ -129,7 +144,9 @@ export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterCl
}
async function getJobIdsByDatafeedId() {
const { datafeeds } = (await callAsInternalUser('ml.datafeeds')) as MlDatafeedsResponse;
const {
body: { datafeeds },
} = await asInternalUser.ml.getDatafeeds<MlDatafeedsResponse>();
return datafeeds.reduce((acc, cur) => {
acc[cur.datafeed_id] = cur.job_id;
return acc;

View file

@ -7,11 +7,11 @@
import { i18n } from '@kbn/i18n';
import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states';
const REQUEST_TIMEOUT = 'RequestTimeout';
const REQUEST_TIMEOUT_NAME = 'RequestTimeout';
type ACTION_STATE = DATAFEED_STATE | JOB_STATE;
export function isRequestTimeout(error: { displayName: string }) {
return error.displayName === REQUEST_TIMEOUT;
export function isRequestTimeout(error: { name: string }) {
return error.name === REQUEST_TIMEOUT_NAME;
}
interface Results {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { CalendarManager } from '../calendar';
import { GLOBAL_CALENDAR } from '../../../common/constants/calendars';
import { Job } from '../../../common/types/anomaly_detection_jobs';
@ -23,18 +23,19 @@ interface Results {
};
}
export function groupsProvider(mlClusterClient: ILegacyScopedClusterClient) {
const calMngr = new CalendarManager(mlClusterClient);
const { callAsInternalUser } = mlClusterClient;
export function groupsProvider(client: IScopedClusterClient) {
const calMngr = new CalendarManager(client);
const { asInternalUser } = client;
async function getAllGroups() {
const groups: { [id: string]: Group } = {};
const jobIds: { [id: string]: undefined | null } = {};
const [{ jobs }, calendars] = await Promise.all([
callAsInternalUser('ml.jobs') as Promise<MlJobsResponse>,
const [{ body }, calendars] = await Promise.all([
asInternalUser.ml.getJobs<MlJobsResponse>(),
calMngr.getAllCalendars(),
]);
const { jobs } = body;
if (jobs) {
jobs.forEach((job) => {
jobIds[job.job_id] = null;
@ -80,10 +81,10 @@ export function groupsProvider(mlClusterClient: ILegacyScopedClusterClient) {
for (const job of jobs) {
const { job_id: jobId, groups } = job;
try {
await callAsInternalUser('ml.updateJob', { jobId, body: { groups } });
await asInternalUser.ml.updateJob({ job_id: jobId, body: { groups } });
results[jobId] = { success: true };
} catch (error) {
results[jobId] = { success: false, error };
} catch ({ body }) {
results[jobId] = { success: false, error: body };
}
}
return results;

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { datafeedsProvider } from './datafeeds';
import { jobsProvider } from './jobs';
import { groupsProvider } from './groups';
@ -12,14 +12,14 @@ import { newJobCapsProvider } from './new_job_caps';
import { newJobChartsProvider, topCategoriesProvider } from './new_job';
import { modelSnapshotProvider } from './model_snapshots';
export function jobServiceProvider(mlClusterClient: ILegacyScopedClusterClient) {
export function jobServiceProvider(client: IScopedClusterClient) {
return {
...datafeedsProvider(mlClusterClient),
...jobsProvider(mlClusterClient),
...groupsProvider(mlClusterClient),
...newJobCapsProvider(mlClusterClient),
...newJobChartsProvider(mlClusterClient),
...topCategoriesProvider(mlClusterClient),
...modelSnapshotProvider(mlClusterClient),
...datafeedsProvider(client),
...jobsProvider(client),
...groupsProvider(client),
...newJobCapsProvider(client),
...newJobChartsProvider(client),
...topCategoriesProvider(client),
...modelSnapshotProvider(client),
};
}

View file

@ -7,7 +7,7 @@
import { i18n } from '@kbn/i18n';
import { uniq } from 'lodash';
import Boom from 'boom';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { parseTimeIntervalForJob } from '../../../common/util/job_utils';
import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states';
import {
@ -22,7 +22,7 @@ import { GLOBAL_CALENDAR } from '../../../common/constants/calendars';
import { datafeedsProvider, MlDatafeedsResponse, MlDatafeedsStatsResponse } from './datafeeds';
import { jobAuditMessagesProvider } from '../job_audit_messages';
import { resultsServiceProvider } from '../results_service';
import { CalendarManager, Calendar } from '../calendar';
import { CalendarManager } from '../calendar';
import { fillResultsWithTimeouts, isRequestTimeout } from './error_utils';
import {
getEarliestDatafeedStartTime,
@ -47,16 +47,16 @@ interface Results {
};
}
export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
const { callAsInternalUser } = mlClusterClient;
export function jobsProvider(client: IScopedClusterClient) {
const { asInternalUser } = client;
const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(mlClusterClient);
const { getAuditMessagesSummary } = jobAuditMessagesProvider(mlClusterClient);
const { getLatestBucketTimestampByJob } = resultsServiceProvider(mlClusterClient);
const calMngr = new CalendarManager(mlClusterClient);
const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(client);
const { getAuditMessagesSummary } = jobAuditMessagesProvider(client);
const { getLatestBucketTimestampByJob } = resultsServiceProvider(client);
const calMngr = new CalendarManager(client);
async function forceDeleteJob(jobId: string) {
return callAsInternalUser('ml.deleteJob', { jobId, force: true });
return asInternalUser.ml.deleteJob({ job_id: jobId, force: true, wait_for_completion: false });
}
async function deleteJobs(jobIds: string[]) {
@ -78,7 +78,7 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
if (isRequestTimeout(error)) {
return fillResultsWithTimeouts(results, jobId, jobIds, DATAFEED_STATE.DELETED);
}
results[jobId] = { deleted: false, error };
results[jobId] = { deleted: false, error: error.body };
}
}
} catch (error) {
@ -90,7 +90,7 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
DATAFEED_STATE.DELETED
);
}
results[jobId] = { deleted: false, error };
results[jobId] = { deleted: false, error: error.body };
}
}
return results;
@ -100,7 +100,7 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
const results: Results = {};
for (const jobId of jobIds) {
try {
await callAsInternalUser('ml.closeJob', { jobId });
await asInternalUser.ml.closeJob({ job_id: jobId });
results[jobId] = { closed: true };
} catch (error) {
if (isRequestTimeout(error)) {
@ -109,23 +109,23 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
if (
error.statusCode === 409 &&
error.response &&
error.response.includes('datafeed') === false
error.body.error?.reason &&
error.body.error.reason.includes('datafeed') === false
) {
// the close job request may fail (409) if the job has failed or if the datafeed hasn't been stopped.
// if the job has failed we want to attempt a force close.
// however, if we received a 409 due to the datafeed being started we should not attempt a force close.
try {
await callAsInternalUser('ml.closeJob', { jobId, force: true });
await asInternalUser.ml.closeJob({ job_id: jobId, force: true });
results[jobId] = { closed: true };
} catch (error2) {
if (isRequestTimeout(error)) {
if (isRequestTimeout(error2)) {
return fillResultsWithTimeouts(results, jobId, jobIds, JOB_STATE.CLOSED);
}
results[jobId] = { closed: false, error: error2 };
results[jobId] = { closed: false, error: error2.body };
}
} else {
results[jobId] = { closed: false, error };
results[jobId] = { closed: false, error: error.body };
}
}
}
@ -139,12 +139,12 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
throw Boom.notFound(`Cannot find datafeed for job ${jobId}`);
}
const dfResult = await callAsInternalUser('ml.stopDatafeed', { datafeedId, force: true });
if (!dfResult || dfResult.stopped !== true) {
const { body } = await asInternalUser.ml.stopDatafeed({ datafeed_id: datafeedId, force: true });
if (body.stopped !== true) {
return { success: false };
}
await callAsInternalUser('ml.closeJob', { jobId, force: true });
await asInternalUser.ml.closeJob({ job_id: jobId, force: true });
return { success: true };
}
@ -256,41 +256,26 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
const calendarsByJobId: { [jobId: string]: string[] } = {};
const globalCalendars: string[] = [];
const requests: [
Promise<MlJobsResponse>,
Promise<MlJobsStatsResponse>,
Promise<MlDatafeedsResponse>,
Promise<MlDatafeedsStatsResponse>,
Promise<Calendar[]>,
Promise<{ [id: string]: number | undefined }>
] = [
jobIds.length > 0
? (callAsInternalUser('ml.jobs', { jobId: jobIds }) as Promise<MlJobsResponse>) // move length check in side call
: (callAsInternalUser('ml.jobs') as Promise<MlJobsResponse>),
jobIds.length > 0
? (callAsInternalUser('ml.jobStats', { jobId: jobIds }) as Promise<MlJobsStatsResponse>)
: (callAsInternalUser('ml.jobStats') as Promise<MlJobsStatsResponse>),
callAsInternalUser('ml.datafeeds') as Promise<MlDatafeedsResponse>,
callAsInternalUser('ml.datafeedStats') as Promise<MlDatafeedsStatsResponse>,
calMngr.getAllCalendars(),
getLatestBucketTimestampByJob(),
];
const jobIdsString = jobIds.join();
const [
jobResults,
jobStatsResults,
datafeedResults,
datafeedStatsResults,
{ body: jobResults },
{ body: jobStatsResults },
{ body: datafeedResults },
{ body: datafeedStatsResults },
calendarResults,
latestBucketTimestampByJob,
] = await Promise.all<
MlJobsResponse,
MlJobsStatsResponse,
MlDatafeedsResponse,
MlDatafeedsStatsResponse,
Calendar[],
{ [id: string]: number | undefined }
>(requests);
] = await Promise.all([
asInternalUser.ml.getJobs<MlJobsResponse>(
jobIds.length > 0 ? { job_id: jobIdsString } : undefined
),
asInternalUser.ml.getJobStats<MlJobsStatsResponse>(
jobIds.length > 0 ? { job_id: jobIdsString } : undefined
),
asInternalUser.ml.getDatafeeds<MlDatafeedsResponse>(),
asInternalUser.ml.getDatafeedStats<MlDatafeedsStatsResponse>(),
calMngr.getAllCalendars(),
getLatestBucketTimestampByJob(),
]);
if (datafeedResults && datafeedResults.datafeeds) {
datafeedResults.datafeeds.forEach((datafeed) => {
@ -400,9 +385,9 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
const detailed = true;
const jobIds = [];
try {
const tasksList = await callAsInternalUser('tasks.list', { actions, detailed });
Object.keys(tasksList.nodes).forEach((nodeId) => {
const tasks = tasksList.nodes[nodeId].tasks;
const { body } = await asInternalUser.tasks.list({ actions, detailed });
Object.keys(body.nodes).forEach((nodeId) => {
const tasks = body.nodes[nodeId].tasks;
Object.keys(tasks).forEach((taskId) => {
jobIds.push(tasks[taskId].description.replace(/^delete-job-/, ''));
});
@ -410,7 +395,9 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
} catch (e) {
// if the user doesn't have permission to load the task list,
// use the jobs list to get the ids of deleting jobs
const { jobs } = (await callAsInternalUser('ml.jobs')) as MlJobsResponse;
const {
body: { jobs },
} = await asInternalUser.ml.getJobs<MlJobsResponse>();
jobIds.push(...jobs.filter((j) => j.deleting === true).map((j) => j.job_id));
}
return { jobIds };
@ -421,13 +408,13 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
// e.g. *_low_request_rate_ecs
async function jobsExist(jobIds: string[] = []) {
// Get the list of job IDs.
const jobsInfo = (await callAsInternalUser('ml.jobs', {
jobId: jobIds,
})) as MlJobsResponse;
const { body } = await asInternalUser.ml.getJobs<MlJobsResponse>({
job_id: jobIds.join(),
});
const results: { [id: string]: boolean } = {};
if (jobsInfo.count > 0) {
const allJobIds = jobsInfo.jobs.map((job) => job.job_id);
if (body.count > 0) {
const allJobIds = body.jobs.map((job) => job.job_id);
// Check if each of the supplied IDs match existing jobs.
jobIds.forEach((jobId) => {
@ -446,9 +433,9 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
}
async function getAllJobAndGroupIds() {
const { getAllGroups } = groupsProvider(mlClusterClient);
const jobs = (await callAsInternalUser('ml.jobs')) as MlJobsResponse;
const jobIds = jobs.jobs.map((job) => job.job_id);
const { getAllGroups } = groupsProvider(client);
const { body } = await asInternalUser.ml.getJobs<MlJobsResponse>();
const jobIds = body.jobs.map((job) => job.job_id);
const groups = await getAllGroups();
const groupIds = groups.map((group) => group.id);
@ -460,13 +447,13 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
async function getLookBackProgress(jobId: string, start: number, end: number) {
const datafeedId = `datafeed-${jobId}`;
const [jobStats, isRunning] = await Promise.all([
callAsInternalUser('ml.jobStats', { jobId: [jobId] }) as Promise<MlJobsStatsResponse>,
const [{ body }, isRunning] = await Promise.all([
asInternalUser.ml.getJobStats<MlJobsStatsResponse>({ job_id: jobId }),
isDatafeedRunning(datafeedId),
]);
if (jobStats.jobs.length) {
const statsForJob = jobStats.jobs[0];
if (body.jobs.length) {
const statsForJob = body.jobs[0];
const time = statsForJob.data_counts.latest_record_timestamp;
const progress = (time - start) / (end - start);
const isJobClosed = statsForJob.state === JOB_STATE.CLOSED;
@ -480,11 +467,11 @@ export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
}
async function isDatafeedRunning(datafeedId: string) {
const stats = (await callAsInternalUser('ml.datafeedStats', {
datafeedId: [datafeedId],
})) as MlDatafeedsStatsResponse;
if (stats.datafeeds.length) {
const state = stats.datafeeds[0].state;
const { body } = await asInternalUser.ml.getDatafeedStats<MlDatafeedsStatsResponse>({
datafeed_id: datafeedId,
});
if (body.datafeeds.length) {
const state = body.datafeeds[0].state;
return (
state === DATAFEED_STATE.STARTED ||
state === DATAFEED_STATE.STARTING ||

View file

@ -6,7 +6,7 @@
import Boom from 'boom';
import { i18n } from '@kbn/i18n';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { ModelSnapshot } from '../../../common/types/anomaly_detection_jobs';
import { datafeedsProvider } from './datafeeds';
import { FormCalendar, CalendarManager } from '../calendar';
@ -19,9 +19,9 @@ export interface RevertModelSnapshotResponse {
model: ModelSnapshot;
}
export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClient) {
const { callAsInternalUser } = mlClusterClient;
const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(mlClusterClient);
export function modelSnapshotProvider(client: IScopedClusterClient) {
const { asInternalUser } = client;
const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(client);
async function revertModelSnapshot(
jobId: string,
@ -33,13 +33,13 @@ export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClien
) {
let datafeedId = `datafeed-${jobId}`;
// ensure job exists
await callAsInternalUser('ml.jobs', { jobId: [jobId] });
await asInternalUser.ml.getJobs({ job_id: jobId });
try {
// ensure the datafeed exists
// the datafeed is probably called datafeed-<jobId>
await callAsInternalUser('ml.datafeeds', {
datafeedId: [datafeedId],
await asInternalUser.ml.getDatafeeds({
datafeed_id: datafeedId,
});
} catch (e) {
// if the datafeed isn't called datafeed-<jobId>
@ -52,19 +52,21 @@ export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClien
}
// ensure the snapshot exists
const snapshot = (await callAsInternalUser('ml.modelSnapshots', {
jobId,
snapshotId,
})) as ModelSnapshotsResponse;
const { body: snapshot } = await asInternalUser.ml.getModelSnapshots<ModelSnapshotsResponse>({
job_id: jobId,
snapshot_id: snapshotId,
});
// apply the snapshot revert
const { model } = (await callAsInternalUser('ml.revertModelSnapshot', {
jobId,
snapshotId,
const {
body: { model },
} = await asInternalUser.ml.revertModelSnapshot<RevertModelSnapshotResponse>({
job_id: jobId,
snapshot_id: snapshotId,
body: {
delete_intervening_results: deleteInterveningResults,
},
})) as RevertModelSnapshotResponse;
});
// create calendar (if specified) and replay datafeed
if (replay && model.snapshot_id === snapshotId && snapshot.model_snapshots.length) {
@ -85,7 +87,7 @@ export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClien
end_time: s.end,
})),
};
const cm = new CalendarManager(mlClusterClient);
const cm = new CalendarManager(client);
await cm.newCalendar(calendar);
}

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { chunk } from 'lodash';
import { SearchResponse } from 'elasticsearch';
import { CATEGORY_EXAMPLES_SAMPLE_SIZE } from '../../../../../common/constants/categorization_job';
@ -18,9 +18,9 @@ import { ValidationResults } from './validation_results';
const CHUNK_SIZE = 100;
export function categorizationExamplesProvider({
callAsCurrentUser,
callAsInternalUser,
}: ILegacyScopedClusterClient) {
asCurrentUser,
asInternalUser,
}: IScopedClusterClient) {
const validationResults = new ValidationResults();
async function categorizationExamples(
@ -57,7 +57,7 @@ export function categorizationExamplesProvider({
}
}
const results: SearchResponse<{ [id: string]: string }> = await callAsCurrentUser('search', {
const { body } = await asCurrentUser.search<SearchResponse<{ [id: string]: string }>>({
index: indexPatternTitle,
size,
body: {
@ -67,7 +67,7 @@ export function categorizationExamplesProvider({
},
});
const tempExamples = results.hits.hits.map(({ _source }) => _source[categorizationFieldName]);
const tempExamples = body.hits.hits.map(({ _source }) => _source[categorizationFieldName]);
validationResults.createNullValueResult(tempExamples);
@ -112,7 +112,9 @@ export function categorizationExamplesProvider({
}
async function loadTokens(examples: string[], analyzer: CategorizationAnalyzer) {
const { tokens }: { tokens: Token[] } = await callAsInternalUser('indices.analyze', {
const {
body: { tokens },
} = await asInternalUser.indices.analyze<{ tokens: Token[] }>({
body: {
...getAnalyzer(analyzer),
text: examples,

View file

@ -5,13 +5,13 @@
*/
import { SearchResponse } from 'elasticsearch';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { ML_RESULTS_INDEX_PATTERN } from '../../../../../common/constants/index_patterns';
import { CategoryId, Category } from '../../../../../common/types/categories';
export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClusterClient) {
async function getTotalCategories(jobId: string): Promise<{ total: number }> {
const totalResp = await callAsInternalUser('search', {
export function topCategoriesProvider({ asInternalUser }: IScopedClusterClient) {
async function getTotalCategories(jobId: string): Promise<number> {
const { body } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
@ -33,11 +33,12 @@ export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClust
},
},
});
return totalResp?.hits?.total?.value ?? 0;
// @ts-ignore total is an object here
return body?.hits?.total?.value ?? 0;
}
async function getTopCategoryCounts(jobId: string, numberOfCategories: number) {
const top: SearchResponse<any> = await callAsInternalUser('search', {
const { body } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
@ -76,7 +77,7 @@ export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClust
const catCounts: Array<{
id: CategoryId;
count: number;
}> = top.aggregations?.cat_count?.buckets.map((c: any) => ({
}> = body.aggregations?.cat_count?.buckets.map((c: any) => ({
id: c.key,
count: c.doc_count,
}));
@ -99,7 +100,7 @@ export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClust
field: 'category_id',
},
};
const result: SearchResponse<any> = await callAsInternalUser('search', {
const { body } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
size,
body: {
@ -118,7 +119,7 @@ export function topCategoriesProvider({ callAsInternalUser }: ILegacyScopedClust
},
});
return result.hits.hits?.map((c: { _source: Category }) => c._source) || [];
return body.hits.hits?.map((c: { _source: Category }) => c._source) || [];
}
async function topCategories(jobId: string, numberOfCategories: number) {

View file

@ -144,7 +144,7 @@ export class ValidationResults {
this.createPrivilegesErrorResult(error);
return;
}
const message: string = error.message;
const message: string = error.body.error?.reason;
if (message) {
const rxp = /exceeded the allowed maximum of \[(\d+?)\]/;
const match = rxp.exec(message);
@ -170,7 +170,7 @@ export class ValidationResults {
}
public createPrivilegesErrorResult(error: any) {
const message: string = error.message;
const message: string = error.body.error?.reason;
if (message) {
this._results.push({
id: VALIDATION_RESULT.INSUFFICIENT_PRIVILEGES,

View file

@ -4,13 +4,13 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { newJobLineChartProvider } from './line_chart';
import { newJobPopulationChartProvider } from './population_chart';
export function newJobChartsProvider(mlClusterClient: ILegacyScopedClusterClient) {
const { newJobLineChart } = newJobLineChartProvider(mlClusterClient);
const { newJobPopulationChart } = newJobPopulationChartProvider(mlClusterClient);
export function newJobChartsProvider(client: IScopedClusterClient) {
const { newJobLineChart } = newJobLineChartProvider(client);
const { newJobPopulationChart } = newJobPopulationChartProvider(client);
return {
newJobLineChart,

View file

@ -5,7 +5,7 @@
*/
import { get } from 'lodash';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { AggFieldNamePair, EVENT_RATE_FIELD_ID } from '../../../../common/types/fields';
import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
@ -23,7 +23,7 @@ interface ProcessedResults {
totalResults: number;
}
export function newJobLineChartProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) {
export function newJobLineChartProvider({ asCurrentUser }: IScopedClusterClient) {
async function newJobLineChart(
indexPatternTitle: string,
timeField: string,
@ -47,9 +47,9 @@ export function newJobLineChartProvider({ callAsCurrentUser }: ILegacyScopedClus
splitFieldValue
);
const results = await callAsCurrentUser('search', json);
const { body } = await asCurrentUser.search(json);
return processSearchResults(
results,
body,
aggFieldNamePairs.map((af) => af.field)
);
}

View file

@ -5,7 +5,7 @@
*/
import { get } from 'lodash';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { AggFieldNamePair, EVENT_RATE_FIELD_ID } from '../../../../common/types/fields';
import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils';
@ -29,7 +29,7 @@ interface ProcessedResults {
totalResults: number;
}
export function newJobPopulationChartProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) {
export function newJobPopulationChartProvider({ asCurrentUser }: IScopedClusterClient) {
async function newJobPopulationChart(
indexPatternTitle: string,
timeField: string,
@ -51,15 +51,11 @@ export function newJobPopulationChartProvider({ callAsCurrentUser }: ILegacyScop
splitFieldName
);
try {
const results = await callAsCurrentUser('search', json);
return processSearchResults(
results,
aggFieldNamePairs.map((af) => af.field)
);
} catch (error) {
return { error };
}
const { body } = await asCurrentUser.search(json);
return processSearchResults(
body,
aggFieldNamePairs.map((af) => af.field)
);
}
return {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { cloneDeep } from 'lodash';
import { SavedObjectsClientContract } from 'kibana/server';
import {
@ -40,35 +40,36 @@ const supportedTypes: string[] = [
export function fieldServiceProvider(
indexPattern: string,
isRollup: boolean,
mlClusterClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
savedObjectsClient: SavedObjectsClientContract
) {
return new FieldsService(indexPattern, isRollup, mlClusterClient, savedObjectsClient);
return new FieldsService(indexPattern, isRollup, client, savedObjectsClient);
}
class FieldsService {
private _indexPattern: string;
private _isRollup: boolean;
private _mlClusterClient: ILegacyScopedClusterClient;
private _mlClusterClient: IScopedClusterClient;
private _savedObjectsClient: SavedObjectsClientContract;
constructor(
indexPattern: string,
isRollup: boolean,
mlClusterClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
savedObjectsClient: SavedObjectsClientContract
) {
this._indexPattern = indexPattern;
this._isRollup = isRollup;
this._mlClusterClient = mlClusterClient;
this._mlClusterClient = client;
this._savedObjectsClient = savedObjectsClient;
}
private async loadFieldCaps(): Promise<any> {
return this._mlClusterClient.callAsCurrentUser('fieldCaps', {
const { body } = await this._mlClusterClient.asCurrentUser.fieldCaps({
index: this._indexPattern,
fields: '*',
});
return body;
}
// create field object from the results from _field_caps

View file

@ -21,28 +21,23 @@ describe('job_service - job_caps', () => {
let savedObjectsClientMock: any;
beforeEach(() => {
const callAsNonRollupMock = jest.fn((action: string) => {
switch (action) {
case 'fieldCaps':
return farequoteFieldCaps;
}
});
mlClusterClientNonRollupMock = {
callAsCurrentUser: callAsNonRollupMock,
callAsInternalUser: callAsNonRollupMock,
const asNonRollupMock = {
fieldCaps: jest.fn(() => ({ body: farequoteFieldCaps })),
};
mlClusterClientNonRollupMock = {
asCurrentUser: asNonRollupMock,
asInternalUser: asNonRollupMock,
};
const callAsRollupMock = {
fieldCaps: jest.fn(() => ({ body: cloudwatchFieldCaps })),
rollup: { getRollupIndexCaps: jest.fn(() => Promise.resolve({ body: rollupCaps })) },
};
const callAsRollupMock = jest.fn((action: string) => {
switch (action) {
case 'fieldCaps':
return cloudwatchFieldCaps;
case 'ml.rollupIndexCapabilities':
return Promise.resolve(rollupCaps);
}
});
mlClusterClientRollupMock = {
callAsCurrentUser: callAsRollupMock,
callAsInternalUser: callAsRollupMock,
asCurrentUser: callAsRollupMock,
asInternalUser: callAsRollupMock,
};
savedObjectsClientMock = {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient, SavedObjectsClientContract } from 'kibana/server';
import { IScopedClusterClient, SavedObjectsClientContract } from 'kibana/server';
import { Aggregation, Field, NewJobCaps } from '../../../../common/types/fields';
import { fieldServiceProvider } from './field_service';
@ -12,18 +12,13 @@ interface NewJobCapsResponse {
[indexPattern: string]: NewJobCaps;
}
export function newJobCapsProvider(mlClusterClient: ILegacyScopedClusterClient) {
export function newJobCapsProvider(client: IScopedClusterClient) {
async function newJobCaps(
indexPattern: string,
isRollup: boolean = false,
savedObjectsClient: SavedObjectsClientContract
): Promise<NewJobCapsResponse> {
const fieldService = fieldServiceProvider(
indexPattern,
isRollup,
mlClusterClient,
savedObjectsClient
);
const fieldService = fieldServiceProvider(indexPattern, isRollup, client, savedObjectsClient);
const { aggs, fields } = await fieldService.getData();
convertForStringify(aggs, fields);

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { SavedObject } from 'kibana/server';
import { IndexPatternAttributes } from 'src/plugins/data/server';
import { SavedObjectsClientContract } from 'kibana/server';
@ -22,7 +22,7 @@ export interface RollupJob {
export async function rollupServiceProvider(
indexPattern: string,
{ callAsCurrentUser }: ILegacyScopedClusterClient,
{ asCurrentUser }: IScopedClusterClient,
savedObjectsClient: SavedObjectsClientContract
) {
const rollupIndexPatternObject = await loadRollupIndexPattern(indexPattern, savedObjectsClient);
@ -32,8 +32,8 @@ export async function rollupServiceProvider(
if (rollupIndexPatternObject !== null) {
const parsedTypeMetaData = JSON.parse(rollupIndexPatternObject.attributes.typeMeta);
const rollUpIndex: string = parsedTypeMetaData.params.rollup_index;
const rollupCaps = await callAsCurrentUser('ml.rollupIndexCapabilities', {
indexPattern: rollUpIndex,
const { body: rollupCaps } = await asCurrentUser.rollup.getRollupIndexCaps({
index: rollUpIndex,
});
const indexRollupCaps = rollupCaps[rollUpIndex];

View file

@ -4,48 +4,31 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { validateJob, ValidateJobPayload } from './job_validation';
import { JobValidationMessage } from '../../../common/constants/messages';
const mlClusterClient = ({
// mock callAsCurrentUser
callAsCurrentUser: (method: string) => {
return new Promise((resolve) => {
if (method === 'fieldCaps') {
resolve({ fields: [] });
return;
} else if (method === 'ml.info') {
resolve({
const callAs = {
fieldCaps: () => Promise.resolve({ body: { fields: [] } }),
ml: {
info: () =>
Promise.resolve({
body: {
limits: {
effective_max_model_memory_limit: '100MB',
max_model_memory_limit: '1GB',
},
});
}
resolve({});
}) as Promise<any>;
},
}),
},
search: () => Promise.resolve({ body: {} }),
};
// mock callAsInternalUser
callAsInternalUser: (method: string) => {
return new Promise((resolve) => {
if (method === 'fieldCaps') {
resolve({ fields: [] });
return;
} else if (method === 'ml.info') {
resolve({
limits: {
effective_max_model_memory_limit: '100MB',
max_model_memory_limit: '1GB',
},
});
}
resolve({});
}) as Promise<any>;
},
} as unknown) as ILegacyScopedClusterClient;
const mlClusterClient = ({
asCurrentUser: callAs,
asInternalUser: callAs,
} as unknown) as IScopedClusterClient;
// Note: The tests cast `payload` as any
// so we can simulate possible runtime payloads

View file

@ -6,7 +6,7 @@
import { i18n } from '@kbn/i18n';
import Boom from 'boom';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { TypeOf } from '@kbn/config-schema';
import { fieldsServiceProvider } from '../fields_service';
import { renderTemplate } from '../../../common/util/string_utils';
@ -34,7 +34,7 @@ export type ValidateJobPayload = TypeOf<typeof validateJobSchema>;
* @kbn/config-schema has checked the payload {@link validateJobSchema}.
*/
export async function validateJob(
mlClusterClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
payload: ValidateJobPayload,
kbnVersion = 'current',
isSecurityDisabled?: boolean
@ -63,8 +63,8 @@ export async function validateJob(
// if no duration was part of the request, fall back to finding out
// the time range of the time field of the index, but also check first
// if the time field is a valid field of type 'date' using isValidTimeField()
if (typeof duration === 'undefined' && (await isValidTimeField(mlClusterClient, job))) {
const fs = fieldsServiceProvider(mlClusterClient);
if (typeof duration === 'undefined' && (await isValidTimeField(client, job))) {
const fs = fieldsServiceProvider(client);
const index = job.datafeed_config.indices.join(',');
const timeField = job.data_description.time_field;
const timeRange = await fs.getTimeFieldRange(index, timeField, job.datafeed_config.query);
@ -79,24 +79,22 @@ export async function validateJob(
// next run only the cardinality tests to find out if they trigger an error
// so we can decide later whether certain additional tests should be run
const cardinalityMessages = await validateCardinality(mlClusterClient, job);
const cardinalityMessages = await validateCardinality(client, job);
validationMessages.push(...cardinalityMessages);
const cardinalityError = cardinalityMessages.some((m) => {
return messages[m.id as MessageId].status === VALIDATION_STATUS.ERROR;
});
validationMessages.push(
...(await validateBucketSpan(mlClusterClient, job, duration, isSecurityDisabled))
...(await validateBucketSpan(client, job, duration, isSecurityDisabled))
);
validationMessages.push(...(await validateTimeRange(mlClusterClient, job, duration)));
validationMessages.push(...(await validateTimeRange(client, job, duration)));
// only run the influencer and model memory limit checks
// if cardinality checks didn't return a message with an error level
if (cardinalityError === false) {
validationMessages.push(...(await validateInfluencers(job)));
validationMessages.push(
...(await validateModelMemoryLimit(mlClusterClient, job, duration))
);
validationMessages.push(...(await validateModelMemoryLimit(client, job, duration)));
}
} else {
validationMessages = basicValidation.messages;

View file

@ -45,7 +45,7 @@ const pickBucketSpan = (bucketSpans) => {
return bucketSpans[i];
};
export async function validateBucketSpan(mlClusterClient, job, duration) {
export async function validateBucketSpan(client, job, duration) {
validateJobObject(job);
// if there is no duration, do not run the estimate test
@ -117,7 +117,7 @@ export async function validateBucketSpan(mlClusterClient, job, duration) {
try {
const estimations = estimatorConfigs.map((data) => {
return new Promise((resolve) => {
estimateBucketSpanFactory(mlClusterClient)(data)
estimateBucketSpanFactory(client)(data)
.then(resolve)
// this catch gets triggered when the estimation code runs without error
// but isn't able to come up with a bucket span estimation.

View file

@ -24,12 +24,12 @@ import mockItSearchResponse from './__mocks__/mock_it_search_response.json';
const mlClusterClientFactory = (mockSearchResponse: any) => {
const callAs = () => {
return new Promise((resolve) => {
resolve(mockSearchResponse);
resolve({ body: mockSearchResponse });
});
};
return {
callAsCurrentUser: callAs,
callAsInternalUser: callAs,
asCurrentUser: callAs,
asInternalUser: callAs,
};
};

View file

@ -6,7 +6,7 @@
import cloneDeep from 'lodash/cloneDeep';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';
@ -24,21 +24,21 @@ const mockResponses = {
const mlClusterClientFactory = (
responses: Record<string, any>,
fail = false
): ILegacyScopedClusterClient => {
const callAs = (requestName: string) => {
return new Promise((resolve, reject) => {
const response = responses[requestName];
if (fail) {
reject(response);
} else {
resolve(response);
}
}) as Promise<any>;
): IScopedClusterClient => {
const callAs = {
search: () => Promise.resolve({ body: responses.search }),
fieldCaps: () => Promise.resolve({ body: responses.fieldCaps }),
};
return {
callAsCurrentUser: callAs,
callAsInternalUser: callAs,
const callAsFail = {
search: () => Promise.reject({ body: {} }),
fieldCaps: () => Promise.reject({ body: {} }),
};
return ({
asCurrentUser: fail === false ? callAs : callAsFail,
asInternalUser: fail === false ? callAs : callAsFail,
} as unknown) as IScopedClusterClient;
};
describe('ML - validateCardinality', () => {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { DataVisualizer } from '../data_visualizer';
import { validateJobObject } from './validate_job_object';
@ -43,12 +43,9 @@ type Validator = (obj: {
messages: Messages;
}>;
const validateFactory = (
mlClusterClient: ILegacyScopedClusterClient,
job: CombinedJob
): Validator => {
const { callAsCurrentUser } = mlClusterClient;
const dv = new DataVisualizer(mlClusterClient);
const validateFactory = (client: IScopedClusterClient, job: CombinedJob): Validator => {
const { asCurrentUser } = client;
const dv = new DataVisualizer(client);
const modelPlotConfigTerms = job?.model_plot_config?.terms ?? '';
const modelPlotConfigFieldCount =
@ -77,7 +74,7 @@ const validateFactory = (
] as string[];
// use fieldCaps endpoint to get data about whether fields are aggregatable
const fieldCaps = await callAsCurrentUser('fieldCaps', {
const { body: fieldCaps } = await asCurrentUser.fieldCaps({
index: job.datafeed_config.indices.join(','),
fields: uniqueFieldNames,
});
@ -154,7 +151,7 @@ const validateFactory = (
};
export async function validateCardinality(
mlClusterClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
job?: CombinedJob
): Promise<Messages> | never {
const messages: Messages = [];
@ -174,7 +171,7 @@ export async function validateCardinality(
}
// validate({ type, isInvalid }) asynchronously returns an array of validation messages
const validate = validateFactory(mlClusterClient, job);
const validate = validateFactory(client, job);
const modelPlotEnabled = job.model_plot_config?.enabled ?? false;

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { CombinedJob, Detector } from '../../../common/types/anomaly_detection_jobs';
import { ModelMemoryEstimate } from '../calculate_model_memory_limit/calculate_model_memory_limit';
import { ModelMemoryEstimateResponse } from '../calculate_model_memory_limit/calculate_model_memory_limit';
import { validateModelMemoryLimit } from './validate_model_memory_limit';
describe('ML - validateModelMemoryLimit', () => {
@ -65,44 +65,36 @@ describe('ML - validateModelMemoryLimit', () => {
};
// mock estimate model memory
const modelMemoryEstimateResponse: ModelMemoryEstimate = {
const modelMemoryEstimateResponse: ModelMemoryEstimateResponse = {
model_memory_estimate: '40mb',
};
interface MockAPICallResponse {
'ml.estimateModelMemory'?: ModelMemoryEstimate;
'ml.estimateModelMemory'?: ModelMemoryEstimateResponse;
}
// mock callAsCurrentUser
// mock asCurrentUser
// used in three places:
// - to retrieve the info endpoint
// - to search for cardinality of split field
// - to retrieve field capabilities used in search for split field cardinality
const getMockMlClusterClient = ({
'ml.estimateModelMemory': estimateModelMemory,
}: MockAPICallResponse = {}): ILegacyScopedClusterClient => {
const callAs = (call: string) => {
if (typeof call === undefined) {
return Promise.reject();
}
let response = {};
if (call === 'ml.info') {
response = mlInfoResponse;
} else if (call === 'search') {
response = cardinalitySearchResponse;
} else if (call === 'fieldCaps') {
response = fieldCapsResponse;
} else if (call === 'ml.estimateModelMemory') {
response = estimateModelMemory || modelMemoryEstimateResponse;
}
return Promise.resolve(response);
}: MockAPICallResponse = {}): IScopedClusterClient => {
const callAs = {
ml: {
info: () => Promise.resolve({ body: mlInfoResponse }),
estimateModelMemory: () =>
Promise.resolve({ body: estimateModelMemory || modelMemoryEstimateResponse }),
},
search: () => Promise.resolve({ body: cardinalitySearchResponse }),
fieldCaps: () => Promise.resolve({ body: fieldCapsResponse }),
};
return {
callAsCurrentUser: callAs,
callAsInternalUser: callAs,
};
return ({
asCurrentUser: callAs,
asInternalUser: callAs,
} as unknown) as IScopedClusterClient;
};
function getJobConfig(influencers: string[] = [], detectors: Detector[] = []) {

View file

@ -5,7 +5,7 @@
*/
import numeral from '@elastic/numeral';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';
import { validateJobObject } from './validate_job_object';
import { calculateModelMemoryLimitProvider } from '../calculate_model_memory_limit';
@ -16,11 +16,11 @@ import { MlInfoResponse } from '../../../common/types/ml_server_info';
const MODEL_MEMORY_LIMIT_MINIMUM_BYTES = 1048576;
export async function validateModelMemoryLimit(
mlClusterClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
job: CombinedJob,
duration?: { start?: number; end?: number }
) {
const { callAsInternalUser } = mlClusterClient;
const { asInternalUser } = client;
validateJobObject(job);
// retrieve the model memory limit specified by the user in the job config.
@ -52,12 +52,12 @@ export async function validateModelMemoryLimit(
// retrieve the max_model_memory_limit value from the server
// this will be unset unless the user has set this on their cluster
const info = (await callAsInternalUser('ml.info')) as MlInfoResponse;
const maxModelMemoryLimit = info.limits.max_model_memory_limit?.toUpperCase();
const effectiveMaxModelMemoryLimit = info.limits.effective_max_model_memory_limit?.toUpperCase();
const { body } = await asInternalUser.ml.info<MlInfoResponse>();
const maxModelMemoryLimit = body.limits.max_model_memory_limit?.toUpperCase();
const effectiveMaxModelMemoryLimit = body.limits.effective_max_model_memory_limit?.toUpperCase();
if (runCalcModelMemoryTest) {
const { modelMemoryLimit } = await calculateModelMemoryLimitProvider(mlClusterClient)(
const { modelMemoryLimit } = await calculateModelMemoryLimitProvider(client)(
job.analysis_config,
job.datafeed_config.indices.join(','),
job.datafeed_config.query,

View file

@ -6,7 +6,7 @@
import cloneDeep from 'lodash/cloneDeep';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';
@ -21,16 +21,15 @@ const mockSearchResponse = {
search: mockTimeRange,
};
const mlClusterClientFactory = (resp: any): ILegacyScopedClusterClient => {
const callAs = (path: string) => {
return new Promise((resolve) => {
resolve(resp[path]);
}) as Promise<any>;
};
return {
callAsCurrentUser: callAs,
callAsInternalUser: callAs,
const mlClusterClientFactory = (response: any): IScopedClusterClient => {
const callAs = {
fieldCaps: () => Promise.resolve({ body: response.fieldCaps }),
search: () => Promise.resolve({ body: response.search }),
};
return ({
asCurrentUser: callAs,
asInternalUser: callAs,
} as unknown) as IScopedClusterClient;
};
function getMinimalValidJob() {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { ES_FIELD_TYPES } from '../../../../../../src/plugins/data/server';
import { parseInterval } from '../../../common/util/parse_interval';
import { CombinedJob } from '../../../common/types/anomaly_detection_jobs';
@ -26,15 +26,12 @@ const BUCKET_SPAN_COMPARE_FACTOR = 25;
const MIN_TIME_SPAN_MS = 7200000;
const MIN_TIME_SPAN_READABLE = '2 hours';
export async function isValidTimeField(
{ callAsCurrentUser }: ILegacyScopedClusterClient,
job: CombinedJob
) {
export async function isValidTimeField({ asCurrentUser }: IScopedClusterClient, job: CombinedJob) {
const index = job.datafeed_config.indices.join(',');
const timeField = job.data_description.time_field;
// check if time_field is of type 'date' or 'date_nanos'
const fieldCaps = await callAsCurrentUser('fieldCaps', {
const { body: fieldCaps } = await asCurrentUser.fieldCaps({
index,
fields: [timeField],
});
@ -47,7 +44,7 @@ export async function isValidTimeField(
}
export async function validateTimeRange(
mlClientCluster: ILegacyScopedClusterClient,
mlClientCluster: IScopedClusterClient,
job: CombinedJob,
timeRange?: Partial<TimeRange>
) {

View file

@ -5,7 +5,7 @@
*/
import Boom from 'boom';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { PARTITION_FIELDS } from '../../../common/constants/anomalies';
import { PartitionFieldsType } from '../../../common/types/anomalies';
import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns';
@ -74,9 +74,7 @@ function getFieldObject(fieldType: PartitionFieldsType, aggs: any) {
: {};
}
export const getPartitionFieldsValuesFactory = ({
callAsInternalUser,
}: ILegacyScopedClusterClient) =>
export const getPartitionFieldsValuesFactory = ({ asInternalUser }: IScopedClusterClient) =>
/**
* Gets the record of partition fields with possible values that fit the provided queries.
* @param jobId - Job ID
@ -92,7 +90,7 @@ export const getPartitionFieldsValuesFactory = ({
earliestMs: number,
latestMs: number
) {
const jobsResponse = await callAsInternalUser('ml.jobs', { jobId: [jobId] });
const { body: jobsResponse } = await asInternalUser.ml.getJobs({ job_id: jobId });
if (jobsResponse.count === 0 || jobsResponse.jobs === undefined) {
throw Boom.notFound(`Job with the id "${jobId}" not found`);
}
@ -101,7 +99,7 @@ export const getPartitionFieldsValuesFactory = ({
const isModelPlotEnabled = job?.model_plot_config?.enabled;
const resp = await callAsInternalUser('search', {
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
@ -151,7 +149,7 @@ export const getPartitionFieldsValuesFactory = ({
return PARTITION_FIELDS.reduce((acc, key) => {
return {
...acc,
...getFieldObject(key, resp.aggregations),
...getFieldObject(key, body.aggregations),
};
}, {});
};

View file

@ -9,7 +9,7 @@ import slice from 'lodash/slice';
import get from 'lodash/get';
import moment from 'moment';
import { SearchResponse } from 'elasticsearch';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import Boom from 'boom';
import { buildAnomalyTableItems } from './build_anomaly_table_items';
import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns';
@ -40,8 +40,8 @@ interface Influencer {
fieldValue: any;
}
export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClient) {
const { callAsInternalUser } = mlClusterClient;
export function resultsServiceProvider(client: IScopedClusterClient) {
const { asInternalUser } = client;
// Obtains data for the anomalies table, aggregating anomalies by day or hour as requested.
// Return an Object with properties 'anomalies' and 'interval' (interval used to aggregate anomalies,
// one of day, hour or second. Note 'auto' can be provided as the aggregationInterval in the request,
@ -144,7 +144,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
});
}
const resp: SearchResponse<any> = await callAsInternalUser('search', {
const { body } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
rest_total_hits_as_int: true,
size: maxRecords,
@ -178,9 +178,9 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
anomalies: [],
interval: 'second',
};
if (resp.hits.total !== 0) {
if (body.hits.total !== 0) {
let records: AnomalyRecordDoc[] = [];
resp.hits.hits.forEach((hit) => {
body.hits.hits.forEach((hit) => {
records.push(hit._source);
});
@ -298,8 +298,8 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
},
};
const resp = await callAsInternalUser('search', query);
const maxScore = get(resp, ['aggregations', 'max_score', 'value'], null);
const { body } = await asInternalUser.search(query);
const maxScore = get(body, ['aggregations', 'max_score', 'value'], null);
return { maxScore };
}
@ -336,7 +336,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
// Size of job terms agg, consistent with maximum number of jobs supported by Java endpoints.
const maxJobs = 10000;
const resp = await callAsInternalUser('search', {
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
@ -364,7 +364,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
});
const bucketsByJobId: Array<{ key: string; maxTimestamp: { value?: number } }> = get(
resp,
body,
['aggregations', 'byJobId', 'buckets'],
[]
);
@ -380,7 +380,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
// from the given index and job ID.
// Returned response consists of a list of examples against category ID.
async function getCategoryExamples(jobId: string, categoryIds: any, maxExamples: number) {
const resp = await callAsInternalUser('search', {
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
rest_total_hits_as_int: true,
size: ANOMALIES_TABLE_DEFAULT_QUERY_SIZE, // Matches size of records in anomaly summary table.
@ -394,8 +394,8 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
});
const examplesByCategoryId: { [key: string]: any } = {};
if (resp.hits.total !== 0) {
resp.hits.hits.forEach((hit: any) => {
if (body.hits.total !== 0) {
body.hits.hits.forEach((hit: any) => {
if (maxExamples) {
examplesByCategoryId[hit._source.category_id] = slice(
hit._source.examples,
@ -415,7 +415,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
// Returned response contains four properties - categoryId, regex, examples
// and terms (space delimited String of the common tokens matched in values of the category).
async function getCategoryDefinition(jobId: string, categoryId: string) {
const resp = await callAsInternalUser('search', {
const { body } = await asInternalUser.search({
index: ML_RESULTS_INDEX_PATTERN,
rest_total_hits_as_int: true,
size: 1,
@ -429,8 +429,8 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
});
const definition = { categoryId, terms: null, regex: null, examples: [] };
if (resp.hits.total !== 0) {
const source = resp.hits.hits[0]._source;
if (body.hits.total !== 0) {
const source = body.hits.hits[0]._source;
definition.categoryId = source.category_id;
definition.regex = source.regex;
definition.terms = source.terms;
@ -456,7 +456,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
},
});
}
const results: SearchResponse<AnomalyCategorizerStatsDoc> = await callAsInternalUser('search', {
const { body } = await asInternalUser.search<SearchResponse<AnomalyCategorizerStatsDoc>>({
index: ML_RESULTS_INDEX_PATTERN,
body: {
query: {
@ -473,7 +473,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
},
},
});
return results ? results.hits.hits.map((r) => r._source) : [];
return body ? body.hits.hits.map((r) => r._source) : [];
}
async function getCategoryStoppedPartitions(
@ -485,15 +485,15 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
};
// first determine from job config if stop_on_warn is true
// if false return []
const jobConfigResponse: MlJobsResponse = await callAsInternalUser('ml.jobs', {
jobId: jobIds,
const { body } = await asInternalUser.ml.getJobs<MlJobsResponse>({
job_id: jobIds.join(),
});
if (!jobConfigResponse || jobConfigResponse.jobs.length < 1) {
if (!body || body.jobs.length < 1) {
throw Boom.notFound(`Unable to find anomaly detector jobs ${jobIds.join(', ')}`);
}
const jobIdsWithStopOnWarnSet = jobConfigResponse.jobs
const jobIdsWithStopOnWarnSet = body.jobs
.filter(
(jobConfig) =>
jobConfig.analysis_config?.per_partition_categorization?.stop_on_warn === true
@ -543,7 +543,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
},
},
];
const results: SearchResponse<any> = await callAsInternalUser('search', {
const { body: results } = await asInternalUser.search<SearchResponse<any>>({
index: ML_RESULTS_INDEX_PATTERN,
size: 0,
body: {
@ -594,7 +594,7 @@ export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClie
getCategoryExamples,
getLatestBucketTimestampByJob,
getMaxAnomalyScore,
getPartitionFieldsValues: getPartitionFieldsValuesFactory(mlClusterClient),
getPartitionFieldsValues: getPartitionFieldsValuesFactory(client),
getCategorizerStats,
getCategoryStoppedPartitions,
};

View file

@ -9,18 +9,16 @@ import {
CoreSetup,
CoreStart,
Plugin,
ILegacyScopedClusterClient,
KibanaRequest,
Logger,
PluginInitializerContext,
ILegacyCustomClusterClient,
CapabilitiesStart,
IClusterClient,
} from 'kibana/server';
import { PluginsSetup, RouteInitialization } from './types';
import { PLUGIN_ID, PLUGIN_ICON } from '../common/constants/app';
import { MlCapabilities } from '../common/types/capabilities';
import { elasticsearchJsPlugin } from './client/elasticsearch_ml';
import { initMlTelemetry } from './lib/telemetry';
import { initMlServerLog } from './client/log';
import { initSampleDataSets } from './lib/sample_data_sets';
@ -50,17 +48,7 @@ import { setupCapabilitiesSwitcher } from './lib/capabilities';
import { registerKibanaSettings } from './lib/register_settings';
import { inferenceRoutes } from './routes/inference';
declare module 'kibana/server' {
interface RequestHandlerContext {
[PLUGIN_ID]?: {
mlClient: ILegacyScopedClusterClient;
};
}
}
export interface MlPluginSetup extends SharedServices {
mlClient: ILegacyCustomClusterClient;
}
export type MlPluginSetup = SharedServices;
export type MlPluginStart = void;
export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, PluginsSetup> {
@ -68,6 +56,7 @@ export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, Plug
private version: string;
private mlLicense: MlServerLicense;
private capabilities: CapabilitiesStart | null = null;
private clusterClient: IClusterClient | null = null;
constructor(ctx: PluginInitializerContext) {
this.log = ctx.logger.get();
@ -125,17 +114,6 @@ export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, Plug
// initialize capabilities switcher to add license filter to ml capabilities
setupCapabilitiesSwitcher(coreSetup, plugins.licensing.license$, this.log);
// Can access via router's handler function 'context' parameter - context.ml.mlClient
const mlClient = coreSetup.elasticsearch.legacy.createClient(PLUGIN_ID, {
plugins: [elasticsearchJsPlugin],
});
coreSetup.http.registerRouteHandlerContext(PLUGIN_ID, (context, request) => {
return {
mlClient: mlClient.asScoped(request),
};
});
const routeInit: RouteInitialization = {
router: coreSetup.http.createRouter(),
mlLicense: this.mlLicense,
@ -176,13 +154,19 @@ export class MlServerPlugin implements Plugin<MlPluginSetup, MlPluginStart, Plug
inferenceRoutes(routeInit);
return {
...createSharedServices(this.mlLicense, plugins.spaces, plugins.cloud, resolveMlCapabilities),
mlClient,
...createSharedServices(
this.mlLicense,
plugins.spaces,
plugins.cloud,
resolveMlCapabilities,
() => this.clusterClient
),
};
}
public start(coreStart: CoreStart): MlPluginStart {
this.capabilities = coreStart.capabilities;
this.clusterClient = coreStart.elasticsearch.client;
}
public stop() {

View file

@ -58,9 +58,9 @@ export function annotationRoutes(
tags: ['access:ml:canGetAnnotations'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { getAnnotations } = annotationServiceProvider(legacyClient);
const { getAnnotations } = annotationServiceProvider(client);
const resp = await getAnnotations(request.body);
return response.ok({
@ -91,14 +91,14 @@ export function annotationRoutes(
tags: ['access:ml:canCreateAnnotation'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(legacyClient);
const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(client);
if (annotationsFeatureAvailable === false) {
throw getAnnotationsFeatureUnavailableErrorMessage();
}
const { indexAnnotation } = annotationServiceProvider(legacyClient);
const { indexAnnotation } = annotationServiceProvider(client);
const currentUser =
securityPlugin !== undefined ? securityPlugin.authc.getCurrentUser(request) : {};
@ -134,15 +134,15 @@ export function annotationRoutes(
tags: ['access:ml:canDeleteAnnotation'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(legacyClient);
const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable(client);
if (annotationsFeatureAvailable === false) {
throw getAnnotationsFeatureUnavailableErrorMessage();
}
const annotationId = request.params.annotationId;
const { deleteAnnotation } = annotationServiceProvider(legacyClient);
const { deleteAnnotation } = annotationServiceProvider(client);
const resp = await deleteAnnotation(annotationId);
return response.ok({

View file

@ -5,6 +5,7 @@
*/
import { schema } from '@kbn/config-schema';
import { RequestParams } from '@elastic/elasticsearch';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import {
@ -20,6 +21,7 @@ import {
getModelSnapshotsSchema,
updateModelSnapshotSchema,
} from './schemas/anomaly_detectors_schema';
/**
* Routes for the anomaly detectors
*/
@ -42,11 +44,11 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ response, client }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.jobs');
const { body } = await client.asInternalUser.ml.getJobs();
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -73,12 +75,12 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { jobId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.jobs', { jobId });
const { body } = await client.asInternalUser.ml.getJobs({ job_id: jobId });
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -104,11 +106,11 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.jobStats');
const { body } = await client.asInternalUser.ml.getJobStats();
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -135,12 +137,12 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { jobId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.jobStats', { jobId });
const { body } = await client.asInternalUser.ml.getJobStats({ job_id: jobId });
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -171,15 +173,15 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { jobId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.addJob', {
jobId,
const { body } = await client.asInternalUser.ml.putJob({
job_id: jobId,
body: request.body,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -208,15 +210,15 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canUpdateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { jobId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.updateJob', {
jobId,
const { body } = await client.asInternalUser.ml.updateJob({
job_id: jobId,
body: request.body,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -243,14 +245,12 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canOpenJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { jobId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.openJob', {
jobId,
});
const { body } = await client.asInternalUser.ml.openJob({ job_id: jobId });
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -277,18 +277,18 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCloseJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const options: { jobId: string; force?: boolean } = {
jobId: request.params.jobId,
const options: RequestParams.MlCloseJob = {
job_id: request.params.jobId,
};
const force = request.query.force;
if (force !== undefined) {
options.force = force;
}
const results = await legacyClient.callAsInternalUser('ml.closeJob', options);
const { body } = await client.asInternalUser.ml.closeJob(options);
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -315,18 +315,19 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const options: { jobId: string; force?: boolean } = {
jobId: request.params.jobId,
const options: RequestParams.MlDeleteJob = {
job_id: request.params.jobId,
wait_for_completion: false,
};
const force = request.query.force;
if (force !== undefined) {
options.force = force;
}
const results = await legacyClient.callAsInternalUser('ml.deleteJob', options);
const { body } = await client.asInternalUser.ml.deleteJob(options);
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -351,13 +352,11 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.validateDetector', {
body: request.body,
});
const { body } = await client.asInternalUser.ml.validateDetector({ body: request.body });
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -386,16 +385,16 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canForecastJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const jobId = request.params.jobId;
const duration = request.body.duration;
const results = await legacyClient.callAsInternalUser('ml.forecast', {
jobId,
const { body } = await client.asInternalUser.ml.forecast({
job_id: jobId,
duration,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -427,14 +426,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.records', {
jobId: request.params.jobId,
const { body } = await client.asInternalUser.ml.getRecords({
job_id: request.params.jobId,
body: request.body,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -466,15 +465,15 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.buckets', {
jobId: request.params.jobId,
const { body } = await client.asInternalUser.ml.getBuckets({
job_id: request.params.jobId,
timestamp: request.params.timestamp,
body: request.body,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -506,17 +505,17 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.overallBuckets', {
jobId: request.params.jobId,
const { body } = await client.asInternalUser.ml.getOverallBuckets({
job_id: request.params.jobId,
top_n: request.body.topN,
bucket_span: request.body.bucketSpan,
start: request.body.start,
end: request.body.end,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -543,14 +542,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.categories', {
jobId: request.params.jobId,
categoryId: request.params.categoryId,
const { body } = await client.asInternalUser.ml.getCategories({
job_id: request.params.jobId,
category_id: request.params.categoryId,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -577,13 +576,13 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.modelSnapshots', {
jobId: request.params.jobId,
const { body } = await client.asInternalUser.ml.getModelSnapshots({
job_id: request.params.jobId,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -610,14 +609,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.modelSnapshots', {
jobId: request.params.jobId,
snapshotId: request.params.snapshotId,
const { body } = await client.asInternalUser.ml.getModelSnapshots({
job_id: request.params.jobId,
snapshot_id: request.params.snapshotId,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -646,15 +645,15 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.updateModelSnapshot', {
jobId: request.params.jobId,
snapshotId: request.params.snapshotId,
const { body } = await client.asInternalUser.ml.updateModelSnapshot({
job_id: request.params.jobId,
snapshot_id: request.params.snapshotId,
body: request.body,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -681,14 +680,14 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.deleteModelSnapshot', {
jobId: request.params.jobId,
snapshotId: request.params.snapshotId,
const { body } = await client.asInternalUser.ml.deleteModelSnapshot({
job_id: request.params.jobId,
snapshot_id: request.params.snapshotId,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));

View file

@ -4,43 +4,39 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import { calendarSchema, calendarIdSchema, calendarIdsSchema } from './schemas/calendars_schema';
import { CalendarManager, Calendar, FormCalendar } from '../models/calendar';
function getAllCalendars(legacyClient: ILegacyScopedClusterClient) {
const cal = new CalendarManager(legacyClient);
function getAllCalendars(client: IScopedClusterClient) {
const cal = new CalendarManager(client);
return cal.getAllCalendars();
}
function getCalendar(legacyClient: ILegacyScopedClusterClient, calendarId: string) {
const cal = new CalendarManager(legacyClient);
function getCalendar(client: IScopedClusterClient, calendarId: string) {
const cal = new CalendarManager(client);
return cal.getCalendar(calendarId);
}
function newCalendar(legacyClient: ILegacyScopedClusterClient, calendar: FormCalendar) {
const cal = new CalendarManager(legacyClient);
function newCalendar(client: IScopedClusterClient, calendar: FormCalendar) {
const cal = new CalendarManager(client);
return cal.newCalendar(calendar);
}
function updateCalendar(
legacyClient: ILegacyScopedClusterClient,
calendarId: string,
calendar: Calendar
) {
const cal = new CalendarManager(legacyClient);
function updateCalendar(client: IScopedClusterClient, calendarId: string, calendar: Calendar) {
const cal = new CalendarManager(client);
return cal.updateCalendar(calendarId, calendar);
}
function deleteCalendar(legacyClient: ILegacyScopedClusterClient, calendarId: string) {
const cal = new CalendarManager(legacyClient);
function deleteCalendar(client: IScopedClusterClient, calendarId: string) {
const cal = new CalendarManager(client);
return cal.deleteCalendar(calendarId);
}
function getCalendarsByIds(legacyClient: ILegacyScopedClusterClient, calendarIds: string) {
const cal = new CalendarManager(legacyClient);
function getCalendarsByIds(client: IScopedClusterClient, calendarIds: string) {
const cal = new CalendarManager(client);
return cal.getCalendarsByIds(calendarIds);
}
@ -60,9 +56,9 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetCalendars'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const resp = await getAllCalendars(legacyClient);
const resp = await getAllCalendars(client);
return response.ok({
body: resp,
@ -92,15 +88,15 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetCalendars'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
let returnValue;
try {
const calendarIds = request.params.calendarIds.split(',');
if (calendarIds.length === 1) {
returnValue = await getCalendar(legacyClient, calendarIds[0]);
returnValue = await getCalendar(client, calendarIds[0]);
} else {
returnValue = await getCalendarsByIds(legacyClient, calendarIds);
returnValue = await getCalendarsByIds(client, calendarIds);
}
return response.ok({
@ -131,10 +127,10 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateCalendar'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const body = request.body;
const resp = await newCalendar(legacyClient, body);
const resp = await newCalendar(client, body);
return response.ok({
body: resp,
@ -166,11 +162,11 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateCalendar'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { calendarId } = request.params;
const body = request.body;
const resp = await updateCalendar(legacyClient, calendarId, body);
const resp = await updateCalendar(client, calendarId, body);
return response.ok({
body: resp,
@ -200,10 +196,10 @@ export function calendars({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteCalendar'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { calendarId } = request.params;
const resp = await deleteCalendar(legacyClient, calendarId);
const resp = await deleteCalendar(client, calendarId);
return response.ok({
body: resp,

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { RequestHandlerContext, ILegacyScopedClusterClient } from 'kibana/server';
import { RequestHandlerContext, IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { analyticsAuditMessagesProvider } from '../models/data_frame_analytics/analytics_audit_messages';
import { RouteInitialization } from '../types';
@ -36,13 +36,14 @@ function deleteDestIndexPatternById(context: RequestHandlerContext, indexPattern
*/
export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitialization) {
async function userCanDeleteIndex(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
destinationIndex: string
): Promise<boolean> {
if (!mlLicense.isSecurityEnabled()) {
return true;
}
const privilege = await legacyClient.callAsCurrentUser('ml.privilegeCheck', {
const { body } = await client.asCurrentUser.security.hasPrivileges({
body: {
index: [
{
@ -52,10 +53,8 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
],
},
});
if (!privilege) {
return false;
}
return privilege.has_all_requested === true;
return body?.has_all_requested === true;
}
/**
@ -76,11 +75,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.getDataFrameAnalytics');
const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({ size: 1000 });
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -107,14 +106,14 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { analyticsId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.getDataFrameAnalytics', {
analyticsId,
const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({
id: analyticsId,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -137,11 +136,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.getDataFrameAnalyticsStats');
const { body } = await client.asInternalUser.ml.getDataFrameAnalyticsStats({ size: 1000 });
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -168,14 +167,14 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { analyticsId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.getDataFrameAnalyticsStats', {
analyticsId,
const { body } = await client.asInternalUser.ml.getDataFrameAnalyticsStats({
id: analyticsId,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -205,16 +204,18 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canCreateDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { analyticsId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.createDataFrameAnalytics', {
body: request.body,
analyticsId,
...getAuthorizationHeader(request),
});
const { body } = await client.asInternalUser.ml.putDataFrameAnalytics(
{
id: analyticsId,
body: request.body,
},
getAuthorizationHeader(request)
);
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -241,14 +242,16 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.evaluateDataFrameAnalytics', {
body: request.body,
...getAuthorizationHeader(request),
});
const { body } = await client.asInternalUser.ml.evaluateDataFrame(
{
body: request.body,
},
getAuthorizationHeader(request)
);
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -276,13 +279,13 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canCreateDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const results = await legacyClient.callAsInternalUser('ml.explainDataFrameAnalytics', {
const { body } = await client.asInternalUser.ml.explainDataFrameAnalytics({
body: request.body,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -310,7 +313,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canDeleteDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
try {
const { analyticsId } = request.params;
const { deleteDestIndex, deleteDestIndexPattern } = request.query;
@ -324,11 +327,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
// Check if analyticsId is valid and get destination index
if (deleteDestIndex || deleteDestIndexPattern) {
try {
const dfa = await legacyClient.callAsInternalUser('ml.getDataFrameAnalytics', {
analyticsId,
const { body } = await client.asInternalUser.ml.getDataFrameAnalytics({
id: analyticsId,
});
if (Array.isArray(dfa.data_frame_analytics) && dfa.data_frame_analytics.length > 0) {
destinationIndex = dfa.data_frame_analytics[0].dest.index;
if (Array.isArray(body.data_frame_analytics) && body.data_frame_analytics.length > 0) {
destinationIndex = body.data_frame_analytics[0].dest.index;
}
} catch (e) {
return response.customError(wrapError(e));
@ -337,11 +340,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
// If user checks box to delete the destinationIndex associated with the job
if (destinationIndex && deleteDestIndex) {
// Verify if user has privilege to delete the destination index
const userCanDeleteDestIndex = await userCanDeleteIndex(legacyClient, destinationIndex);
const userCanDeleteDestIndex = await userCanDeleteIndex(client, destinationIndex);
// If user does have privilege to delete the index, then delete the index
if (userCanDeleteDestIndex) {
try {
await legacyClient.callAsCurrentUser('indices.delete', {
await client.asCurrentUser.indices.delete({
index: destinationIndex,
});
destIndexDeleted.success = true;
@ -370,8 +373,8 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
// Delete the data frame analytics
try {
await legacyClient.callAsInternalUser('ml.deleteDataFrameAnalytics', {
analyticsId,
await client.asInternalUser.ml.deleteDataFrameAnalytics({
id: analyticsId,
});
analyticsJobDeleted.success = true;
} catch (deleteDFAError) {
@ -413,14 +416,14 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canStartStopDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { analyticsId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.startDataFrameAnalytics', {
analyticsId,
const { body } = await client.asInternalUser.ml.startDataFrameAnalytics({
id: analyticsId,
});
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -449,10 +452,10 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canStartStopDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const options: { analyticsId: string; force?: boolean | undefined } = {
analyticsId: request.params.analyticsId,
const options: { id: string; force?: boolean | undefined } = {
id: request.params.analyticsId,
};
// @ts-expect-error TODO: update types
if (request.url?.query?.force !== undefined) {
@ -460,9 +463,9 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
options.force = request.url.query.force;
}
const results = await legacyClient.callAsInternalUser('ml.stopDataFrameAnalytics', options);
const { body } = await client.asInternalUser.ml.stopDataFrameAnalytics(options);
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -490,16 +493,18 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canCreateDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { analyticsId } = request.params;
const results = await legacyClient.callAsInternalUser('ml.updateDataFrameAnalytics', {
body: request.body,
analyticsId,
...getAuthorizationHeader(request),
});
const { body } = await client.asInternalUser.ml.updateDataFrameAnalytics(
{
id: analyticsId,
body: request.body,
},
getAuthorizationHeader(request)
);
return response.ok({
body: results,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -526,10 +531,10 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canGetDataFrameAnalytics'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { analyticsId } = request.params;
const { getAnalyticsAuditMessages } = analyticsAuditMessagesProvider(legacyClient);
const { getAnalyticsAuditMessages } = analyticsAuditMessagesProvider(client);
const results = await getAnalyticsAuditMessages(analyticsId);
return response.ok({

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { DataVisualizer } from '../models/data_visualizer';
import { Field, HistogramField } from '../models/data_visualizer/data_visualizer';
@ -17,7 +17,7 @@ import {
import { RouteInitialization } from '../types';
function getOverallStats(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
indexPatternTitle: string,
query: object,
aggregatableFields: string[],
@ -27,7 +27,7 @@ function getOverallStats(
earliestMs: number,
latestMs: number
) {
const dv = new DataVisualizer(legacyClient);
const dv = new DataVisualizer(client);
return dv.getOverallStats(
indexPatternTitle,
query,
@ -41,7 +41,7 @@ function getOverallStats(
}
function getStatsForFields(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
indexPatternTitle: string,
query: any,
fields: Field[],
@ -52,7 +52,7 @@ function getStatsForFields(
interval: number,
maxExamples: number
) {
const dv = new DataVisualizer(legacyClient);
const dv = new DataVisualizer(client);
return dv.getStatsForFields(
indexPatternTitle,
query,
@ -67,13 +67,13 @@ function getStatsForFields(
}
function getHistogramsForFields(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
indexPatternTitle: string,
query: any,
fields: HistogramField[],
samplerShardSize: number
) {
const dv = new DataVisualizer(legacyClient);
const dv = new DataVisualizer(client);
return dv.getHistogramsForFields(indexPatternTitle, query, fields, samplerShardSize);
}
@ -104,7 +104,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
params: { indexPatternTitle },
@ -112,7 +112,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
} = request;
const results = await getHistogramsForFields(
legacyClient,
client,
indexPatternTitle,
query,
fields,
@ -151,7 +151,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
params: { indexPatternTitle },
@ -168,7 +168,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
} = request;
const results = await getStatsForFields(
legacyClient,
client,
indexPatternTitle,
query,
fields,
@ -216,7 +216,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
params: { indexPatternTitle },
@ -232,7 +232,7 @@ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization)
} = request;
const results = await getOverallStats(
legacyClient,
client,
indexPatternTitle,
query,
aggregatableFields,

View file

@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { RequestParams } from '@elastic/elasticsearch';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import {
@ -33,12 +34,12 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetDatafeeds'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const resp = await legacyClient.callAsInternalUser('ml.datafeeds');
const { body } = await client.asInternalUser.ml.getDatafeeds();
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -65,13 +66,13 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetDatafeeds'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const resp = await legacyClient.callAsInternalUser('ml.datafeeds', { datafeedId });
const { body } = await client.asInternalUser.ml.getDatafeeds({ datafeed_id: datafeedId });
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -94,12 +95,12 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetDatafeeds'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await legacyClient.callAsInternalUser('ml.datafeedStats');
const { body } = await client.asInternalUser.ml.getDatafeedStats();
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -126,15 +127,15 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetDatafeeds'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const resp = await legacyClient.callAsInternalUser('ml.datafeedStats', {
datafeedId,
const { body } = await client.asInternalUser.ml.getDatafeedStats({
datafeed_id: datafeedId,
});
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -163,17 +164,19 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const resp = await legacyClient.callAsInternalUser('ml.addDatafeed', {
datafeedId,
body: request.body,
...getAuthorizationHeader(request),
});
const { body } = await client.asInternalUser.ml.putDatafeed(
{
datafeed_id: datafeedId,
body: request.body,
},
getAuthorizationHeader(request)
);
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -202,17 +205,19 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canUpdateDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const resp = await legacyClient.callAsInternalUser('ml.updateDatafeed', {
datafeedId,
body: request.body,
...getAuthorizationHeader(request),
});
const { body } = await client.asInternalUser.ml.updateDatafeed(
{
datafeed_id: datafeedId,
body: request.body,
},
getAuthorizationHeader(request)
);
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -241,20 +246,20 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const options: { datafeedId: string; force?: boolean } = {
datafeedId: request.params.jobId,
const options: RequestParams.MlDeleteDatafeed = {
datafeed_id: request.params.jobId,
};
const force = request.query.force;
if (force !== undefined) {
options.force = force;
}
const resp = await legacyClient.callAsInternalUser('ml.deleteDatafeed', options);
const { body } = await client.asInternalUser.ml.deleteDatafeed(options);
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -283,19 +288,19 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const { start, end } = request.body;
const resp = await legacyClient.callAsInternalUser('ml.startDatafeed', {
datafeedId,
const { body } = await client.asInternalUser.ml.startDatafeed({
datafeed_id: datafeedId,
start,
end,
});
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -322,16 +327,16 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const resp = await legacyClient.callAsInternalUser('ml.stopDatafeed', {
datafeedId,
const { body } = await client.asInternalUser.ml.stopDatafeed({
datafeed_id: datafeedId,
});
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));
@ -358,16 +363,18 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canPreviewDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const datafeedId = request.params.datafeedId;
const resp = await legacyClient.callAsInternalUser('ml.datafeedPreview', {
datafeedId,
...getAuthorizationHeader(request),
});
const { body } = await client.asInternalUser.ml.previewDatafeed(
{
datafeed_id: datafeedId,
},
getAuthorizationHeader(request)
);
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import {
@ -13,14 +13,14 @@ import {
} from './schemas/fields_service_schema';
import { fieldsServiceProvider } from '../models/fields_service';
function getCardinalityOfFields(legacyClient: ILegacyScopedClusterClient, payload: any) {
const fs = fieldsServiceProvider(legacyClient);
function getCardinalityOfFields(client: IScopedClusterClient, payload: any) {
const fs = fieldsServiceProvider(client);
const { index, fieldNames, query, timeFieldName, earliestMs, latestMs } = payload;
return fs.getCardinalityOfFields(index, fieldNames, query, timeFieldName, earliestMs, latestMs);
}
function getTimeFieldRange(legacyClient: ILegacyScopedClusterClient, payload: any) {
const fs = fieldsServiceProvider(legacyClient);
function getTimeFieldRange(client: IScopedClusterClient, payload: any) {
const fs = fieldsServiceProvider(client);
const { index, timeFieldName, query } = payload;
return fs.getTimeFieldRange(index, timeFieldName, query);
}
@ -50,9 +50,9 @@ export function fieldsService({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canAccessML'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getCardinalityOfFields(legacyClient, request.body);
const resp = await getCardinalityOfFields(client, request.body);
return response.ok({
body: resp,
@ -85,9 +85,9 @@ export function fieldsService({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getTimeFieldRange(legacyClient, request.body);
const resp = await getTimeFieldRange(client, request.body);
return response.ok({
body: resp,

View file

@ -5,7 +5,7 @@
*/
import { schema } from '@kbn/config-schema';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { MAX_FILE_SIZE_BYTES } from '../../common/constants/file_datavisualizer';
import {
InputOverrides,
@ -28,17 +28,13 @@ import {
importFileQuerySchema,
} from './schemas/file_data_visualizer_schema';
function analyzeFiles(
legacyClient: ILegacyScopedClusterClient,
data: InputData,
overrides: InputOverrides
) {
const { analyzeFile } = fileDataVisualizerProvider(legacyClient);
function analyzeFiles(client: IScopedClusterClient, data: InputData, overrides: InputOverrides) {
const { analyzeFile } = fileDataVisualizerProvider(client);
return analyzeFile(data, overrides);
}
function importData(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
id: string,
index: string,
settings: Settings,
@ -46,7 +42,7 @@ function importData(
ingestPipeline: IngestPipelineWrapper,
data: InputData
) {
const { importData: importDataFunc } = importDataProvider(legacyClient);
const { importData: importDataFunc } = importDataProvider(client);
return importDataFunc(id, index, settings, mappings, ingestPipeline, data);
}
@ -78,9 +74,9 @@ export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canFindFileStructure'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const result = await analyzeFiles(legacyClient, request.body, request.query);
const result = await analyzeFiles(client, request.body, request.query);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
@ -113,7 +109,7 @@ export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitializat
tags: ['access:ml:canFindFileStructure'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { id } = request.query;
const { index, data, settings, mappings, ingestPipeline } = request.body;
@ -126,7 +122,7 @@ export function fileDataVisualizerRoutes({ router, mlLicense }: RouteInitializat
}
const result = await importData(
legacyClient,
client,
id,
index,
settings,

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
import { createFilterSchema, filterIdSchema, updateFilterSchema } from './schemas/filters_schema';
@ -12,37 +12,33 @@ import { FilterManager, FormFilter } from '../models/filter';
// TODO - add function for returning a list of just the filter IDs.
// TODO - add function for returning a list of filter IDs plus item count.
function getAllFilters(legacyClient: ILegacyScopedClusterClient) {
const mgr = new FilterManager(legacyClient);
function getAllFilters(client: IScopedClusterClient) {
const mgr = new FilterManager(client);
return mgr.getAllFilters();
}
function getAllFilterStats(legacyClient: ILegacyScopedClusterClient) {
const mgr = new FilterManager(legacyClient);
function getAllFilterStats(client: IScopedClusterClient) {
const mgr = new FilterManager(client);
return mgr.getAllFilterStats();
}
function getFilter(legacyClient: ILegacyScopedClusterClient, filterId: string) {
const mgr = new FilterManager(legacyClient);
function getFilter(client: IScopedClusterClient, filterId: string) {
const mgr = new FilterManager(client);
return mgr.getFilter(filterId);
}
function newFilter(legacyClient: ILegacyScopedClusterClient, filter: FormFilter) {
const mgr = new FilterManager(legacyClient);
function newFilter(client: IScopedClusterClient, filter: FormFilter) {
const mgr = new FilterManager(client);
return mgr.newFilter(filter);
}
function updateFilter(
legacyClient: ILegacyScopedClusterClient,
filterId: string,
filter: FormFilter
) {
const mgr = new FilterManager(legacyClient);
function updateFilter(client: IScopedClusterClient, filterId: string, filter: FormFilter) {
const mgr = new FilterManager(client);
return mgr.updateFilter(filterId, filter);
}
function deleteFilter(legacyClient: ILegacyScopedClusterClient, filterId: string) {
const mgr = new FilterManager(legacyClient);
function deleteFilter(client: IScopedClusterClient, filterId: string) {
const mgr = new FilterManager(client);
return mgr.deleteFilter(filterId);
}
@ -65,9 +61,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetFilters'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const resp = await getAllFilters(legacyClient);
const resp = await getAllFilters(client);
return response.ok({
body: resp,
@ -100,9 +96,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetFilters'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getFilter(legacyClient, request.params.filterId);
const resp = await getFilter(client, request.params.filterId);
return response.ok({
body: resp,
});
@ -134,10 +130,10 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateFilter'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const body = request.body;
const resp = await newFilter(legacyClient, body);
const resp = await newFilter(client, body);
return response.ok({
body: resp,
@ -172,11 +168,11 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateFilter'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { filterId } = request.params;
const body = request.body;
const resp = await updateFilter(legacyClient, filterId, body);
const resp = await updateFilter(client, filterId, body);
return response.ok({
body: resp,
@ -206,10 +202,10 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteFilter'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { filterId } = request.params;
const resp = await deleteFilter(legacyClient, filterId);
const resp = await deleteFilter(client, filterId);
return response.ok({
body: resp,
@ -239,9 +235,9 @@ export function filtersRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetFilters'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const resp = await getAllFilterStats(legacyClient);
const resp = await getAllFilterStats(client);
return response.ok({
body: resp,

View file

@ -31,7 +31,7 @@ export function indicesRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canAccessML'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
body: { index, fields: requestFields },
@ -40,8 +40,8 @@ export function indicesRoutes({ router, mlLicense }: RouteInitialization) {
requestFields !== undefined && Array.isArray(requestFields)
? requestFields.join(',')
: '*';
const result = await legacyClient.callAsCurrentUser('fieldCaps', { index, fields });
return response.ok({ body: result });
const { body } = await client.asInternalUser.fieldCaps({ index, fields });
return response.ok({ body });
} catch (e) {
return response.customError(wrapError(e));
}

View file

@ -37,9 +37,9 @@ export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitializatio
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { getJobAuditMessages } = jobAuditMessagesProvider(legacyClient);
const { getJobAuditMessages } = jobAuditMessagesProvider(client);
const { jobId } = request.params;
const { from } = request.query;
const resp = await getJobAuditMessages(jobId, from);
@ -72,9 +72,9 @@ export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitializatio
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { getJobAuditMessages } = jobAuditMessagesProvider(legacyClient);
const { getJobAuditMessages } = jobAuditMessagesProvider(client);
const { from } = request.query;
const resp = await getJobAuditMessages(undefined, from);

View file

@ -48,9 +48,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { forceStartDatafeeds } = jobServiceProvider(legacyClient);
const { forceStartDatafeeds } = jobServiceProvider(client);
const { datafeedIds, start, end } = request.body;
const resp = await forceStartDatafeeds(datafeedIds, start, end);
@ -82,9 +82,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { stopDatafeeds } = jobServiceProvider(legacyClient);
const { stopDatafeeds } = jobServiceProvider(client);
const { datafeedIds } = request.body;
const resp = await stopDatafeeds(datafeedIds);
@ -116,9 +116,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canDeleteJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { deleteJobs } = jobServiceProvider(legacyClient);
const { deleteJobs } = jobServiceProvider(client);
const { jobIds } = request.body;
const resp = await deleteJobs(jobIds);
@ -150,9 +150,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCloseJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { closeJobs } = jobServiceProvider(legacyClient);
const { closeJobs } = jobServiceProvider(client);
const { jobIds } = request.body;
const resp = await closeJobs(jobIds);
@ -184,9 +184,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCloseJob', 'access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { forceStopAndCloseJob } = jobServiceProvider(legacyClient);
const { forceStopAndCloseJob } = jobServiceProvider(client);
const { jobId } = request.body;
const resp = await forceStopAndCloseJob(jobId);
@ -223,9 +223,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { jobsSummary } = jobServiceProvider(legacyClient);
const { jobsSummary } = jobServiceProvider(client);
const { jobIds } = request.body;
const resp = await jobsSummary(jobIds);
@ -257,9 +257,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const { jobsWithTimerange } = jobServiceProvider(legacyClient);
const { jobsWithTimerange } = jobServiceProvider(client);
const resp = await jobsWithTimerange();
return response.ok({
@ -290,9 +290,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { createFullJobsList } = jobServiceProvider(legacyClient);
const { createFullJobsList } = jobServiceProvider(client);
const { jobIds } = request.body;
const resp = await createFullJobsList(jobIds);
@ -320,9 +320,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const { getAllGroups } = jobServiceProvider(legacyClient);
const { getAllGroups } = jobServiceProvider(client);
const resp = await getAllGroups();
return response.ok({
@ -353,9 +353,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canUpdateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { updateGroups } = jobServiceProvider(legacyClient);
const { updateGroups } = jobServiceProvider(client);
const { jobs } = request.body;
const resp = await updateGroups(jobs);
@ -383,9 +383,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const { deletingJobTasks } = jobServiceProvider(legacyClient);
const { deletingJobTasks } = jobServiceProvider(client);
const resp = await deletingJobTasks();
return response.ok({
@ -416,9 +416,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { jobsExist } = jobServiceProvider(legacyClient);
const { jobsExist } = jobServiceProvider(client);
const { jobIds } = request.body;
const resp = await jobsExist(jobIds);
@ -449,12 +449,12 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
try {
const { indexPattern } = request.params;
const isRollup = request.query.rollup === 'true';
const savedObjectsClient = context.core.savedObjects.client;
const { newJobCaps } = jobServiceProvider(legacyClient);
const { newJobCaps } = jobServiceProvider(client);
const resp = await newJobCaps(indexPattern, isRollup, savedObjectsClient);
return response.ok({
@ -485,7 +485,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
indexPatternTitle,
@ -499,7 +499,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
splitFieldValue,
} = request.body;
const { newJobLineChart } = jobServiceProvider(legacyClient);
const { newJobLineChart } = jobServiceProvider(client);
const resp = await newJobLineChart(
indexPatternTitle,
timeField,
@ -540,7 +540,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const {
indexPatternTitle,
@ -553,7 +553,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
splitFieldName,
} = request.body;
const { newJobPopulationChart } = jobServiceProvider(legacyClient);
const { newJobPopulationChart } = jobServiceProvider(client);
const resp = await newJobPopulationChart(
indexPatternTitle,
timeField,
@ -589,9 +589,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const { getAllJobAndGroupIds } = jobServiceProvider(legacyClient);
const { getAllJobAndGroupIds } = jobServiceProvider(client);
const resp = await getAllJobAndGroupIds();
return response.ok({
@ -622,9 +622,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { getLookBackProgress } = jobServiceProvider(legacyClient);
const { getLookBackProgress } = jobServiceProvider(client);
const { jobId, start, end } = request.body;
const resp = await getLookBackProgress(jobId, start, end);
@ -656,9 +656,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { validateCategoryExamples } = categorizationExamplesProvider(legacyClient);
const { validateCategoryExamples } = categorizationExamplesProvider(client);
const {
indexPatternTitle,
timeField,
@ -709,9 +709,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { topCategories } = jobServiceProvider(legacyClient);
const { topCategories } = jobServiceProvider(client);
const { jobId, count } = request.body;
const resp = await topCategories(jobId, count);
@ -743,9 +743,9 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob', 'access:ml:canStartStopDatafeed'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { revertModelSnapshot } = jobServiceProvider(legacyClient);
const { revertModelSnapshot } = jobServiceProvider(client);
const {
jobId,
snapshotId,

View file

@ -5,7 +5,7 @@
*/
import Boom from 'boom';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { TypeOf } from '@kbn/config-schema';
import { AnalysisConfig } from '../../common/types/anomaly_detection_jobs';
import { wrapError } from '../client/error_wrapper';
@ -27,12 +27,12 @@ type CalculateModelMemoryLimitPayload = TypeOf<typeof modelMemoryLimitSchema>;
*/
export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, version: string) {
function calculateModelMemoryLimit(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
payload: CalculateModelMemoryLimitPayload
) {
const { analysisConfig, indexPattern, query, timeFieldName, earliestMs, latestMs } = payload;
return calculateModelMemoryLimitProvider(legacyClient)(
return calculateModelMemoryLimitProvider(client)(
analysisConfig as AnalysisConfig,
indexPattern,
query,
@ -61,10 +61,10 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization,
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
let errorResp;
const resp = await estimateBucketSpanFactory(legacyClient)(request.body)
const resp = await estimateBucketSpanFactory(client)(request.body)
// this catch gets triggered when the estimation code runs without error
// but isn't able to come up with a bucket span estimation.
// this doesn't return a HTTP error but an object with an error message
@ -109,9 +109,9 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization,
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await calculateModelMemoryLimit(legacyClient, request.body);
const resp = await calculateModelMemoryLimit(client, request.body);
return response.ok({
body: resp,
@ -141,9 +141,9 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization,
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await validateCardinality(legacyClient, request.body);
const resp = await validateCardinality(client, request.body);
return response.ok({
body: resp,
@ -173,11 +173,11 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization,
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
// version corresponds to the version used in documentation links.
const resp = await validateJob(
legacyClient,
client,
request.body,
version,
mlLicense.isSecurityEnabled() === false

View file

@ -6,11 +6,7 @@
import { TypeOf } from '@kbn/config-schema';
import {
ILegacyScopedClusterClient,
KibanaRequest,
SavedObjectsClientContract,
} from 'kibana/server';
import { IScopedClusterClient, KibanaRequest, SavedObjectsClientContract } from 'kibana/server';
import { DatafeedOverride, JobOverride } from '../../common/types/modules';
import { wrapError } from '../client/error_wrapper';
import { DataRecognizer } from '../models/data_recognizer';
@ -23,22 +19,22 @@ import {
import { RouteInitialization } from '../types';
function recognize(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest,
indexPatternTitle: string
) {
const dr = new DataRecognizer(legacyClient, savedObjectsClient, request);
const dr = new DataRecognizer(client, savedObjectsClient, request);
return dr.findMatches(indexPatternTitle);
}
function getModule(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest,
moduleId: string
) {
const dr = new DataRecognizer(legacyClient, savedObjectsClient, request);
const dr = new DataRecognizer(client, savedObjectsClient, request);
if (moduleId === undefined) {
return dr.listModules();
} else {
@ -47,7 +43,7 @@ function getModule(
}
function setup(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest,
moduleId: string,
@ -63,7 +59,7 @@ function setup(
datafeedOverrides?: DatafeedOverride | DatafeedOverride[],
estimateModelMemory?: boolean
) {
const dr = new DataRecognizer(legacyClient, savedObjectsClient, request);
const dr = new DataRecognizer(client, savedObjectsClient, request);
return dr.setup(
moduleId,
prefix,
@ -81,12 +77,12 @@ function setup(
}
function dataRecognizerJobsExist(
legacyClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest,
moduleId: string
) {
const dr = new DataRecognizer(legacyClient, savedObjectsClient, request);
const dr = new DataRecognizer(client, savedObjectsClient, request);
return dr.dataRecognizerJobsExist(moduleId);
}
@ -131,11 +127,11 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
try {
const { indexPatternTitle } = request.params;
const results = await recognize(
legacyClient,
client,
context.core.savedObjects.client,
request,
indexPatternTitle
@ -266,7 +262,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
try {
let { moduleId } = request.params;
if (moduleId === '') {
@ -275,7 +271,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
moduleId = undefined;
}
const results = await getModule(
legacyClient,
client,
context.core.savedObjects.client,
request,
moduleId
@ -439,7 +435,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canCreateJob'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
try {
const { moduleId } = request.params;
@ -458,7 +454,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
} = request.body as TypeOf<typeof setupModuleBodySchema>;
const result = await setup(
legacyClient,
client,
context.core.savedObjects.client,
request,
moduleId,
@ -544,11 +540,11 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response, context }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response, context }) => {
try {
const { moduleId } = request.params;
const result = await dataRecognizerJobsExist(
legacyClient,
client,
context.core.savedObjects.client,
request,
moduleId

View file

@ -26,16 +26,15 @@ export function notificationRoutes({ router, mlLicense }: RouteInitialization) {
tags: ['access:ml:canAccessML'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, response }) => {
try {
const params = {
includeDefaults: true,
filterPath: '**.xpack.notification',
};
const resp = await legacyClient.callAsCurrentUser('cluster.getSettings', params);
const { body } = await client.asCurrentUser.cluster.getSettings({
include_defaults: true,
filter_path: '**.xpack.notification',
});
return response.ok({
body: resp,
body,
});
} catch (e) {
return response.customError(wrapError(e));

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { schema } from '@kbn/config-schema';
import { wrapError } from '../client/error_wrapper';
import { RouteInitialization } from '../types';
@ -23,8 +23,8 @@ import {
getCategorizerStoppedPartitionsSchema,
} from './schemas/results_service_schema';
function getAnomaliesTableData(legacyClient: ILegacyScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(legacyClient);
function getAnomaliesTableData(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
const {
jobIds,
criteriaFields,
@ -53,39 +53,39 @@ function getAnomaliesTableData(legacyClient: ILegacyScopedClusterClient, payload
);
}
function getCategoryDefinition(legacyClient: ILegacyScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(legacyClient);
function getCategoryDefinition(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
return rs.getCategoryDefinition(payload.jobId, payload.categoryId);
}
function getCategoryExamples(legacyClient: ILegacyScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(legacyClient);
function getCategoryExamples(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
const { jobId, categoryIds, maxExamples } = payload;
return rs.getCategoryExamples(jobId, categoryIds, maxExamples);
}
function getMaxAnomalyScore(legacyClient: ILegacyScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(legacyClient);
function getMaxAnomalyScore(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
const { jobIds, earliestMs, latestMs } = payload;
return rs.getMaxAnomalyScore(jobIds, earliestMs, latestMs);
}
function getPartitionFieldsValues(legacyClient: ILegacyScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(legacyClient);
function getPartitionFieldsValues(client: IScopedClusterClient, payload: any) {
const rs = resultsServiceProvider(client);
const { jobId, searchTerm, criteriaFields, earliestMs, latestMs } = payload;
return rs.getPartitionFieldsValues(jobId, searchTerm, criteriaFields, earliestMs, latestMs);
}
function getCategorizerStats(legacyClient: ILegacyScopedClusterClient, params: any, query: any) {
function getCategorizerStats(client: IScopedClusterClient, params: any, query: any) {
const { jobId } = params;
const { partitionByValue } = query;
const rs = resultsServiceProvider(legacyClient);
const rs = resultsServiceProvider(client);
return rs.getCategorizerStats(jobId, partitionByValue);
}
function getCategoryStoppedPartitions(legacyClient: ILegacyScopedClusterClient, payload: any) {
function getCategoryStoppedPartitions(client: IScopedClusterClient, payload: any) {
const { jobIds, fieldToBucket } = payload;
const rs = resultsServiceProvider(legacyClient);
const rs = resultsServiceProvider(client);
return rs.getCategoryStoppedPartitions(jobIds, fieldToBucket);
}
@ -112,9 +112,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getAnomaliesTableData(legacyClient, request.body);
const resp = await getAnomaliesTableData(client, request.body);
return response.ok({
body: resp,
@ -144,9 +144,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getCategoryDefinition(legacyClient, request.body);
const resp = await getCategoryDefinition(client, request.body);
return response.ok({
body: resp,
@ -176,9 +176,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getMaxAnomalyScore(legacyClient, request.body);
const resp = await getMaxAnomalyScore(client, request.body);
return response.ok({
body: resp,
@ -208,9 +208,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getCategoryExamples(legacyClient, request.body);
const resp = await getCategoryExamples(client, request.body);
return response.ok({
body: resp,
@ -240,9 +240,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getPartitionFieldsValues(legacyClient, request.body);
const resp = await getPartitionFieldsValues(client, request.body);
return response.ok({
body: resp,
@ -269,14 +269,14 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
const body = {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
const { body } = await client.asInternalUser.search({
...request.body,
index: ML_RESULTS_INDEX_PATTERN,
};
});
try {
return response.ok({
body: await legacyClient.callAsInternalUser('search', body),
body,
});
} catch (error) {
return response.customError(wrapError(error));
@ -304,9 +304,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getCategorizerStats(legacyClient, request.params, request.query);
const resp = await getCategorizerStats(client, request.params, request.query);
return response.ok({
body: resp,
});
@ -334,9 +334,9 @@ export function resultsServiceRoutes({ router, mlLicense }: RouteInitialization)
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const resp = await getCategoryStoppedPartitions(legacyClient, request.body);
const resp = await getCategoryStoppedPartitions(client, request.body);
return response.ok({
body: resp,
});

View file

@ -7,7 +7,7 @@
import { schema } from '@kbn/config-schema';
import { Request } from 'hapi';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { IScopedClusterClient } from 'kibana/server';
import { wrapError } from '../client/error_wrapper';
import { mlLog } from '../client/log';
import { capabilitiesProvider } from '../lib/capabilities';
@ -21,17 +21,16 @@ export function systemRoutes(
{ router, mlLicense }: RouteInitialization,
{ spaces, cloud, resolveMlCapabilities }: SystemRouteDeps
) {
async function getNodeCount(legacyClient: ILegacyScopedClusterClient) {
const filterPath = 'nodes.*.attributes';
const resp = await legacyClient.callAsInternalUser('nodes.info', {
filterPath,
async function getNodeCount(client: IScopedClusterClient) {
const { body } = await client.asInternalUser.nodes.info({
filter_path: 'nodes.*.attributes',
});
let count = 0;
if (typeof resp.nodes === 'object') {
Object.keys(resp.nodes).forEach((k) => {
if (resp.nodes[k].attributes !== undefined) {
const maxOpenJobs = resp.nodes[k].attributes['ml.max_open_jobs'];
if (typeof body.nodes === 'object') {
Object.keys(body.nodes).forEach((k) => {
if (body.nodes[k].attributes !== undefined) {
const maxOpenJobs = body.nodes[k].attributes['ml.max_open_jobs'];
if (maxOpenJobs !== null && maxOpenJobs > 0) {
count++;
}
@ -58,15 +57,15 @@ export function systemRoutes(
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { callAsCurrentUser, callAsInternalUser } = legacyClient;
const { asCurrentUser, asInternalUser } = client;
let upgradeInProgress = false;
try {
const info = await callAsInternalUser('ml.info');
const { body } = await asInternalUser.ml.info();
// if ml indices are currently being migrated, upgrade_mode will be set to true
// pass this back with the privileges to allow for the disabling of UI controls.
upgradeInProgress = info.upgrade_mode === true;
upgradeInProgress = body.upgrade_mode === true;
} catch (error) {
// if the ml.info check fails, it could be due to the user having insufficient privileges
// most likely they do not have the ml_user role and therefore will be blocked from using
@ -90,11 +89,12 @@ export function systemRoutes(
},
});
} else {
const body = request.body;
const resp = await callAsCurrentUser('ml.privilegeCheck', { body });
resp.upgradeInProgress = upgradeInProgress;
const { body } = await asCurrentUser.security.hasPrivileges({ body: request.body });
return response.ok({
body: resp,
body: {
...body,
upgradeInProgress,
},
});
}
} catch (error) {
@ -115,7 +115,7 @@ export function systemRoutes(
path: '/api/ml/ml_capabilities',
validate: false,
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
// if spaces is disabled force isMlEnabledInSpace to be true
const { isMlEnabledInSpace } =
@ -129,7 +129,7 @@ export function systemRoutes(
}
const { getCapabilities } = capabilitiesProvider(
legacyClient,
client,
mlCapabilities,
mlLicense,
isMlEnabledInSpace
@ -159,10 +159,10 @@ export function systemRoutes(
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
return response.ok({
body: await getNodeCount(legacyClient),
body: await getNodeCount(client),
});
} catch (e) {
return response.customError(wrapError(e));
@ -185,12 +185,12 @@ export function systemRoutes(
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const info = await legacyClient.callAsInternalUser('ml.info');
const { body } = await client.asInternalUser.ml.info();
const cloudId = cloud && cloud.cloudId;
return response.ok({
body: { ...info, cloudId },
body: { ...body, cloudId },
});
} catch (error) {
return response.customError(wrapError(error));
@ -216,10 +216,11 @@ export function systemRoutes(
tags: ['access:ml:canGetJobs'],
},
},
mlLicense.fullLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.fullLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { body } = await client.asCurrentUser.search(request.body);
return response.ok({
body: await legacyClient.callAsCurrentUser('search', request.body),
body,
});
} catch (error) {
return response.customError(wrapError(error));
@ -243,22 +244,21 @@ export function systemRoutes(
tags: ['access:ml:canAccessML'],
},
},
mlLicense.basicLicenseAPIGuard(async ({ legacyClient, request, response }) => {
mlLicense.basicLicenseAPIGuard(async ({ client, request, response }) => {
try {
const { index } = request.body;
const options = {
index: [index],
fields: ['*'],
ignoreUnavailable: true,
allowNoIndices: true,
ignore: 404,
ignore_unavailable: true,
allow_no_indices: true,
};
const fieldsResult = await legacyClient.callAsCurrentUser('fieldCaps', options);
const { body } = await client.asCurrentUser.fieldCaps(options);
const result = { exists: false };
if (Array.isArray(fieldsResult.indices) && fieldsResult.indices.length !== 0) {
if (Array.isArray(body.indices) && body.indices.length !== 0) {
result.exists = true;
}

View file

@ -0,0 +1,12 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/**
 * Error thrown when the ML cluster client is used before it has been
 * initialized (per the class name; thrown by callers elsewhere in the plugin).
 */
export class MLClusterClientUninitialized extends Error {
  constructor(message?: string) {
    super(message);
    // Restore the prototype chain so `instanceof MLClusterClientUninitialized`
    // works correctly when Error subclassing is downleveled by the compiler.
    Object.setPrototypeOf(this, new.target.prototype);
    // Report the class name instead of the default 'Error' in logs and
    // serialized output.
    this.name = 'MLClusterClientUninitialized';
  }
}

View file

@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/* eslint-disable max-classes-per-file */
/**
 * Error thrown by the license guards when an endpoint requiring a
 * Platinum/Enterprise (or trial) license is called under a lesser license.
 */
export class InsufficientFullLicenseError extends Error {
  constructor(message?: string) {
    super(message);
    // Repair the prototype chain so `instanceof` works on Error
    // subclasses when compiling to ES5.
    Object.setPrototypeOf(this, new.target.prototype);
    // Use the class name instead of the generic "Error" in logs/serialization.
    this.name = 'InsufficientFullLicenseError';
  }
}
/**
 * Error thrown by the license guards when an endpoint requiring at least
 * a Basic license is called without one.
 */
export class InsufficientBasicLicenseError extends Error {
  constructor(message?: string) {
    super(message);
    // Repair the prototype chain so `instanceof` works on Error
    // subclasses when compiling to ES5.
    Object.setPrototypeOf(this, new.target.prototype);
    // Use the class name instead of the generic "Error" in logs/serialization.
    this.name = 'InsufficientBasicLicenseError';
  }
}

View file

@ -0,0 +1,8 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { LicenseCheck, licenseChecks } from './license_checks';
export { InsufficientBasicLicenseError, InsufficientFullLicenseError } from './errors';

View file

@ -4,7 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { MlServerLicense } from '../lib/license';
import { MlServerLicense } from '../../lib/license';
import { InsufficientFullLicenseError, InsufficientBasicLicenseError } from './errors';
export type LicenseCheck = () => void;
@ -14,12 +15,12 @@ export function licenseChecks(
return {
isFullLicense() {
if (mlLicense.isFullLicense() === false) {
throw Error('Platinum, Enterprise or trial license needed');
throw new InsufficientFullLicenseError('Platinum, Enterprise or trial license needed');
}
},
isMinimumLicense() {
if (mlLicense.isMinimumLicense() === false) {
throw Error('Basic license needed');
throw new InsufficientBasicLicenseError('Basic license needed');
}
},
};

View file

@ -4,40 +4,33 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server';
import { KibanaRequest } from 'kibana/server';
import { Job } from '../../../common/types/anomaly_detection_jobs';
import { SharedServicesChecks } from '../shared_services';
import { GetGuards } from '../shared_services';
export interface AnomalyDetectorsProvider {
anomalyDetectorsProvider(
mlClusterClient: ILegacyScopedClusterClient,
request: KibanaRequest
): {
jobs(jobId?: string): Promise<{ count: number; jobs: Job[] }>;
};
}
export function getAnomalyDetectorsProvider({
isFullLicense,
getHasMlCapabilities,
}: SharedServicesChecks): AnomalyDetectorsProvider {
export function getAnomalyDetectorsProvider(getGuards: GetGuards): AnomalyDetectorsProvider {
return {
anomalyDetectorsProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) {
// APM is using this service in anomaly alert, kibana alerting doesn't provide request object
// So we are adding a dummy request for now
// TODO: Remove this once kibana alerting provides request object
const hasMlCapabilities =
request.params !== 'DummyKibanaRequest'
? getHasMlCapabilities(request)
: (_caps: string[]) => Promise.resolve();
anomalyDetectorsProvider(request: KibanaRequest) {
return {
async jobs(jobId?: string) {
isFullLicense();
await hasMlCapabilities(['canGetJobs']);
return mlClusterClient.callAsInternalUser(
'ml.jobs',
jobId !== undefined ? { jobId } : {}
);
return await getGuards(request)
.isFullLicense()
.hasMlCapabilities(['canGetJobs'])
.ok(async ({ scopedClient }) => {
const { body } = await scopedClient.asInternalUser.ml.getJobs<{
count: number;
jobs: Job[];
}>(jobId !== undefined ? { job_id: jobId } : undefined);
return body;
});
},
};
},

View file

@ -4,38 +4,32 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server';
import { KibanaRequest } from 'kibana/server';
import { jobServiceProvider } from '../../models/job_service';
import { SharedServicesChecks } from '../shared_services';
import { GetGuards } from '../shared_services';
type OrigJobServiceProvider = ReturnType<typeof jobServiceProvider>;
export interface JobServiceProvider {
jobServiceProvider(
mlClusterClient: ILegacyScopedClusterClient,
request: KibanaRequest
): {
jobsSummary: OrigJobServiceProvider['jobsSummary'];
};
}
export function getJobServiceProvider({
isFullLicense,
getHasMlCapabilities,
}: SharedServicesChecks): JobServiceProvider {
export function getJobServiceProvider(getGuards: GetGuards): JobServiceProvider {
return {
jobServiceProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) {
// const hasMlCapabilities = getHasMlCapabilities(request);
const { jobsSummary } = jobServiceProvider(mlClusterClient);
jobServiceProvider(request: KibanaRequest) {
return {
async jobsSummary(...args) {
isFullLicense();
// Removed while https://github.com/elastic/kibana/issues/64588 exists.
// SIEM are calling this endpoint with a dummy request object from their alerting
// integration and currently alerting does not supply a request object.
// await hasMlCapabilities(['canGetJobs']);
return jobsSummary(...args);
jobsSummary: async (...args) => {
return await getGuards(request)
.isFullLicense()
.hasMlCapabilities(['canGetJobs'])
.ok(async ({ scopedClient }) => {
const { jobsSummary } = jobServiceProvider(scopedClient);
return jobsSummary(...args);
});
},
};
},

View file

@ -4,23 +4,17 @@
* you may not use this file except in compliance with the Elastic License.
*/
import {
ILegacyScopedClusterClient,
KibanaRequest,
SavedObjectsClientContract,
} from 'kibana/server';
import { IScopedClusterClient, KibanaRequest, SavedObjectsClientContract } from 'kibana/server';
import { TypeOf } from '@kbn/config-schema';
import { DataRecognizer } from '../../models/data_recognizer';
import { SharedServicesChecks } from '../shared_services';
import { GetGuards } from '../shared_services';
import { moduleIdParamSchema, setupModuleBodySchema } from '../../routes/schemas/modules';
import { HasMlCapabilities } from '../../lib/capabilities';
export type ModuleSetupPayload = TypeOf<typeof moduleIdParamSchema> &
TypeOf<typeof setupModuleBodySchema>;
export interface ModulesProvider {
modulesProvider(
mlClusterClient: ILegacyScopedClusterClient,
request: KibanaRequest,
savedObjectsClient: SavedObjectsClientContract
): {
@ -31,61 +25,58 @@ export interface ModulesProvider {
};
}
export function getModulesProvider({
isFullLicense,
getHasMlCapabilities,
}: SharedServicesChecks): ModulesProvider {
export function getModulesProvider(getGuards: GetGuards): ModulesProvider {
return {
modulesProvider(
mlClusterClient: ILegacyScopedClusterClient,
request: KibanaRequest,
savedObjectsClient: SavedObjectsClientContract
) {
let hasMlCapabilities: HasMlCapabilities;
if (request.params === 'DummyKibanaRequest') {
hasMlCapabilities = () => Promise.resolve();
} else {
hasMlCapabilities = getHasMlCapabilities(request);
}
const dr = dataRecognizerFactory(mlClusterClient, savedObjectsClient, request);
modulesProvider(request: KibanaRequest, savedObjectsClient: SavedObjectsClientContract) {
return {
async recognize(...args) {
isFullLicense();
await hasMlCapabilities(['canCreateJob']);
return dr.findMatches(...args);
return await getGuards(request)
.isFullLicense()
.hasMlCapabilities(['canGetJobs'])
.ok(async ({ scopedClient }) => {
const dr = dataRecognizerFactory(scopedClient, savedObjectsClient, request);
return dr.findMatches(...args);
});
},
async getModule(moduleId: string) {
isFullLicense();
await hasMlCapabilities(['canGetJobs']);
return dr.getModule(moduleId);
return await getGuards(request)
.isFullLicense()
.hasMlCapabilities(['canGetJobs'])
.ok(async ({ scopedClient }) => {
const dr = dataRecognizerFactory(scopedClient, savedObjectsClient, request);
return dr.getModule(moduleId);
});
},
async listModules() {
isFullLicense();
await hasMlCapabilities(['canGetJobs']);
return dr.listModules();
return await getGuards(request)
.isFullLicense()
.hasMlCapabilities(['canGetJobs'])
.ok(async ({ scopedClient }) => {
const dr = dataRecognizerFactory(scopedClient, savedObjectsClient, request);
return dr.listModules();
});
},
async setup(payload: ModuleSetupPayload) {
isFullLicense();
await hasMlCapabilities(['canCreateJob']);
return dr.setup(
payload.moduleId,
payload.prefix,
payload.groups,
payload.indexPatternName,
payload.query,
payload.useDedicatedIndex,
payload.startDatafeed,
payload.start,
payload.end,
payload.jobOverrides,
payload.datafeedOverrides,
payload.estimateModelMemory
);
return await getGuards(request)
.isFullLicense()
.hasMlCapabilities(['canCreateJob'])
.ok(async ({ scopedClient }) => {
const dr = dataRecognizerFactory(scopedClient, savedObjectsClient, request);
return dr.setup(
payload.moduleId,
payload.prefix,
payload.groups,
payload.indexPatternName,
payload.query,
payload.useDedicatedIndex,
payload.startDatafeed,
payload.start,
payload.end,
payload.jobOverrides,
payload.datafeedOverrides,
payload.estimateModelMemory
);
});
},
};
},
@ -93,9 +84,9 @@ export function getModulesProvider({
}
function dataRecognizerFactory(
mlClusterClient: ILegacyScopedClusterClient,
client: IScopedClusterClient,
savedObjectsClient: SavedObjectsClientContract,
request: KibanaRequest
) {
return new DataRecognizer(mlClusterClient, savedObjectsClient, request);
return new DataRecognizer(client, savedObjectsClient, request);
}

View file

@ -4,41 +4,32 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server';
import { KibanaRequest } from 'kibana/server';
import { resultsServiceProvider } from '../../models/results_service';
import { SharedServicesChecks } from '../shared_services';
import { GetGuards } from '../shared_services';
type OrigResultsServiceProvider = ReturnType<typeof resultsServiceProvider>;
export interface ResultsServiceProvider {
resultsServiceProvider(
mlClusterClient: ILegacyScopedClusterClient,
request: KibanaRequest
): {
getAnomaliesTableData: OrigResultsServiceProvider['getAnomaliesTableData'];
};
}
export function getResultsServiceProvider({
isFullLicense,
getHasMlCapabilities,
}: SharedServicesChecks): ResultsServiceProvider {
export function getResultsServiceProvider(getGuards: GetGuards): ResultsServiceProvider {
return {
resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) {
// Uptime is using this service in anomaly alert, kibana alerting doesn't provide request object
// So we are adding a dummy request for now
// TODO: Remove this once kibana alerting provides request object
const hasMlCapabilities =
request.params !== 'DummyKibanaRequest'
? getHasMlCapabilities(request)
: (_caps: string[]) => Promise.resolve();
const { getAnomaliesTableData } = resultsServiceProvider(mlClusterClient);
resultsServiceProvider(request: KibanaRequest) {
return {
async getAnomaliesTableData(...args) {
isFullLicense();
await hasMlCapabilities(['canGetJobs']);
return getAnomaliesTableData(...args);
return await getGuards(request)
.isFullLicense()
.hasMlCapabilities(['canGetJobs'])
.ok(async ({ scopedClient }) => {
const { getAnomaliesTableData } = resultsServiceProvider(scopedClient);
return getAnomaliesTableData(...args);
});
},
};
},

View file

@ -4,8 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server';
import { SearchResponse, SearchParams } from 'elasticsearch';
import { KibanaRequest } from 'kibana/server';
import { SearchResponse } from 'elasticsearch';
import { RequestParams } from '@elastic/elasticsearch';
import { MlServerLicense } from '../../lib/license';
import { CloudSetup } from '../../../../cloud/server';
import { spacesUtilsProvider } from '../../lib/spaces_utils';
@ -14,73 +15,79 @@ import { capabilitiesProvider } from '../../lib/capabilities';
import { MlInfoResponse } from '../../../common/types/ml_server_info';
import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns';
import { MlCapabilitiesResponse, ResolveMlCapabilities } from '../../../common/types/capabilities';
import { SharedServicesChecks } from '../shared_services';
import { GetGuards } from '../shared_services';
export interface MlSystemProvider {
mlSystemProvider(
mlClusterClient: ILegacyScopedClusterClient,
request: KibanaRequest
): {
mlCapabilities(): Promise<MlCapabilitiesResponse>;
mlInfo(): Promise<MlInfoResponse>;
mlAnomalySearch<T>(searchParams: SearchParams): Promise<SearchResponse<T>>;
mlAnomalySearch<T>(searchParams: RequestParams.Search<any>): Promise<SearchResponse<T>>;
};
}
export function getMlSystemProvider(
{ isMinimumLicense, isFullLicense, getHasMlCapabilities }: SharedServicesChecks,
getGuards: GetGuards,
mlLicense: MlServerLicense,
spaces: SpacesPluginSetup | undefined,
cloud: CloudSetup | undefined,
resolveMlCapabilities: ResolveMlCapabilities
): MlSystemProvider {
return {
mlSystemProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) {
// const hasMlCapabilities = getHasMlCapabilities(request);
const { callAsInternalUser } = mlClusterClient;
mlSystemProvider(request: KibanaRequest) {
return {
async mlCapabilities() {
isMinimumLicense();
return await getGuards(request)
.isMinimumLicense()
.ok(async ({ scopedClient }) => {
const { isMlEnabledInSpace } =
spaces !== undefined
? spacesUtilsProvider(spaces, request)
: { isMlEnabledInSpace: async () => true };
const { isMlEnabledInSpace } =
spaces !== undefined
? spacesUtilsProvider(spaces, request)
: { isMlEnabledInSpace: async () => true };
const mlCapabilities = await resolveMlCapabilities(request);
if (mlCapabilities === null) {
throw new Error('mlCapabilities is not defined');
}
const mlCapabilities = await resolveMlCapabilities(request);
if (mlCapabilities === null) {
throw new Error('mlCapabilities is not defined');
}
const { getCapabilities } = capabilitiesProvider(
mlClusterClient,
mlCapabilities,
mlLicense,
isMlEnabledInSpace
);
return getCapabilities();
const { getCapabilities } = capabilitiesProvider(
scopedClient,
mlCapabilities,
mlLicense,
isMlEnabledInSpace
);
return getCapabilities();
});
},
async mlInfo(): Promise<MlInfoResponse> {
isMinimumLicense();
return await getGuards(request)
.isMinimumLicense()
.ok(async ({ scopedClient }) => {
const { asInternalUser } = scopedClient;
const info = await callAsInternalUser('ml.info');
const cloudId = cloud && cloud.cloudId;
return {
...info,
cloudId,
};
const { body: info } = await asInternalUser.ml.info<MlInfoResponse>();
const cloudId = cloud && cloud.cloudId;
return {
...info,
cloudId,
};
});
},
async mlAnomalySearch<T>(searchParams: SearchParams): Promise<SearchResponse<T>> {
isFullLicense();
// Removed while https://github.com/elastic/kibana/issues/64588 exists.
// SIEM are calling this endpoint with a dummy request object from their alerting
// integration and currently alerting does not supply a request object.
// await hasMlCapabilities(['canAccessML']);
return callAsInternalUser('search', {
...searchParams,
index: ML_RESULTS_INDEX_PATTERN,
});
async mlAnomalySearch<T>(
searchParams: RequestParams.Search<any>
): Promise<SearchResponse<T>> {
return await getGuards(request)
.isFullLicense()
.hasMlCapabilities(['canAccessML'])
.ok(async ({ scopedClient }) => {
const { asInternalUser } = scopedClient;
const { body } = await asInternalUser.search<SearchResponse<T>>({
...searchParams,
index: ML_RESULTS_INDEX_PATTERN,
});
return body;
});
},
};
},

View file

@ -4,7 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { KibanaRequest } from 'kibana/server';
import { IClusterClient, IScopedClusterClient } from 'kibana/server';
// including KibanaRequest from 'kibana/server' causes an error
// when being used with instanceof
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { KibanaRequest } from '../../.././../../src/core/server/http';
import { MlServerLicense } from '../lib/license';
import { SpacesPluginSetup } from '../../../spaces/server';
@ -18,8 +22,9 @@ import {
AnomalyDetectorsProvider,
getAnomalyDetectorsProvider,
} from './providers/anomaly_detectors';
import { ResolveMlCapabilities } from '../../common/types/capabilities';
import { ResolveMlCapabilities, MlCapabilitiesKey } from '../../common/types/capabilities';
import { hasMlCapabilitiesProvider, HasMlCapabilities } from '../lib/capabilities';
import { MLClusterClientUninitialized } from './errors';
export type SharedServices = JobServiceProvider &
AnomalyDetectorsProvider &
@ -27,31 +32,97 @@ export type SharedServices = JobServiceProvider &
ModulesProvider &
ResultsServiceProvider;
export interface SharedServicesChecks {
isFullLicense(): void;
isMinimumLicense(): void;
getHasMlCapabilities(request: KibanaRequest): HasMlCapabilities;
/**
 * Chainable guard builder used by the shared ML services. License checks
 * run synchronously when chained; capability checks are queued and awaited
 * when `ok()` is invoked with the final callback.
 */
interface Guards {
  // Throws if the current license is below Basic. Returns `this` for chaining.
  isMinimumLicense(): Guards;
  // Throws if the current license is below Platinum/Enterprise/trial. Returns `this` for chaining.
  isFullLicense(): Guards;
  // Queues an async check that the request's user has the given ML capabilities.
  hasMlCapabilities: (caps: MlCapabilitiesKey[]) => Guards;
  // Awaits all queued capability checks, then runs `callback` with the
  // request-scoped elasticsearch client and returns its result.
  ok(callback: OkCallback): any;
}

// Factory producing a guard chain bound to a single Kibana request.
export type GetGuards = (request: KibanaRequest) => Guards;

export interface SharedServicesChecks {
  getGuards(request: KibanaRequest): Guards;
}

// Arguments passed to the `ok()` callback once all guards have passed.
interface OkParams {
  scopedClient: IScopedClusterClient;
}

type OkCallback = (okParams: OkParams) => any;
export function createSharedServices(
mlLicense: MlServerLicense,
spaces: SpacesPluginSetup | undefined,
cloud: CloudSetup,
resolveMlCapabilities: ResolveMlCapabilities
resolveMlCapabilities: ResolveMlCapabilities,
getClusterClient: () => IClusterClient | null
): SharedServices {
const getRequestItems = getRequestItemsProvider(resolveMlCapabilities, getClusterClient);
const { isFullLicense, isMinimumLicense } = licenseChecks(mlLicense);
const getHasMlCapabilities = hasMlCapabilitiesProvider(resolveMlCapabilities);
const checks: SharedServicesChecks = {
isFullLicense,
isMinimumLicense,
getHasMlCapabilities,
};
function getGuards(request: KibanaRequest): Guards {
const { hasMlCapabilities, scopedClient } = getRequestItems(request);
const asyncGuards: Array<Promise<void>> = [];
const guards: Guards = {
isMinimumLicense: () => {
isMinimumLicense();
return guards;
},
isFullLicense: () => {
isFullLicense();
return guards;
},
hasMlCapabilities: (caps: MlCapabilitiesKey[]) => {
asyncGuards.push(hasMlCapabilities(caps));
return guards;
},
async ok(callback: OkCallback) {
await Promise.all(asyncGuards);
return callback({ scopedClient });
},
};
return guards;
}
return {
...getJobServiceProvider(checks),
...getAnomalyDetectorsProvider(checks),
...getModulesProvider(checks),
...getResultsServiceProvider(checks),
...getMlSystemProvider(checks, mlLicense, spaces, cloud, resolveMlCapabilities),
...getJobServiceProvider(getGuards),
...getAnomalyDetectorsProvider(getGuards),
...getModulesProvider(getGuards),
...getResultsServiceProvider(getGuards),
...getMlSystemProvider(getGuards, mlLicense, spaces, cloud, resolveMlCapabilities),
};
}
function getRequestItemsProvider(
resolveMlCapabilities: ResolveMlCapabilities,
getClusterClient: () => IClusterClient | null
) {
return (request: KibanaRequest) => {
const getHasMlCapabilities = hasMlCapabilitiesProvider(resolveMlCapabilities);
let hasMlCapabilities: HasMlCapabilities;
let scopedClient: IScopedClusterClient;
// While https://github.com/elastic/kibana/issues/64588 exists we
// will not receive a real request object when being called from an alert.
// instead a dummy request object will be supplied
const clusterClient = getClusterClient();
if (clusterClient === null) {
throw new MLClusterClientUninitialized(`ML's cluster client has not been initialized`);
}
if (request instanceof KibanaRequest) {
hasMlCapabilities = getHasMlCapabilities(request);
scopedClient = clusterClient.asScoped(request);
} else {
hasMlCapabilities = () => Promise.resolve();
const { asInternalUser } = clusterClient;
scopedClient = {
asInternalUser,
asCurrentUser: asInternalUser,
};
}
return { hasMlCapabilities, scopedClient };
};
}

View file

@ -6,13 +6,12 @@
import dateMath from '@elastic/datemath';
import { ILegacyScopedClusterClient, KibanaRequest } from '../../../../../../../src/core/server';
import { KibanaRequest } from '../../../../../../../src/core/server';
import { MlPluginSetup } from '../../../../../ml/server';
import { getAnomalies } from '../../machine_learning';
export const findMlSignals = async ({
ml,
clusterClient,
request,
jobId,
anomalyThreshold,
@ -20,14 +19,13 @@ export const findMlSignals = async ({
to,
}: {
ml: MlPluginSetup;
clusterClient: ILegacyScopedClusterClient;
request: KibanaRequest;
jobId: string;
anomalyThreshold: number;
from: string;
to: string;
}) => {
const { mlAnomalySearch } = ml.mlSystemProvider(clusterClient, request);
const { mlAnomalySearch } = ml.mlSystemProvider(request);
const params = {
jobIds: [jobId],
threshold: anomalyThreshold,

View file

@ -185,12 +185,12 @@ export const signalRulesAlertType = ({
);
}
const scopedClusterClient = services.getLegacyScopedClusterClient(ml.mlClient);
// Using fake KibanaRequest as it is needed to satisfy the ML Services API, but can be empty as it is
// currently unused by the jobsSummary function.
const summaryJobs = await (
await ml.jobServiceProvider(scopedClusterClient, ({} as unknown) as KibanaRequest)
).jobsSummary([machineLearningJobId]);
const fakeRequest = {} as KibanaRequest;
const summaryJobs = await ml
.jobServiceProvider(fakeRequest)
.jobsSummary([machineLearningJobId]);
const jobSummary = summaryJobs.find((job) => job.id === machineLearningJobId);
if (jobSummary == null || !isJobStarted(jobSummary.jobState, jobSummary.datafeedState)) {
@ -207,7 +207,6 @@ export const signalRulesAlertType = ({
const anomalyResults = await findMlSignals({
ml,
clusterClient: scopedClusterClient,
// Using fake KibanaRequest as it is needed to satisfy the ML Services API, but can be empty as it is
// currently unused by the mlAnomalySearch function.
request: ({} as unknown) as KibanaRequest,

View file

@ -114,7 +114,6 @@ export const isMlAdmin = async ({
request: KibanaRequest;
ml: MlPluginSetup;
}): Promise<boolean> => {
const scopedMlClient = ml.mlClient.asScoped(request);
const mlCapabilities = await ml.mlSystemProvider(scopedMlClient, request).mlCapabilities();
const mlCapabilities = await ml.mlSystemProvider(request).mlCapabilities();
return hasMlAdminPermissions(mlCapabilities);
};

View file

@ -4,13 +4,14 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { SearchResponse, SearchParams } from 'elasticsearch';
import { SearchResponse } from 'elasticsearch';
import { RequestParams } from '@elastic/elasticsearch';
import { AnomalyRecordDoc as Anomaly } from '../../../../ml/server';
export { Anomaly };
export type AnomalyResults = SearchResponse<Anomaly>;
type MlAnomalySearch = <T>(searchParams: SearchParams) => Promise<SearchResponse<T>>;
type MlAnomalySearch = <T>(searchParams: RequestParams.Search) => Promise<SearchResponse<T>>;
export interface AnomaliesSearchParams {
jobIds: string[];

View file

@ -172,18 +172,12 @@ export const getMlJobsUsage = async (ml: MlPluginSetup | undefined): Promise<MlJ
if (ml) {
try {
const fakeRequest = { headers: {}, params: 'DummyKibanaRequest' } as KibanaRequest;
const fakeRequest = {} as KibanaRequest;
const fakeSOClient = {} as SavedObjectsClient;
const internalMlClient = {
callAsCurrentUser: ml?.mlClient.callAsInternalUser,
callAsInternalUser: ml?.mlClient.callAsInternalUser,
};
const modules = await ml
.modulesProvider(internalMlClient, fakeRequest, fakeSOClient)
.listModules();
const modules = await ml.modulesProvider(fakeRequest, fakeSOClient).listModules();
const moduleJobs = modules.flatMap((module) => module.jobs);
const jobs = await ml.jobServiceProvider(internalMlClient, fakeRequest).jobsSummary();
const jobs = await ml.jobServiceProvider(fakeRequest).jobsSummary();
jobsUsage = jobs.filter(isSecurityJob).reduce((usage, job) => {
const isElastic = moduleJobs.some((moduleJob) => moduleJob.id === job.id);

View file

@ -34,7 +34,7 @@ export function registerFieldHistogramsRoutes({ router, license }: RouteDependen
try {
const resp = await getHistogramsForFields(
ctx.transform!.dataClient,
ctx.core.elasticsearch.client,
indexPatternTitle,
query,
fields,

View file

@ -4,9 +4,9 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { KibanaRequest } from 'kibana/server';
import moment from 'moment';
import { schema } from '@kbn/config-schema';
import { ILegacyScopedClusterClient } from 'kibana/server';
import { updateState } from './common';
import { ACTION_GROUP_DEFINITIONS } from '../../../common/constants/alerts';
import { commonStateTranslations, durationAnomalyTranslations } from './translations';
@ -36,13 +36,11 @@ export const getAnomalySummary = (anomaly: AnomaliesTableRecord, monitorInfo: Pi
const getAnomalies = async (
plugins: UptimeCorePlugins,
mlClusterClient: ILegacyScopedClusterClient,
params: Record<any, any>,
lastCheckedAt: string
) => {
const { getAnomaliesTableData } = plugins.ml.resultsServiceProvider(mlClusterClient, {
params: 'DummyKibanaRequest',
} as any);
const fakeRequest = {} as KibanaRequest;
const { getAnomaliesTableData } = plugins.ml.resultsServiceProvider(fakeRequest);
return await getAnomaliesTableData(
[getMLJobId(params.monitorId)],
@ -82,23 +80,12 @@ export const durationAnomalyAlertFactory: UptimeAlertTypeFactory = (_server, _li
producer: 'uptime',
async executor(options) {
const {
services: {
alertInstanceFactory,
callCluster,
savedObjectsClient,
getLegacyScopedClusterClient,
},
services: { alertInstanceFactory, callCluster, savedObjectsClient },
state,
params,
} = options;
const { anomalies } =
(await getAnomalies(
plugins,
getLegacyScopedClusterClient(plugins.ml.mlClient),
params,
state.lastCheckedAt
)) ?? {};
const { anomalies } = (await getAnomalies(plugins, params, state.lastCheckedAt)) ?? {};
const foundAnomalies = anomalies?.length > 0;

View file

@ -258,7 +258,7 @@ export default ({ getService }: FtrProviderContext) => {
description: 'Not found',
};
const id = `${jobId}_invalid`;
const message = `[resource_not_found_exception] No known data frame analytics with id [${id}]`;
const message = 'resource_not_found_exception';
const { body } = await supertest
.post(`/api/ml/data_frame/analytics/${id}/_update`)

View file

@ -60,8 +60,7 @@ export default ({ getService }: FtrProviderContext) => {
responseBody: {
statusCode: 404,
error: 'Not Found',
message:
'[index_not_found_exception] no such index [ft_farequote_not_exists], with { resource.type="index_or_alias" & resource.id="ft_farequote_not_exists" & index_uuid="_na_" & index="ft_farequote_not_exists" }',
message: 'index_not_found_exception',
},
},
};

View file

@ -152,8 +152,7 @@ export default ({ getService }: FtrProviderContext) => {
responseBody: {
statusCode: 404,
error: 'Not Found',
message:
'[index_not_found_exception] no such index [ft_farequote_not_exists], with { resource.type="index_or_alias" & resource.id="ft_farequote_not_exists" & index_uuid="_na_" & index="ft_farequote_not_exists" }',
message: 'index_not_found_exception',
},
},
};

View file

@ -116,8 +116,7 @@ export default ({ getService }: FtrProviderContext) => {
responseBody: {
statusCode: 404,
error: 'Not Found',
message:
'[index_not_found_exception] no such index [ft_farequote_not_exist], with { resource.type="index_or_alias" & resource.id="ft_farequote_not_exist" & index_uuid="_na_" & index="ft_farequote_not_exist" }',
message: 'index_not_found_exception',
},
},
},

Some files were not shown because too many files have changed in this diff Show more