[ML] Versioning file upload APIs (#158265)

Adds versioning to all of the file upload APIs.
Versions are added to the server side routes and to the client side
functions which call the routes.
Updates API tests to add the API version to the request headers.

All of the APIs are internal and have been given the version '1'.

Also renames `/internal/file_data_visualizer/analyze_file` to
`/internal/file_upload/analyze_file`
It appears this was a mistake from when the route was moved from the
Data Visualizer plugin midway through development on [this
PR](https://github.com/elastic/kibana/pull/96408).

**Internal APIs**

`/internal/file_upload/analyze_file`
`/internal/file_upload/has_import_permission`
`/internal/file_upload/index_exists`
`/internal/file_upload/time_field_range`

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
James Gowdy 2023-05-24 18:17:57 +01:00 committed by GitHub
parent b388d704c2
commit e77b45f9f9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 173 additions and 124 deletions

View file

@ -54,8 +54,9 @@ export async function analyzeFile(
const { getHttp } = await lazyLoadModules(); const { getHttp } = await lazyLoadModules();
const body = JSON.stringify(file); const body = JSON.stringify(file);
return await getHttp().fetch<FindFileStructureResponse>({ return await getHttp().fetch<FindFileStructureResponse>({
path: `/internal/file_data_visualizer/analyze_file`, path: `/internal/file_upload/analyze_file`,
method: 'POST', method: 'POST',
version: '1',
body, body,
query: params, query: params,
}); });
@ -67,6 +68,7 @@ export async function hasImportPermission(params: HasImportPermissionParams): Pr
const resp = await fileUploadModules.getHttp().fetch<HasImportPermission>({ const resp = await fileUploadModules.getHttp().fetch<HasImportPermission>({
path: `/internal/file_upload/has_import_permission`, path: `/internal/file_upload/has_import_permission`,
method: 'GET', method: 'GET',
version: '1',
query: { ...params }, query: { ...params },
}); });
return resp.hasImportPermission; return resp.hasImportPermission;
@ -85,6 +87,7 @@ export async function checkIndexExists(
const { exists } = await fileUploadModules.getHttp().fetch<{ exists: boolean }>({ const { exists } = await fileUploadModules.getHttp().fetch<{ exists: boolean }>({
path: `/internal/file_upload/index_exists`, path: `/internal/file_upload/index_exists`,
method: 'POST', method: 'POST',
version: '1',
body, body,
query: params, query: params,
}); });
@ -101,6 +104,7 @@ export async function getTimeFieldRange(index: string, query: unknown, timeField
return await fileUploadModules.getHttp().fetch<GetTimeFieldRangeResponse>({ return await fileUploadModules.getHttp().fetch<GetTimeFieldRangeResponse>({
path: `/internal/file_upload/time_field_range`, path: `/internal/file_upload/time_field_range`,
method: 'POST', method: 'POST',
version: '1',
body, body,
}); });
} }

View file

@ -298,6 +298,7 @@ export function callImportRoute({
return getHttp().fetch<ImportResponse>({ return getHttp().fetch<ImportResponse>({
path: `/internal/file_upload/import`, path: `/internal/file_upload/import`,
method: 'POST', method: 'POST',
version: '1',
query, query,
body, body,
}); });

View file

@ -44,37 +44,44 @@ function importData(
export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logger: Logger) { export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logger: Logger) {
const router = coreSetup.http.createRouter(); const router = coreSetup.http.createRouter();
router.get( router.versioned
{ .get({
path: '/internal/file_upload/has_import_permission', path: '/internal/file_upload/has_import_permission',
validate: { access: 'internal',
query: schema.object({ })
indexName: schema.maybe(schema.string()), .addVersion(
checkCreateDataView: schema.boolean(), {
checkHasManagePipeline: schema.boolean(), version: '1',
}), validate: {
request: {
query: schema.object({
indexName: schema.maybe(schema.string()),
checkCreateDataView: schema.boolean(),
checkHasManagePipeline: schema.boolean(),
}),
},
},
}, },
}, async (context, request, response) => {
async (context, request, response) => { try {
try { const [, pluginsStart] = await coreSetup.getStartServices();
const [, pluginsStart] = await coreSetup.getStartServices(); const { indexName, checkCreateDataView, checkHasManagePipeline } = request.query;
const { indexName, checkCreateDataView, checkHasManagePipeline } = request.query;
const { hasImportPermission } = await checkFileUploadPrivileges({ const { hasImportPermission } = await checkFileUploadPrivileges({
authorization: pluginsStart.security?.authz, authorization: pluginsStart.security?.authz,
request, request,
indexName, indexName,
checkCreateDataView, checkCreateDataView,
checkHasManagePipeline, checkHasManagePipeline,
}); });
return response.ok({ body: { hasImportPermission } }); return response.ok({ body: { hasImportPermission } });
} catch (e) { } catch (e) {
logger.warn(`Unable to check import permission, error: ${e.message}`); logger.warn(`Unable to check import permission, error: ${e.message}`);
return response.ok({ body: { hasImportPermission: false } }); return response.ok({ body: { hasImportPermission: false } });
}
} }
} );
);
/** /**
* @apiGroup FileDataVisualizer * @apiGroup FileDataVisualizer
@ -85,13 +92,10 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
* *
* @apiSchema (query) analyzeFileQuerySchema * @apiSchema (query) analyzeFileQuerySchema
*/ */
router.post( router.versioned
{ .post({
path: '/internal/file_data_visualizer/analyze_file', path: '/internal/file_upload/analyze_file',
validate: { access: 'internal',
body: schema.any(),
query: analyzeFileQuerySchema,
},
options: { options: {
body: { body: {
accepts: ['text/*', 'application/json'], accepts: ['text/*', 'application/json'],
@ -99,17 +103,27 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
}, },
tags: ['access:fileUpload:analyzeFile'], tags: ['access:fileUpload:analyzeFile'],
}, },
}, })
async (context, request, response) => { .addVersion(
try { {
const esClient = (await context.core).elasticsearch.client; version: '1',
const result = await analyzeFile(esClient, request.body, request.query); validate: {
return response.ok({ body: result }); request: {
} catch (e) { body: schema.any(),
return response.customError(wrapError(e)); query: analyzeFileQuerySchema,
},
},
},
async (context, request, response) => {
try {
const esClient = (await context.core).elasticsearch.client;
const result = await analyzeFile(esClient, request.body, request.query);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
}
} }
} );
);
/** /**
* @apiGroup FileDataVisualizer * @apiGroup FileDataVisualizer
@ -121,49 +135,56 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
* @apiSchema (query) importFileQuerySchema * @apiSchema (query) importFileQuerySchema
* @apiSchema (body) importFileBodySchema * @apiSchema (body) importFileBodySchema
*/ */
router.post( router.versioned
{ .post({
path: '/internal/file_upload/import', path: '/internal/file_upload/import',
validate: { access: 'internal',
query: importFileQuerySchema,
body: importFileBodySchema,
},
options: { options: {
body: { body: {
accepts: ['application/json'], accepts: ['application/json'],
maxBytes: MAX_FILE_SIZE_BYTES, maxBytes: MAX_FILE_SIZE_BYTES,
}, },
}, },
}, })
async (context, request, response) => { .addVersion(
try { {
const { id } = request.query; version: '1',
const { index, data, settings, mappings, ingestPipeline } = request.body; validate: {
const esClient = (await context.core).elasticsearch.client; request: {
query: importFileQuerySchema,
body: importFileBodySchema,
},
},
},
async (context, request, response) => {
try {
const { id } = request.query;
const { index, data, settings, mappings, ingestPipeline } = request.body;
const esClient = (await context.core).elasticsearch.client;
// `id` being `undefined` tells us that this is a new import due to create a new index. // `id` being `undefined` tells us that this is a new import due to create a new index.
// follow-up import calls to just add additional data will include the `id` of the created // follow-up import calls to just add additional data will include the `id` of the created
// index, we'll ignore those and don't increment the counter. // index, we'll ignore those and don't increment the counter.
if (id === undefined) { if (id === undefined) {
await updateTelemetry(); await updateTelemetry();
}
const result = await importData(
esClient,
id,
index,
settings,
mappings,
// @ts-expect-error
ingestPipeline,
data
);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
} }
const result = await importData(
esClient,
id,
index,
settings,
mappings,
// @ts-expect-error
ingestPipeline,
data
);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
} }
} );
);
/** /**
* @apiGroup FileDataVisualizer * @apiGroup FileDataVisualizer
@ -171,23 +192,30 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
* @api {post} /internal/file_upload/index_exists ES indices exists wrapper checks if index exists * @api {post} /internal/file_upload/index_exists ES indices exists wrapper checks if index exists
* @apiName IndexExists * @apiName IndexExists
*/ */
router.post( router.versioned
{ .post({
path: '/internal/file_upload/index_exists', path: '/internal/file_upload/index_exists',
validate: { access: 'internal',
body: schema.object({ index: schema.string() }), })
.addVersion(
{
version: '1',
validate: {
request: {
body: schema.object({ index: schema.string() }),
},
},
}, },
}, async (context, request, response) => {
async (context, request, response) => { try {
try { const esClient = (await context.core).elasticsearch.client;
const esClient = (await context.core).elasticsearch.client; const indexExists = await esClient.asCurrentUser.indices.exists(request.body);
const indexExists = await esClient.asCurrentUser.indices.exists(request.body); return response.ok({ body: { exists: indexExists } });
return response.ok({ body: { exists: indexExists } }); } catch (e) {
} catch (e) { return response.customError(wrapError(e));
return response.customError(wrapError(e)); }
} }
} );
);
/** /**
* @apiGroup FileDataVisualizer * @apiGroup FileDataVisualizer
@ -201,42 +229,49 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
* @apiSuccess {Object} start start of time range with epoch and string properties. * @apiSuccess {Object} start start of time range with epoch and string properties.
* @apiSuccess {Object} end end of time range with epoch and string properties. * @apiSuccess {Object} end end of time range with epoch and string properties.
*/ */
router.post( router.versioned
{ .post({
path: '/internal/file_upload/time_field_range', path: '/internal/file_upload/time_field_range',
validate: { access: 'internal',
body: schema.object({
/** Index or indexes for which to return the time range. */
index: schema.oneOf([schema.string(), schema.arrayOf(schema.string())]),
/** Name of the time field in the index. */
timeFieldName: schema.string(),
/** Query to match documents in the index(es). */
query: schema.maybe(schema.any()),
runtimeMappings: schema.maybe(runtimeMappingsSchema),
}),
},
options: { options: {
tags: ['access:fileUpload:analyzeFile'], tags: ['access:fileUpload:analyzeFile'],
}, },
}, })
async (context, request, response) => { .addVersion(
try { {
const { index, timeFieldName, query, runtimeMappings } = request.body; version: '1',
const esClient = (await context.core).elasticsearch.client; validate: {
const resp = await getTimeFieldRange( request: {
esClient, body: schema.object({
index, /** Index or indexes for which to return the time range. */
timeFieldName, index: schema.oneOf([schema.string(), schema.arrayOf(schema.string())]),
query, /** Name of the time field in the index. */
runtimeMappings timeFieldName: schema.string(),
); /** Query to match documents in the index(es). */
query: schema.maybe(schema.any()),
runtimeMappings: schema.maybe(runtimeMappingsSchema),
}),
},
},
},
async (context, request, response) => {
try {
const { index, timeFieldName, query, runtimeMappings } = request.body;
const esClient = (await context.core).elasticsearch.client;
const resp = await getTimeFieldRange(
esClient,
index,
timeFieldName,
query,
runtimeMappings
);
return response.ok({ return response.ok({
body: resp, body: resp,
}); });
} catch (e) { } catch (e) {
return response.customError(wrapError(e)); return response.customError(wrapError(e));
}
} }
} );
);
} }

View file

@ -5,6 +5,7 @@
* 2.0. * 2.0.
*/ */
import { ELASTIC_HTTP_VERSION_HEADER } from '@kbn/core-http-common';
import expect from '@kbn/expect'; import expect from '@kbn/expect';
import { FtrProviderContext } from '../../ftr_provider_context'; import { FtrProviderContext } from '../../ftr_provider_context';
@ -54,6 +55,7 @@ export default ({ getService }: FtrProviderContext) => {
) )
.auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD) .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
.set('kbn-xsrf', 'kibana') .set('kbn-xsrf', 'kibana')
.set(ELASTIC_HTTP_VERSION_HEADER, '1')
.expect(200); .expect(200);
expect(resp.body.hasImportPermission).to.be(true); expect(resp.body.hasImportPermission).to.be(true);
@ -80,6 +82,7 @@ export default ({ getService }: FtrProviderContext) => {
) )
.auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD) .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
.set('kbn-xsrf', 'kibana') .set('kbn-xsrf', 'kibana')
.set(ELASTIC_HTTP_VERSION_HEADER, '1')
.send() .send()
.expect(200); .expect(200);
@ -107,6 +110,7 @@ export default ({ getService }: FtrProviderContext) => {
) )
.auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD) .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
.set('kbn-xsrf', 'kibana') .set('kbn-xsrf', 'kibana')
.set(ELASTIC_HTTP_VERSION_HEADER, '1')
.expect(200); .expect(200);
expect(resp.body.hasImportPermission).to.be(false); expect(resp.body.hasImportPermission).to.be(false);
@ -134,6 +138,7 @@ export default ({ getService }: FtrProviderContext) => {
) )
.auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD) .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
.set('kbn-xsrf', 'kibana') .set('kbn-xsrf', 'kibana')
.set(ELASTIC_HTTP_VERSION_HEADER, '1')
.expect(200); .expect(200);
expect(resp.body.hasImportPermission).to.be(false); expect(resp.body.hasImportPermission).to.be(false);

View file

@ -5,6 +5,7 @@
* 2.0. * 2.0.
*/ */
import { ELASTIC_HTTP_VERSION_HEADER } from '@kbn/core-http-common';
import expect from '@kbn/expect'; import expect from '@kbn/expect';
import { FtrProviderContext } from '../../ftr_provider_context'; import { FtrProviderContext } from '../../ftr_provider_context';
@ -25,6 +26,7 @@ export default ({ getService }: FtrProviderContext) => {
const resp = await supertest const resp = await supertest
.post(`/internal/file_upload/index_exists`) .post(`/internal/file_upload/index_exists`)
.set('kbn-xsrf', 'kibana') .set('kbn-xsrf', 'kibana')
.set(ELASTIC_HTTP_VERSION_HEADER, '1')
.send({ .send({
index: 'logstash-2015.09.22', index: 'logstash-2015.09.22',
}) })
@ -37,6 +39,7 @@ export default ({ getService }: FtrProviderContext) => {
const resp = await supertest const resp = await supertest
.post(`/internal/file_upload/index_exists`) .post(`/internal/file_upload/index_exists`)
.set('kbn-xsrf', 'kibana') .set('kbn-xsrf', 'kibana')
.set(ELASTIC_HTTP_VERSION_HEADER, '1')
.send({ .send({
index: 'myNewIndex', index: 'myNewIndex',
}) })

View file

@ -123,6 +123,7 @@
"@kbn/infra-forge", "@kbn/infra-forge",
"@kbn/observability-shared-plugin", "@kbn/observability-shared-plugin",
"@kbn/maps-vector-tile-utils", "@kbn/maps-vector-tile-utils",
"@kbn/server-route-repository" "@kbn/server-route-repository",
"@kbn/core-http-common"
] ]
} }