Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 17:59:23 -04:00)
[ML] Versioning file upload APIs (#158265)
Adds versioning to all of the file upload APIs. Versions are added to the server-side routes and to the client-side functions which call those routes, and the API tests are updated to add the API version to the request headers. All of the APIs are internal and have been given the version '1'.

Also renames `/internal/file_data_visualizer/analyze_file` to `/internal/file_upload/analyze_file`; the old path appears to have been a mistake from when the route was moved out of the Data Visualizer plugin midway through development on [this PR](https://github.com/elastic/kibana/pull/96408).

**Internal APIs**

`/internal/file_upload/analyze_file`
`/internal/file_upload/has_import_permission`
`/internal/file_upload/index_exists`
`/internal/file_upload/time_field_range`

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent: b388d704c2
commit: e77b45f9f9

6 changed files with 173 additions and 124 deletions
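Every route in the diff below follows the same pattern: the server registers the route through `router.versioned` with `access: 'internal'` and attaches its handler via `.addVersion()` with version `'1'`, while the client passes the matching `version` in its fetch options. Below is a minimal sketch of that pairing, using the `index_exists` route as the example; the imports, the wrapper function names, and the stubbed handler body are illustrative assumptions, not code from this commit, and in a real plugin the server and client halves live in separate files.

```ts
import { schema } from '@kbn/config-schema';
import type { IRouter } from '@kbn/core/server';
import type { HttpSetup } from '@kbn/core/public';

// Server side (sketch): register an internal route and attach version '1' to it.
export function registerIndexExistsRoute(router: IRouter) {
  router.versioned
    .post({
      path: '/internal/file_upload/index_exists',
      access: 'internal',
    })
    .addVersion(
      {
        version: '1',
        validate: {
          request: {
            body: schema.object({ index: schema.string() }),
          },
        },
      },
      async (context, request, response) => {
        // The real handler checks the index via the scoped ES client; stubbed here.
        return response.ok({ body: { exists: true } });
      }
    );
}

// Client side (sketch): the matching fetch pins the same version.
export async function checkIndexExistsViaHttp(http: HttpSetup, index: string) {
  const { exists } = await http.fetch<{ exists: boolean }>({
    path: '/internal/file_upload/index_exists',
    method: 'POST',
    version: '1',
    body: JSON.stringify({ index }),
  });
  return exists;
}
```

In the functional tests the same version is pinned explicitly by setting the `ELASTIC_HTTP_VERSION_HEADER` request header to `'1'`, as the updated tests further down show.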
@@ -54,8 +54,9 @@ export async function analyzeFile(
   const { getHttp } = await lazyLoadModules();
   const body = JSON.stringify(file);
   return await getHttp().fetch<FindFileStructureResponse>({
-    path: `/internal/file_data_visualizer/analyze_file`,
+    path: `/internal/file_upload/analyze_file`,
     method: 'POST',
+    version: '1',
     body,
     query: params,
   });

@@ -67,6 +68,7 @@ export async function hasImportPermission(params: HasImportPermissionParams): Pr
   const resp = await fileUploadModules.getHttp().fetch<HasImportPermission>({
     path: `/internal/file_upload/has_import_permission`,
     method: 'GET',
+    version: '1',
     query: { ...params },
   });
   return resp.hasImportPermission;

@@ -85,6 +87,7 @@ export async function checkIndexExists(
   const { exists } = await fileUploadModules.getHttp().fetch<{ exists: boolean }>({
     path: `/internal/file_upload/index_exists`,
     method: 'POST',
+    version: '1',
     body,
     query: params,
   });

@@ -101,6 +104,7 @@ export async function getTimeFieldRange(index: string, query: unknown, timeField
   return await fileUploadModules.getHttp().fetch<GetTimeFieldRangeResponse>({
     path: `/internal/file_upload/time_field_range`,
     method: 'POST',
+    version: '1',
     body,
   });
 }

@@ -298,6 +298,7 @@ export function callImportRoute({
   return getHttp().fetch<ImportResponse>({
     path: `/internal/file_upload/import`,
     method: 'POST',
+    version: '1',
     query,
     body,
   });

@@ -44,37 +44,44 @@ function importData(
 export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logger: Logger) {
   const router = coreSetup.http.createRouter();
 
-  router.get(
-    {
+  router.versioned
+    .get({
       path: '/internal/file_upload/has_import_permission',
-      validate: {
-        query: schema.object({
-          indexName: schema.maybe(schema.string()),
-          checkCreateDataView: schema.boolean(),
-          checkHasManagePipeline: schema.boolean(),
-        }),
-      },
-    },
-    async (context, request, response) => {
-      try {
-        const [, pluginsStart] = await coreSetup.getStartServices();
-        const { indexName, checkCreateDataView, checkHasManagePipeline } = request.query;
-
-        const { hasImportPermission } = await checkFileUploadPrivileges({
-          authorization: pluginsStart.security?.authz,
-          request,
-          indexName,
-          checkCreateDataView,
-          checkHasManagePipeline,
-        });
-
-        return response.ok({ body: { hasImportPermission } });
-      } catch (e) {
-        logger.warn(`Unable to check import permission, error: ${e.message}`);
-        return response.ok({ body: { hasImportPermission: false } });
-      }
-    }
-  );
+      access: 'internal',
+    })
+    .addVersion(
+      {
+        version: '1',
+        validate: {
+          request: {
+            query: schema.object({
+              indexName: schema.maybe(schema.string()),
+              checkCreateDataView: schema.boolean(),
+              checkHasManagePipeline: schema.boolean(),
+            }),
+          },
+        },
+      },
+      async (context, request, response) => {
+        try {
+          const [, pluginsStart] = await coreSetup.getStartServices();
+          const { indexName, checkCreateDataView, checkHasManagePipeline } = request.query;
+
+          const { hasImportPermission } = await checkFileUploadPrivileges({
+            authorization: pluginsStart.security?.authz,
+            request,
+            indexName,
+            checkCreateDataView,
+            checkHasManagePipeline,
+          });
+
+          return response.ok({ body: { hasImportPermission } });
+        } catch (e) {
+          logger.warn(`Unable to check import permission, error: ${e.message}`);
+          return response.ok({ body: { hasImportPermission: false } });
+        }
+      }
+    );
 
   /**
    * @apiGroup FileDataVisualizer

@@ -85,13 +92,10 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
    *
    * @apiSchema (query) analyzeFileQuerySchema
    */
-  router.post(
-    {
-      path: '/internal/file_data_visualizer/analyze_file',
-      validate: {
-        body: schema.any(),
-        query: analyzeFileQuerySchema,
-      },
+  router.versioned
+    .post({
+      path: '/internal/file_upload/analyze_file',
+      access: 'internal',
       options: {
         body: {
           accepts: ['text/*', 'application/json'],

@@ -99,17 +103,27 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
         },
         tags: ['access:fileUpload:analyzeFile'],
       },
-    },
-    async (context, request, response) => {
-      try {
-        const esClient = (await context.core).elasticsearch.client;
-        const result = await analyzeFile(esClient, request.body, request.query);
-        return response.ok({ body: result });
-      } catch (e) {
-        return response.customError(wrapError(e));
-      }
-    }
-  );
+    })
+    .addVersion(
+      {
+        version: '1',
+        validate: {
+          request: {
+            body: schema.any(),
+            query: analyzeFileQuerySchema,
+          },
+        },
+      },
+      async (context, request, response) => {
+        try {
+          const esClient = (await context.core).elasticsearch.client;
+          const result = await analyzeFile(esClient, request.body, request.query);
+          return response.ok({ body: result });
+        } catch (e) {
+          return response.customError(wrapError(e));
+        }
+      }
+    );
 
   /**
    * @apiGroup FileDataVisualizer

@@ -121,49 +135,56 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
    * @apiSchema (query) importFileQuerySchema
    * @apiSchema (body) importFileBodySchema
    */
-  router.post(
-    {
+  router.versioned
+    .post({
       path: '/internal/file_upload/import',
-      validate: {
-        query: importFileQuerySchema,
-        body: importFileBodySchema,
-      },
+      access: 'internal',
       options: {
         body: {
           accepts: ['application/json'],
           maxBytes: MAX_FILE_SIZE_BYTES,
         },
       },
-    },
-    async (context, request, response) => {
-      try {
-        const { id } = request.query;
-        const { index, data, settings, mappings, ingestPipeline } = request.body;
-        const esClient = (await context.core).elasticsearch.client;
-
-        // `id` being `undefined` tells us that this is a new import due to create a new index.
-        // follow-up import calls to just add additional data will include the `id` of the created
-        // index, we'll ignore those and don't increment the counter.
-        if (id === undefined) {
-          await updateTelemetry();
-        }
-
-        const result = await importData(
-          esClient,
-          id,
-          index,
-          settings,
-          mappings,
-          // @ts-expect-error
-          ingestPipeline,
-          data
-        );
-        return response.ok({ body: result });
-      } catch (e) {
-        return response.customError(wrapError(e));
-      }
-    }
-  );
+    })
+    .addVersion(
+      {
+        version: '1',
+        validate: {
+          request: {
+            query: importFileQuerySchema,
+            body: importFileBodySchema,
+          },
+        },
+      },
+      async (context, request, response) => {
+        try {
+          const { id } = request.query;
+          const { index, data, settings, mappings, ingestPipeline } = request.body;
+          const esClient = (await context.core).elasticsearch.client;
+
+          // `id` being `undefined` tells us that this is a new import due to create a new index.
+          // follow-up import calls to just add additional data will include the `id` of the created
+          // index, we'll ignore those and don't increment the counter.
+          if (id === undefined) {
+            await updateTelemetry();
+          }
+
+          const result = await importData(
+            esClient,
+            id,
+            index,
+            settings,
+            mappings,
+            // @ts-expect-error
+            ingestPipeline,
+            data
+          );
+          return response.ok({ body: result });
+        } catch (e) {
+          return response.customError(wrapError(e));
+        }
+      }
+    );
 
   /**
    * @apiGroup FileDataVisualizer

@@ -171,23 +192,30 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
    * @api {post} /internal/file_upload/index_exists ES indices exists wrapper checks if index exists
    * @apiName IndexExists
    */
-  router.post(
-    {
+  router.versioned
+    .post({
       path: '/internal/file_upload/index_exists',
-      validate: {
-        body: schema.object({ index: schema.string() }),
-      },
-    },
-    async (context, request, response) => {
-      try {
-        const esClient = (await context.core).elasticsearch.client;
-        const indexExists = await esClient.asCurrentUser.indices.exists(request.body);
-        return response.ok({ body: { exists: indexExists } });
-      } catch (e) {
-        return response.customError(wrapError(e));
-      }
-    }
-  );
+      access: 'internal',
+    })
+    .addVersion(
+      {
+        version: '1',
+        validate: {
+          request: {
+            body: schema.object({ index: schema.string() }),
+          },
+        },
+      },
+      async (context, request, response) => {
+        try {
+          const esClient = (await context.core).elasticsearch.client;
+          const indexExists = await esClient.asCurrentUser.indices.exists(request.body);
+          return response.ok({ body: { exists: indexExists } });
+        } catch (e) {
+          return response.customError(wrapError(e));
+        }
+      }
+    );
 
   /**
    * @apiGroup FileDataVisualizer

@@ -201,42 +229,49 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
    * @apiSuccess {Object} start start of time range with epoch and string properties.
    * @apiSuccess {Object} end end of time range with epoch and string properties.
    */
-  router.post(
-    {
+  router.versioned
+    .post({
       path: '/internal/file_upload/time_field_range',
-      validate: {
-        body: schema.object({
-          /** Index or indexes for which to return the time range. */
-          index: schema.oneOf([schema.string(), schema.arrayOf(schema.string())]),
-          /** Name of the time field in the index. */
-          timeFieldName: schema.string(),
-          /** Query to match documents in the index(es). */
-          query: schema.maybe(schema.any()),
-          runtimeMappings: schema.maybe(runtimeMappingsSchema),
-        }),
-      },
+      access: 'internal',
       options: {
         tags: ['access:fileUpload:analyzeFile'],
       },
-    },
-    async (context, request, response) => {
-      try {
-        const { index, timeFieldName, query, runtimeMappings } = request.body;
-        const esClient = (await context.core).elasticsearch.client;
-        const resp = await getTimeFieldRange(
-          esClient,
-          index,
-          timeFieldName,
-          query,
-          runtimeMappings
-        );
-
-        return response.ok({
-          body: resp,
-        });
-      } catch (e) {
-        return response.customError(wrapError(e));
-      }
-    }
-  );
+    })
+    .addVersion(
+      {
+        version: '1',
+        validate: {
+          request: {
+            body: schema.object({
+              /** Index or indexes for which to return the time range. */
+              index: schema.oneOf([schema.string(), schema.arrayOf(schema.string())]),
+              /** Name of the time field in the index. */
+              timeFieldName: schema.string(),
+              /** Query to match documents in the index(es). */
+              query: schema.maybe(schema.any()),
+              runtimeMappings: schema.maybe(runtimeMappingsSchema),
+            }),
+          },
+        },
+      },
+      async (context, request, response) => {
+        try {
+          const { index, timeFieldName, query, runtimeMappings } = request.body;
+          const esClient = (await context.core).elasticsearch.client;
+          const resp = await getTimeFieldRange(
+            esClient,
+            index,
+            timeFieldName,
+            query,
+            runtimeMappings
+          );
+
+          return response.ok({
+            body: resp,
+          });
+        } catch (e) {
+          return response.customError(wrapError(e));
+        }
+      }
+    );
 }

@@ -5,6 +5,7 @@
  * 2.0.
  */
 
+import { ELASTIC_HTTP_VERSION_HEADER } from '@kbn/core-http-common';
 import expect from '@kbn/expect';
 import { FtrProviderContext } from '../../ftr_provider_context';
 
@@ -54,6 +55,7 @@ export default ({ getService }: FtrProviderContext) => {
         )
         .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .expect(200);
 
       expect(resp.body.hasImportPermission).to.be(true);
@@ -80,6 +82,7 @@ export default ({ getService }: FtrProviderContext) => {
         )
         .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .send()
         .expect(200);
 
@@ -107,6 +110,7 @@ export default ({ getService }: FtrProviderContext) => {
         )
         .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .expect(200);
 
       expect(resp.body.hasImportPermission).to.be(false);
@@ -134,6 +138,7 @@ export default ({ getService }: FtrProviderContext) => {
         )
         .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .expect(200);
 
       expect(resp.body.hasImportPermission).to.be(false);

@@ -5,6 +5,7 @@
  * 2.0.
  */
 
+import { ELASTIC_HTTP_VERSION_HEADER } from '@kbn/core-http-common';
 import expect from '@kbn/expect';
 import { FtrProviderContext } from '../../ftr_provider_context';
 
@@ -25,6 +26,7 @@ export default ({ getService }: FtrProviderContext) => {
       const resp = await supertest
         .post(`/internal/file_upload/index_exists`)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .send({
           index: 'logstash-2015.09.22',
         })
@@ -37,6 +39,7 @@ export default ({ getService }: FtrProviderContext) => {
       const resp = await supertest
         .post(`/internal/file_upload/index_exists`)
        .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .send({
           index: 'myNewIndex',
         })

@@ -123,6 +123,7 @@
     "@kbn/infra-forge",
     "@kbn/observability-shared-plugin",
     "@kbn/maps-vector-tile-utils",
-    "@kbn/server-route-repository"
+    "@kbn/server-route-repository",
+    "@kbn/core-http-common"
   ]
 }