[ML] Versioning file upload APIs (#158265)
Adds versioning to all of the file upload APIs. Versions are added to the server-side routes and to the client-side functions that call them, and the API tests are updated to send the API version in the request headers. All of the APIs are internal and have been given version '1'.

Also renames `/internal/file_data_visualizer/analyze_file` to `/internal/file_upload/analyze_file`. The old name appears to have been a leftover from when the route was moved out of the Data Visualizer plugin midway through development in [this PR](https://github.com/elastic/kibana/pull/96408).

**Internal APIs**

`/internal/file_upload/analyze_file`
`/internal/file_upload/has_import_permission`
`/internal/file_upload/index_exists`
`/internal/file_upload/time_field_range`

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
parent b388d704c2
commit e77b45f9f9
6 changed files with 173 additions and 124 deletions
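The diffs below cover the client-side API wrappers, the `callImportRoute` helper, the server-side route registrations, two API integration test suites, and a tsconfig reference update. For orientation, here is a condensed sketch of the client-side pattern the commit applies: each wrapper keeps its existing `getHttp().fetch` call and adds `version: '1'`. The helper name and the standalone `http` parameter are illustrative only; the real wrappers obtain the http service via `lazyLoadModules()` as shown in the diffs.

```ts
import type { HttpSetup } from '@kbn/core/public';

// Illustrative only: the real wrappers live in the file_upload plugin and get
// the http service via lazyLoadModules(). The relevant change is the added
// `version: '1'` option, which tells core which version of the internal
// endpoint to call.
export async function checkIndexExistsSketch(
  http: HttpSetup,
  index: string
): Promise<boolean> {
  const { exists } = await http.fetch<{ exists: boolean }>({
    path: '/internal/file_upload/index_exists',
    method: 'POST',
    version: '1',
    body: JSON.stringify({ index }),
  });
  return exists;
}
```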
@@ -54,8 +54,9 @@ export async function analyzeFile(
   const { getHttp } = await lazyLoadModules();
   const body = JSON.stringify(file);
   return await getHttp().fetch<FindFileStructureResponse>({
-    path: `/internal/file_data_visualizer/analyze_file`,
+    path: `/internal/file_upload/analyze_file`,
     method: 'POST',
+    version: '1',
     body,
     query: params,
   });
@@ -67,6 +68,7 @@ export async function hasImportPermission(params: HasImportPermissionParams): Pr
   const resp = await fileUploadModules.getHttp().fetch<HasImportPermission>({
     path: `/internal/file_upload/has_import_permission`,
     method: 'GET',
+    version: '1',
     query: { ...params },
   });
   return resp.hasImportPermission;
@@ -85,6 +87,7 @@ export async function checkIndexExists(
   const { exists } = await fileUploadModules.getHttp().fetch<{ exists: boolean }>({
     path: `/internal/file_upload/index_exists`,
     method: 'POST',
+    version: '1',
     body,
     query: params,
   });
@@ -101,6 +104,7 @@ export async function getTimeFieldRange(index: string, query: unknown, timeField
   return await fileUploadModules.getHttp().fetch<GetTimeFieldRangeResponse>({
     path: `/internal/file_upload/time_field_range`,
     method: 'POST',
+    version: '1',
     body,
   });
 }

@@ -298,6 +298,7 @@ export function callImportRoute({
   return getHttp().fetch<ImportResponse>({
     path: `/internal/file_upload/import`,
     method: 'POST',
+    version: '1',
     query,
     body,
   });

@@ -44,10 +44,16 @@ function importData(
 export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logger: Logger) {
   const router = coreSetup.http.createRouter();

-  router.get(
-    {
+  router.versioned
+    .get({
       path: '/internal/file_upload/has_import_permission',
+      access: 'internal',
+    })
+    .addVersion(
+      {
+        version: '1',
       validate: {
+          request: {
         query: schema.object({
           indexName: schema.maybe(schema.string()),
           checkCreateDataView: schema.boolean(),
@@ -55,6 +61,7 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
         }),
       },
     },
+      },
     async (context, request, response) => {
       try {
         const [, pluginsStart] = await coreSetup.getStartServices();
@@ -85,13 +92,10 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
    *
    * @apiSchema (query) analyzeFileQuerySchema
    */
-  router.post(
-    {
-      path: '/internal/file_data_visualizer/analyze_file',
-      validate: {
-        body: schema.any(),
-        query: analyzeFileQuerySchema,
-      },
+  router.versioned
+    .post({
+      path: '/internal/file_upload/analyze_file',
+      access: 'internal',
       options: {
         body: {
           accepts: ['text/*', 'application/json'],
@@ -99,6 +103,16 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
         },
         tags: ['access:fileUpload:analyzeFile'],
       },
+    })
+    .addVersion(
+      {
+        version: '1',
+        validate: {
+          request: {
+            body: schema.any(),
+            query: analyzeFileQuerySchema,
+          },
+        },
       },
     async (context, request, response) => {
       try {
@@ -121,19 +135,26 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
    * @apiSchema (query) importFileQuerySchema
    * @apiSchema (body) importFileBodySchema
    */
-  router.post(
-    {
+  router.versioned
+    .post({
       path: '/internal/file_upload/import',
-      validate: {
-        query: importFileQuerySchema,
-        body: importFileBodySchema,
-      },
+      access: 'internal',
       options: {
         body: {
           accepts: ['application/json'],
           maxBytes: MAX_FILE_SIZE_BYTES,
         },
       },
+    })
+    .addVersion(
+      {
+        version: '1',
+        validate: {
+          request: {
+            query: importFileQuerySchema,
+            body: importFileBodySchema,
+          },
+        },
       },
     async (context, request, response) => {
       try {
@@ -171,13 +192,20 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
    * @api {post} /internal/file_upload/index_exists ES indices exists wrapper checks if index exists
    * @apiName IndexExists
    */
-  router.post(
-    {
+  router.versioned
+    .post({
       path: '/internal/file_upload/index_exists',
+      access: 'internal',
+    })
+    .addVersion(
+      {
+        version: '1',
       validate: {
+          request: {
         body: schema.object({ index: schema.string() }),
       },
     },
+      },
     async (context, request, response) => {
       try {
         const esClient = (await context.core).elasticsearch.client;
@@ -201,10 +229,19 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
    * @apiSuccess {Object} start start of time range with epoch and string properties.
    * @apiSuccess {Object} end end of time range with epoch and string properties.
    */
-  router.post(
-    {
+  router.versioned
+    .post({
       path: '/internal/file_upload/time_field_range',
+      access: 'internal',
+      options: {
+        tags: ['access:fileUpload:analyzeFile'],
+      },
+    })
+    .addVersion(
+      {
+        version: '1',
       validate: {
+          request: {
         body: schema.object({
           /** Index or indexes for which to return the time range. */
           index: schema.oneOf([schema.string(), schema.arrayOf(schema.string())]),
@@ -215,8 +252,6 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
           runtimeMappings: schema.maybe(runtimeMappingsSchema),
         }),
       },
-      options: {
-        tags: ['access:fileUpload:analyzeFile'],
       },
       },
     async (context, request, response) => {
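To read the server-side change outside diff form, here is a minimal sketch of one versioned internal route, assuming only what the diffs above show: `.post()` carries the path and `access: 'internal'`, while `.addVersion()` carries `version: '1'` and the request validation. The function name is hypothetical and the handler is reduced to the versioned-route wiring.

```ts
import { schema } from '@kbn/config-schema';
import type { IRouter } from '@kbn/core/server';

// Hypothetical helper name; mirrors the registration shape used by the
// file_upload routes in the diff above.
export function registerIndexExistsRouteSketch(router: IRouter) {
  router.versioned
    .post({
      path: '/internal/file_upload/index_exists',
      access: 'internal',
    })
    .addVersion(
      {
        version: '1',
        validate: {
          request: {
            body: schema.object({ index: schema.string() }),
          },
        },
      },
      async (context, request, response) => {
        // The real handler checks index existence with the scoped ES client;
        // here we only show the versioned-route wiring.
        const esClient = (await context.core).elasticsearch.client;
        const exists = await esClient.asCurrentUser.indices.exists({
          index: request.body.index,
        });
        return response.ok({ body: { exists } });
      }
    );
}
```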
@@ -5,6 +5,7 @@
  * 2.0.
  */

+import { ELASTIC_HTTP_VERSION_HEADER } from '@kbn/core-http-common';
 import expect from '@kbn/expect';
 import { FtrProviderContext } from '../../ftr_provider_context';

@@ -54,6 +55,7 @@ export default ({ getService }: FtrProviderContext) => {
         )
         .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .expect(200);

       expect(resp.body.hasImportPermission).to.be(true);
@@ -80,6 +82,7 @@ export default ({ getService }: FtrProviderContext) => {
         )
         .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .send()
         .expect(200);

@@ -107,6 +110,7 @@ export default ({ getService }: FtrProviderContext) => {
         )
         .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .expect(200);

       expect(resp.body.hasImportPermission).to.be(false);
@@ -134,6 +138,7 @@ export default ({ getService }: FtrProviderContext) => {
         )
         .auth(IMPORTER_USER_NAME, IMPORT_USER_PASSWORD)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .expect(200);

       expect(resp.body.hasImportPermission).to.be(false);

@@ -5,6 +5,7 @@
  * 2.0.
  */

+import { ELASTIC_HTTP_VERSION_HEADER } from '@kbn/core-http-common';
 import expect from '@kbn/expect';
 import { FtrProviderContext } from '../../ftr_provider_context';

@@ -25,6 +26,7 @@ export default ({ getService }: FtrProviderContext) => {
       const resp = await supertest
         .post(`/internal/file_upload/index_exists`)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .send({
           index: 'logstash-2015.09.22',
         })
@@ -37,6 +39,7 @@ export default ({ getService }: FtrProviderContext) => {
       const resp = await supertest
         .post(`/internal/file_upload/index_exists`)
         .set('kbn-xsrf', 'kibana')
+        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
         .send({
           index: 'myNewIndex',
         })
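The functional test changes all follow one pattern: requests to the internal endpoints now also send the API version header via `ELASTIC_HTTP_VERSION_HEADER`. Below is a minimal sketch of that pattern, assuming the standard FTR `supertest` service and the same relative `ftr_provider_context` import as the tests above; the describe/it names are illustrative.

```ts
import { ELASTIC_HTTP_VERSION_HEADER } from '@kbn/core-http-common';
import { FtrProviderContext } from '../../ftr_provider_context';

export default ({ getService }: FtrProviderContext) => {
  const supertest = getService('supertest');

  describe('versioned internal API request (sketch)', () => {
    it('sends the internal API version header', async () => {
      await supertest
        .post('/internal/file_upload/index_exists')
        .set('kbn-xsrf', 'kibana')
        // Version 1 of the internal endpoint, matching the routes above.
        .set(ELASTIC_HTTP_VERSION_HEADER, '1')
        .send({ index: 'logstash-2015.09.22' })
        .expect(200);
    });
  });
};
```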
@@ -123,6 +123,7 @@
     "@kbn/infra-forge",
     "@kbn/observability-shared-plugin",
     "@kbn/maps-vector-tile-utils",
-    "@kbn/server-route-repository"
+    "@kbn/server-route-repository",
+    "@kbn/core-http-common"
   ]
 }