Mirror of https://github.com/elastic/kibana.git, synced 2025-04-23 09:19:04 -04:00
[7.x] [Maps] remove maps_file_upload plugin and fold public folder into file_upload plugin (#90292) (#90946)
* [Maps] remove maps_file_upload plugin and fold public folder into file_upload plugin (#90292)

* get geojson working with api/file_upload/import
* remove maps_file_upload server code
* remove common folder
* remove maps_file_upload plugin
* fix tsconfig paths
* rename file_upload plugin in maps tsconfig
* fix file path
* node scripts/build_plugin_list_docs
* fix webpack compile errors
* telemetry schema cleanup, i18n cleanup, limits cleanup
* remove mapsFileUpload from limits.yml
* remove index pattern link test case
* update telemetry/v2/clusters/_stats for new file_upload path

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>

* remove maps_file_upload from tsconfig.refs.json
* remove x-pack/tsconfig.refs.json, removed in upstream

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
parent 868b40eb2d
commit 51a14defcc

55 changed files with 36 additions and 763 deletions
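The most visible API change in this backport is the import endpoint: the maps GeoJSON upload client now posts to /api/file_upload/import instead of /api/maps/fileupload/import (see the writeToIndex hunk below). A minimal sketch of a caller against the relocated route, assuming Kibana's core.http service and assuming the new route accepts the same body fields the old maps route validated (index, data, settings, mappings, ingestPipeline, fileType); the importChunk helper and its argument shape are hypothetical, not part of this commit:

// Hypothetical helper, not part of this commit: posts one chunk of documents to the
// relocated endpoint. Body fields mirror what the maps client sends after this change;
// the exact schema of the new route is not shown in this diff.
import type { HttpSetup } from 'src/core/public';

interface ImportChunkArgs {
  http: HttpSetup;
  index: string;
  data: object[];
  id?: string; // returned by the first chunk; pass it back so follow-up chunks append
}

export async function importChunk({ http, index, data, id }: ImportChunkArgs) {
  return http.post('/api/file_upload/import', {
    ...(id ? { query: { id } } : {}),
    body: JSON.stringify({
      index,
      data,
      settings: { number_of_shards: 1 },
      mappings: { properties: { coordinates: { type: 'geo_point' } } },
      ingestPipeline: {},
      fileType: 'json',
    }),
  });
}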
@@ -439,10 +439,6 @@ using the CURL scripts in the scripts folder.
|Visualize geo data from Elasticsearch or 3rd party geo-services.

|{kib-repo}blob/{branch}/x-pack/plugins/maps_file_upload/README.md[mapsFileUpload]
|Deprecated - plugin targeted for removal and will get merged into file_upload plugin

|{kib-repo}blob/{branch}/x-pack/plugins/maps_legacy_licensing/README.md[mapsLegacyLicensing]
|This plugin provides access to the detailed tile map services from Elastic.

@@ -104,4 +104,4 @@ pageLoadAssetSize:
presentationUtil: 28545
spacesOss: 18817
osquery: 107090
mapsFileUpload: 23775
fileUpload: 25664

@@ -83,7 +83,6 @@
{ "path": "./x-pack/plugins/lens/tsconfig.json" },
{ "path": "./x-pack/plugins/license_management/tsconfig.json" },
{ "path": "./x-pack/plugins/licensing/tsconfig.json" },
{ "path": "./x-pack/plugins/maps_file_upload/tsconfig.json" },
{ "path": "./x-pack/plugins/maps_legacy_licensing/tsconfig.json" },
{ "path": "./x-pack/plugins/maps/tsconfig.json" },
{ "path": "./x-pack/plugins/ml/tsconfig.json" },

@@ -20,7 +20,7 @@
"xpack.endpoint": "plugins/endpoint",
"xpack.enterpriseSearch": "plugins/enterprise_search",
"xpack.features": "plugins/features",
"xpack.fileUpload": "plugins/maps_file_upload",
"xpack.fileUpload": "plugins/file_upload",
"xpack.globalSearch": ["plugins/global_search"],
"xpack.globalSearchBar": ["plugins/global_search_bar"],
"xpack.graph": ["plugins/graph"],

@@ -3,6 +3,6 @@
"version": "8.0.0",
"kibanaVersion": "kibana",
"server": true,
"ui": false,
"requiredPlugins": ["usageCollection"]
"ui": true,
"requiredPlugins": ["data", "usageCollection"]
}

@@ -118,9 +118,7 @@ export class JsonImportProgress extends Component {
<a
data-test-subj="indexManagementNewIndexLink"
target="_blank"
href={`${basePath}/app/kibana#/
management/elasticsearch/index_management/indices/
filter/${indexName}`.replace(/\s/g, '')}
href={`${basePath}/app/management/kibana/indexPatterns`}
>
{i18n.translate('xpack.fileUpload.jsonImport.indexMgmtLink', {
defaultMessage: 'Index Management',

@@ -10,8 +10,8 @@ import { EuiFilePicker, EuiFormRow, EuiProgress } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import { i18n } from '@kbn/i18n';
import { parseFile } from '../util/file_parser';
import { MAX_FILE_SIZE } from '../../common/constants/file_import';

const MAX_FILE_SIZE = 52428800;
const ACCEPTABLE_FILETYPES = ['json', 'geojson'];
const acceptedFileTypeString = ACCEPTABLE_FILETYPES.map((type) => `.${type}`).join(',');
const acceptedFileTypeStringMessage = ACCEPTABLE_FILETYPES.map((type) => `.${type}`).join(', ');

@@ -11,5 +11,7 @@ export function plugin() {
return new FileUploadPlugin();
}

export * from '../common';

export { StartContract } from './plugin';
export { FileUploadComponentProps } from './get_file_upload_component';

@@ -6,26 +6,12 @@
*/

import _ from 'lodash';
import { ES_GEO_FIELD_TYPE } from '../../common/constants/file_import';

const DEFAULT_SETTINGS = {
number_of_shards: 1,
export const ES_GEO_FIELD_TYPE = {
GEO_POINT: 'geo_point',
GEO_SHAPE: 'geo_shape',
};

const DEFAULT_GEO_SHAPE_MAPPINGS = {
coordinates: {
type: ES_GEO_FIELD_TYPE.GEO_SHAPE,
},
};

const DEFAULT_GEO_POINT_MAPPINGS = {
coordinates: {
type: ES_GEO_FIELD_TYPE.GEO_POINT,
},
};

const DEFAULT_INGEST_PIPELINE = {};

export function getGeoIndexTypesForFeatures(featureTypes) {
const hasNoFeatureType = !featureTypes || !featureTypes.length;
if (hasNoFeatureType) {
@@ -77,11 +63,16 @@ export function geoJsonToEs(parsedGeojson, datatype) {
export function getGeoJsonIndexingDetails(parsedGeojson, dataType) {
return {
data: geoJsonToEs(parsedGeojson, dataType),
ingestPipeline: DEFAULT_INGEST_PIPELINE,
mappings:
dataType === ES_GEO_FIELD_TYPE.GEO_POINT
? DEFAULT_GEO_POINT_MAPPINGS
: DEFAULT_GEO_SHAPE_MAPPINGS,
settings: DEFAULT_SETTINGS,
ingestPipeline: {},
mappings: {
properties: {
coordinates: {
type: dataType,
},
},
},
settings: {
number_of_shards: 1,
},
};
}
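For reference, after the geo_processing change above the index mappings and settings are built inline from the dataType argument rather than pulled from the deleted constants; for a geo_point upload, getGeoJsonIndexingDetails now yields an object shaped like the following (illustrative sample data, not taken from the commit):

// Illustrative only: shape returned by getGeoJsonIndexingDetails(parsedGeojson, 'geo_point')
// after this hunk. `data` holds whatever geoJsonToEs() extracted from the GeoJSON features.
const indexingDetails = {
  data: [{ coordinates: [-71.06, 42.36], name: 'example point' }],
  ingestPipeline: {},
  mappings: {
    properties: {
      coordinates: { type: 'geo_point' },
    },
  },
  settings: {
    number_of_shards: 1,
  },
};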
@@ -5,8 +5,7 @@
* 2.0.
*/

import { geoJsonToEs } from './geo_processing';
import { ES_GEO_FIELD_TYPE } from '../../common/constants/file_import';
import { ES_GEO_FIELD_TYPE, geoJsonToEs } from './geo_processing';

describe('geo_processing', () => {
describe('getGeoJsonToEs', () => {

@@ -11,8 +11,6 @@ import { getGeoJsonIndexingDetails } from './geo_processing';
import { sizeLimitedChunking } from './size_limited_chunking';
import { i18n } from '@kbn/i18n';

const fileType = 'json';

export async function indexData(parsedFile, transformDetails, indexName, dataType, appName) {
if (!parsedFile) {
throw i18n.translate('xpack.fileUpload.indexingService.noFileImported', {

@@ -117,10 +115,10 @@ function transformDataByFormatForIndexing(transform, parsedFile, dataType) {

async function writeToIndex(indexingDetails) {
const query = indexingDetails.id ? { id: indexingDetails.id } : null;
const { appName, index, data, settings, mappings, ingestPipeline } = indexingDetails;
const { index, data, settings, mappings, ingestPipeline } = indexingDetails;

return await httpService({
url: `/api/maps/fileupload/import`,
url: `/api/file_upload/import`,
method: 'POST',
...(query ? { query } : {}),
data: {

@@ -129,8 +127,6 @@ async function writeToIndex(indexingDetails) {
settings,
mappings,
ingestPipeline,
fileType,
...(appName ? { app: appName } : {}),
},
});
}

@@ -5,7 +5,7 @@
* 2.0.
*/

import { MAX_BYTES } from '../../common/constants/file_import';
const MAX_BYTES = 31457280;

// MAX_BYTES is a good guideline for splitting up posts, but this logic
// occasionally sizes chunks so closely to the limit, that the remaining content

@@ -10,6 +10,7 @@
"include": ["common/**/*", "public/**/*", "server/**/*"],
"references": [
{ "path": "../../../src/core/tsconfig.json" },
{ "path": "../../../src/plugins/data/tsconfig.json" },
{ "path": "../../../src/plugins/usage_collection/tsconfig.json" }
]
}
@@ -11,7 +11,7 @@
"features",
"inspector",
"data",
"mapsFileUpload",
"fileUpload",
"uiActions",
"navigation",
"visualizations",

@@ -19,7 +19,7 @@ import { GeoJsonFileSource } from '../../sources/geojson_file_source';
import { VectorLayer } from '../../layers/vector_layer';
import { createDefaultLayerDescriptor } from '../../sources/es_search_source';
import { RenderWizardArguments } from '../../layers/layer_wizard_registry';
import { FileUploadComponentProps } from '../../../../../maps_file_upload/public';
import { FileUploadComponentProps } from '../../../../../file_upload/public';

export const INDEX_SETUP_STEP_ID = 'INDEX_SETUP_STEP_ID';
export const INDEXING_STEP_ID = 'INDEXING_STEP_ID';

@@ -27,7 +27,7 @@ export const getIndexPatternService = () => pluginsStart.data.indexPatterns;
export const getAutocompleteService = () => pluginsStart.data.autocomplete;
export const getInspector = () => pluginsStart.inspector;
export const getFileUploadComponent = async () => {
return await pluginsStart.mapsFileUpload.getFileUploadComponent();
return await pluginsStart.fileUpload.getFileUploadComponent();
};
export const getUiSettings = () => coreStart.uiSettings;
export const getIsDarkMode = () => getUiSettings().get('theme:darkMode', false);

@@ -54,7 +54,7 @@ import { EmbeddableStart } from '../../../../src/plugins/embeddable/public';
import { MapsLegacyConfig } from '../../../../src/plugins/maps_legacy/config';
import { DataPublicPluginStart } from '../../../../src/plugins/data/public';
import { LicensingPluginSetup, LicensingPluginStart } from '../../licensing/public';
import { StartContract as FileUploadStartContract } from '../../maps_file_upload/public';
import { StartContract as FileUploadStartContract } from '../../file_upload/public';
import { SavedObjectsStart } from '../../../../src/plugins/saved_objects/public';
import { PresentationUtilPluginStart } from '../../../../src/plugins/presentation_util/public';
import {

@@ -80,7 +80,7 @@ export interface MapsPluginStartDependencies {
charts: ChartsPluginStart;
data: DataPublicPluginStart;
embeddable: EmbeddableStart;
mapsFileUpload: FileUploadStartContract;
fileUpload: FileUploadStartContract;
inspector: InspectorStartContract;
licensing: LicensingPluginStart;
navigation: NavigationPublicPluginStart;

@@ -19,7 +19,7 @@
{ "path": "../../../src/plugins/maps_legacy/tsconfig.json" },
{ "path": "../features/tsconfig.json" },
{ "path": "../licensing/tsconfig.json" },
{ "path": "../maps_file_upload/tsconfig.json" },
{ "path": "../file_upload/tsconfig.json" },
{ "path": "../saved_objects_tagging/tsconfig.json" },
]
}
@@ -1,3 +0,0 @@
# Maps File upload

Deprecated - plugin targeted for removal and will get merged into file_upload plugin

@@ -1,21 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

export const MAX_BYTES = 31457280;

export const MAX_FILE_SIZE = 52428800;

// Value to use in the Elasticsearch index mapping metadata to identify the
// index as having been created by the File Upload Plugin.
export const INDEX_META_DATA_CREATED_BY = 'file-upload-plugin';

export const ES_GEO_FIELD_TYPE = {
GEO_POINT: 'geo_point',
GEO_SHAPE: 'geo_shape',
};

export const DEFAULT_KBN_VERSION = 'kbnVersion';

@@ -1,12 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

module.exports = {
preset: '@kbn/test',
rootDir: '../../..',
roots: ['<rootDir>/x-pack/plugins/maps_file_upload'],
};

@@ -1,8 +0,0 @@
{
"id": "mapsFileUpload",
"version": "8.0.0",
"kibanaVersion": "kibana",
"server": true,
"ui": true,
"requiredPlugins": ["data", "usageCollection"]
}

@@ -1,16 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

export const mappings = {
'file-upload-telemetry': {
properties: {
filesUploadedTotalCount: {
type: 'long',
},
},
},
};

@@ -1,12 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { boomify } from '@hapi/boom';

export function wrapError(error) {
return boomify(error, { statusCode: error.status });
}

@@ -1,12 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { FileUploadPlugin } from './plugin';

export * from './plugin';

export const plugin = () => new FileUploadPlugin();

@@ -1,12 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

let internalRepository;
export const setInternalRepository = (createInternalRepository) => {
internalRepository = createInternalRepository();
};
export const getInternalRepository = () => internalRepository;
@@ -1,161 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { INDEX_META_DATA_CREATED_BY } from '../../../common/constants/file_import';
import uuid from 'uuid';

export function importDataProvider(callWithRequest) {
async function importData(id, index, settings, mappings, ingestPipeline, data) {
let createdIndex;
let createdPipelineId;
const docCount = data.length;

try {
const { id: pipelineId, pipeline } = ingestPipeline;

if (!id) {
// first chunk of data, create the index and id to return
id = uuid.v1();

await createIndex(index, settings, mappings);
createdIndex = index;

// create the pipeline if one has been supplied
if (pipelineId !== undefined) {
const success = await createPipeline(pipelineId, pipeline);
if (success.acknowledged !== true) {
throw success;
}
}
createdPipelineId = pipelineId;
} else {
createdIndex = index;
createdPipelineId = pipelineId;
}

let failures = [];
if (data.length) {
const resp = await indexData(index, createdPipelineId, data);
if (resp.success === false) {
if (resp.ingestError) {
// all docs failed, abort
throw resp;
} else {
// some docs failed.
// still report success but with a list of failures
failures = resp.failures || [];
}
}
}

return {
success: true,
id,
index: createdIndex,
pipelineId: createdPipelineId,
docCount,
failures,
};
} catch (error) {
return {
success: false,
id,
index: createdIndex,
pipelineId: createdPipelineId,
error: error.error !== undefined ? error.error : error,
docCount,
ingestError: error.ingestError,
failures: error.failures || [],
};
}
}

async function createIndex(index, settings, mappings) {
const body = {
mappings: {
_meta: {
created_by: INDEX_META_DATA_CREATED_BY,
},
properties: mappings,
},
};

if (settings && Object.keys(settings).length) {
body.settings = settings;
}

await callWithRequest('indices.create', { index, body });
}

async function indexData(index, pipelineId, data) {
try {
const body = [];
for (let i = 0; i < data.length; i++) {
body.push({ index: {} });
body.push(data[i]);
}

const settings = { index, body };
if (pipelineId !== undefined) {
settings.pipeline = pipelineId;
}

const resp = await callWithRequest('bulk', settings);
if (resp.errors) {
throw resp;
} else {
return {
success: true,
docs: data.length,
failures: [],
};
}
} catch (error) {
let failures = [];
let ingestError = false;
if (error.errors !== undefined && Array.isArray(error.items)) {
// an expected error where some or all of the bulk request
// docs have failed to be ingested.
failures = getFailures(error.items, data);
} else {
// some other error has happened.
ingestError = true;
}

return {
success: false,
error,
docCount: data.length,
failures,
ingestError,
};
}
}

async function createPipeline(id, pipeline) {
return await callWithRequest('ingest.putPipeline', { id, body: pipeline });
}

function getFailures(items, data) {
const failures = [];
for (let i = 0; i < items.length; i++) {
const item = items[i];
if (item.index && item.index.error) {
failures.push({
item: i,
reason: item.index.error.reason,
doc: data[i],
});
}
}
return failures;
}

return {
importData,
};
}
@@ -1,8 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

export { importDataProvider } from './import_data';

@@ -1,27 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { initRoutes } from './routes/file_upload';
import { setInternalRepository } from './kibana_server_services';
import { registerFileUploadUsageCollector, fileUploadTelemetryMappingsType } from './telemetry';

export class FileUploadPlugin {
constructor() {
this.router = null;
}

setup(core, plugins) {
core.savedObjects.registerType(fileUploadTelemetryMappingsType);
this.router = core.http.createRouter();
registerFileUploadUsageCollector(plugins.usageCollection);
}

start(core) {
initRoutes(this.router, core.savedObjects.getSavedObjectsRepository);
setInternalRepository(core.savedObjects.createInternalRepository);
}
}

@@ -1,131 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { importDataProvider } from '../models/import_data';
import { updateTelemetry } from '../telemetry/telemetry';
import { MAX_BYTES } from '../../common/constants/file_import';
import { schema } from '@kbn/config-schema';

export const IMPORT_ROUTE = '/api/maps/fileupload/import';

export const querySchema = schema.maybe(
schema.object({
id: schema.nullable(schema.string()),
})
);

export const bodySchema = schema.object(
{
app: schema.maybe(schema.string()),
index: schema.string(),
fileType: schema.string(),
ingestPipeline: schema.maybe(
schema.object(
{},
{
defaultValue: {},
unknowns: 'allow',
}
)
),
},
{ unknowns: 'allow' }
);

const options = {
body: {
maxBytes: MAX_BYTES,
accepts: ['application/json'],
},
tags: ['access:fileUpload:import'],
};

export const idConditionalValidation = (body, boolHasId) =>
schema
.object(
{
data: boolHasId
? schema.arrayOf(schema.object({}, { unknowns: 'allow' }), { minSize: 1 })
: schema.any(),
settings: boolHasId
? schema.any()
: schema.object(
{},
{
defaultValue: {
number_of_shards: 1,
},
unknowns: 'allow',
}
),
mappings: boolHasId
? schema.any()
: schema.object(
{},
{
defaultValue: {},
unknowns: 'allow',
}
),
},
{ unknowns: 'allow' }
)
.validate(body);

const finishValidationAndProcessReq = () => {
return async (con, req, { ok, badRequest }) => {
const {
query: { id },
body,
} = req;
const boolHasId = !!id;

let resp;
try {
const validIdReqData = idConditionalValidation(body, boolHasId);
const callWithRequest = con.core.elasticsearch.legacy.client.callAsCurrentUser;
const { importData: importDataFunc } = importDataProvider(callWithRequest);

const { index, settings, mappings, ingestPipeline, data } = validIdReqData;
const processedReq = await importDataFunc(
id,
index,
settings,
mappings,
ingestPipeline,
data
);

if (processedReq.success) {
resp = ok({ body: processedReq });
// If no id's been established then this is a new index, update telemetry
if (!boolHasId) {
await updateTelemetry();
}
} else {
resp = badRequest(`Error processing request 1: ${processedReq.error.message}`, ['body']);
}
} catch (e) {
resp = badRequest(`Error processing request 2: : ${e.message}`, ['body']);
}
return resp;
};
};

export const initRoutes = (router) => {
router.post(
{
path: `${IMPORT_ROUTE}{id?}`,
validate: {
query: querySchema,
body: bodySchema,
},
options,
},
finishValidationAndProcessReq()
);
};
@@ -1,77 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { querySchema, bodySchema, idConditionalValidation } from './file_upload';

const queryWithId = {
id: '123',
};

const bodyWithoutQueryId = {
index: 'islandofone',
data: [],
settings: { number_of_shards: 1 },
mappings: { coordinates: { type: 'geo_point' } },
ingestPipeline: {},
fileType: 'json',
app: 'Maps',
};

const bodyWithQueryId = {
index: 'islandofone2',
data: [{ coordinates: [], name: 'islandofone2' }],
settings: {},
mappings: {},
ingestPipeline: {},
fileType: 'json',
};

describe('route validation', () => {
it(`validates query with id`, async () => {
const validationResult = querySchema.validate(queryWithId);
expect(validationResult.id).toBe(queryWithId.id);
});

it(`validates query without id`, async () => {
const validationResult = querySchema.validate({});
expect(validationResult.id).toBeNull();
});

it(`throws when query contains content other than an id`, async () => {
expect(() => querySchema.validate({ notAnId: 123 })).toThrowError(
`[notAnId]: definition for this key is missing`
);
});

it(`validates body with valid fields`, async () => {
const validationResult = bodySchema.validate(bodyWithoutQueryId);
expect(validationResult).toEqual(bodyWithoutQueryId);
});

it(`throws if an expected field is missing`, async () => {
/* eslint-disable no-unused-vars */
const { index, ...bodyWithoutIndexField } = bodyWithoutQueryId;
expect(() => bodySchema.validate(bodyWithoutIndexField)).toThrowError(
`[index]: expected value of type [string] but got [undefined]`
);
});

it(`validates conditional fields when id has been provided in query`, async () => {
const validationResult = idConditionalValidation(bodyWithQueryId, true);
expect(validationResult).toEqual(bodyWithQueryId);
});

it(`validates conditional fields when no id has been provided in query`, async () => {
const validationResultWhenIdPresent = idConditionalValidation(bodyWithoutQueryId, false);
expect(validationResultWhenIdPresent).toEqual(bodyWithoutQueryId);
// Conditions for no id are more strict since this query sets up the index,
// expect it to throw if expected fields aren't present
expect(() => idConditionalValidation(bodyWithoutQueryId, true)).toThrowError(
`[data]: array size is [0], but cannot be smaller than [1]`
);
});
});

@@ -1,29 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
import { getTelemetry, initTelemetry, Telemetry } from './telemetry';

export function registerFileUploadUsageCollector(usageCollection: UsageCollectionSetup): void {
const fileUploadUsageCollector = usageCollection.makeUsageCollector<Telemetry>({
type: 'fileUploadTelemetry',
isReady: () => true,
fetch: async () => {
const fileUploadUsage = await getTelemetry();
if (!fileUploadUsage) {
return initTelemetry();
}

return fileUploadUsage;
},
schema: {
filesUploadedTotalCount: { type: 'long' },
},
});

usageCollection.registerCollector(fileUploadUsageCollector);
}
@@ -1,9 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

export { registerFileUploadUsageCollector } from './file_upload_usage_collector';
export { fileUploadTelemetryMappingsType } from './mappings';

@@ -1,22 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { SavedObjectsType } from 'src/core/server';
import { TELEMETRY_DOC_ID } from './telemetry';

export const fileUploadTelemetryMappingsType: SavedObjectsType = {
name: TELEMETRY_DOC_ID,
hidden: false,
namespaceType: 'agnostic',
mappings: {
properties: {
filesUploadedTotalCount: {
type: 'long',
},
},
},
};

@@ -1,48 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { getTelemetry, updateTelemetry } from './telemetry';

const internalRepository = () => ({
get: jest.fn(() => null),
create: jest.fn(() => ({ attributes: 'test' })),
update: jest.fn(() => ({ attributes: 'test' })),
});

function mockInit(getVal: any = { attributes: {} }): any {
return {
...internalRepository(),
get: jest.fn(() => getVal),
};
}

describe('file upload plugin telemetry', () => {
describe('getTelemetry', () => {
it('should get existing telemetry', async () => {
const internalRepo = mockInit();
await getTelemetry(internalRepo);
expect(internalRepo.update.mock.calls.length).toBe(0);
expect(internalRepo.get.mock.calls.length).toBe(1);
expect(internalRepo.create.mock.calls.length).toBe(0);
});
});

describe('updateTelemetry', () => {
it('should update existing telemetry', async () => {
const internalRepo = mockInit({
attributes: {
filesUploadedTotalCount: 2,
},
});

await updateTelemetry(internalRepo);
expect(internalRepo.update.mock.calls.length).toBe(1);
expect(internalRepo.get.mock.calls.length).toBe(1);
expect(internalRepo.create.mock.calls.length).toBe(0);
});
});
});

@@ -1,62 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import _ from 'lodash';
// @ts-ignore
import { getInternalRepository } from '../kibana_server_services';

export const TELEMETRY_DOC_ID = 'file-upload-telemetry';

export interface Telemetry {
filesUploadedTotalCount: number;
}

export interface TelemetrySavedObject {
attributes: Telemetry;
}

export function initTelemetry(): Telemetry {
return {
filesUploadedTotalCount: 0,
};
}

export async function getTelemetry(internalRepo?: object): Promise<Telemetry> {
const internalRepository = internalRepo || getInternalRepository();
let telemetrySavedObject;

try {
telemetrySavedObject = await internalRepository.get(TELEMETRY_DOC_ID, TELEMETRY_DOC_ID);
} catch (e) {
// Fail silently
}

return telemetrySavedObject ? telemetrySavedObject.attributes : null;
}

export async function updateTelemetry(internalRepo?: any) {
const internalRepository = internalRepo || getInternalRepository();
let telemetry = await getTelemetry(internalRepository);
// Create if doesn't exist
if (!telemetry || _.isEmpty(telemetry)) {
const newTelemetrySavedObject = await internalRepository.create(
TELEMETRY_DOC_ID,
initTelemetry(),
{ id: TELEMETRY_DOC_ID }
);
telemetry = newTelemetrySavedObject.attributes;
}

await internalRepository.update(TELEMETRY_DOC_ID, TELEMETRY_DOC_ID, incrementCounts(telemetry));
}

export function incrementCounts({ filesUploadedTotalCount }: { filesUploadedTotalCount: number }) {
return {
// TODO: get telemetry for app, total file counts, file type
filesUploadedTotalCount: filesUploadedTotalCount + 1,
};
}
@@ -1,15 +0,0 @@
{
"extends": "../../../tsconfig.base.json",
"compilerOptions": {
"composite": true,
"outDir": "./target/types",
"emitDeclarationOnly": true,
"declaration": true,
"declarationMap": true
},
"include": ["common/**/*", "public/**/*", "server/**/*", "mappings.ts"],
"references": [
{ "path": "../../../src/plugins/data/tsconfig.json" },
{ "path": "../../../src/plugins/usage_collection/tsconfig.json" }
]
}
@@ -12,7 +12,7 @@ import { EuiCallOut, EuiSpacer, EuiButtonEmpty, EuiHorizontalRule } from '@elast

import numeral from '@elastic/numeral';
import { ErrorResponse } from '../../../../../../common/types/errors';
import { FILE_SIZE_DISPLAY_FORMAT } from '../../../../../../../file_upload/common';
import { FILE_SIZE_DISPLAY_FORMAT } from '../../../../../../../file_upload/public';

interface FileTooLargeProps {
fileSize: number;

@@ -15,7 +15,7 @@ import {
MAX_FILE_SIZE_BYTES,
ABSOLUTE_MAX_FILE_SIZE_BYTES,
FILE_SIZE_DISPLAY_FORMAT,
} from '../../../../../../../file_upload/common';
} from '../../../../../../../file_upload/public';
import { getUiSettings } from '../../../../util/dependency_cache';
import { FILE_DATA_VISUALIZER_MAX_FILE_SIZE } from '../../../../../../common/constants/settings';

@@ -2219,13 +2219,6 @@
}
}
},
"fileUploadTelemetry": {
"properties": {
"filesUploadedTotalCount": {
"type": "long"
}
}
},
"maps": {
"properties": {
"settings": {
@@ -83,7 +83,7 @@ export default function ({ getService }) {
expect(stats.stack_stats.kibana.plugins.reporting.enabled).to.be(true);
expect(stats.stack_stats.kibana.plugins.rollups.index_patterns).to.be.an('object');
expect(stats.stack_stats.kibana.plugins.spaces.available).to.be(true);
expect(stats.stack_stats.kibana.plugins.fileUploadTelemetry.filesUploadedTotalCount).to.be.a(
expect(stats.stack_stats.kibana.plugins.fileUpload.file_upload.index_creation_count).to.be.a(
'number'
);

@@ -11,7 +11,6 @@ import uuid from 'uuid/v4';

export default function ({ getService, getPageObjects }) {
const PageObjects = getPageObjects(['maps', 'common']);
const testSubjects = getService('testSubjects');
const log = getService('log');
const security = getService('security');

@@ -99,20 +98,6 @@ export default function ({ getService, getPageObjects }) {
expect(newIndexedLayerExists).to.be(false);
});

it('should create a link to new index in management', async () => {
const indexName = await indexPoint();

const layerAddReady = await PageObjects.maps.importLayerReadyForAdd();
expect(layerAddReady).to.be(true);

const newIndexLinkExists = await testSubjects.exists('indexManagementNewIndexLink');
expect(newIndexLinkExists).to.be(true);

const indexLink = await testSubjects.getAttribute('indexManagementNewIndexLink', 'href');
const linkDirectsToNewIndex = indexLink.endsWith(indexName);
expect(linkDirectsToNewIndex).to.be(true);
});

const GEO_POINT = 'geo_point';
const pointGeojsonFiles = ['point.json', 'multi_point.json'];
pointGeojsonFiles.forEach(async (pointFile) => {

@@ -27,7 +27,6 @@
"plugins/licensing/**/*",
"plugins/lens/**/*",
"plugins/maps/**/*",
"plugins/maps_file_upload/**/*",
"plugins/maps_legacy_licensing/**/*",
"plugins/ml/**/*",
"plugins/observability/**/*",

@@ -131,7 +130,6 @@
{ "path": "./plugins/lens/tsconfig.json" },
{ "path": "./plugins/license_management/tsconfig.json" },
{ "path": "./plugins/licensing/tsconfig.json" },
{ "path": "./plugins/maps_file_upload/tsconfig.json" },
{ "path": "./plugins/maps_legacy_licensing/tsconfig.json" },
{ "path": "./plugins/maps/tsconfig.json" },
{ "path": "./plugins/ml/tsconfig.json" },