mirror of https://github.com/elastic/kibana.git
synced 2025-04-23 09:19:04 -04:00

Allow sample data set to install multiple indices (#23230)

* Allow sample data set to install multiple indices
* create insertDataIntoIndex function to better encapsulate bulk insert
* move everything under bulk insert
* add comment in createIndexName

This commit is contained in:
parent 2e5d3ec18d
commit 00922789c0

14 changed files with 538 additions and 397 deletions
@@ -19,15 +19,14 @@
import Joi from 'joi';

export const dataSetSchema = {
const dataIndexSchema = Joi.object({
  id: Joi.string().regex(/^[a-zA-Z0-9-]+$/).required(),
  name: Joi.string().required(),
  description: Joi.string().required(),
  previewImagePath: Joi.string().required(),
  overviewDashboard: Joi.string().required(), // saved object id of main dashboard for sample data set
  defaultIndex: Joi.string().required(), // saved object id of default index-pattern for sample data set
  dataPath: Joi.string().required(), // path to newline delimited JSON file containing data relative to KIBANA_HOME
  fields: Joi.object().required(), // Object defining Elasticsearch field mappings (contents of index.mappings.type.properties)

  // path to newline delimited JSON file containing data relative to KIBANA_HOME
  dataPath: Joi.string().required(),

  // Object defining Elasticsearch field mappings (contents of index.mappings.type.properties)
  fields: Joi.object().required(),

  // time fields that will be updated relative to now when data is installed
  timeFields: Joi.array().items(Joi.string()).required(),

@@ -43,8 +42,22 @@ export const dataSetSchema = {
  // Set to true to move timestamp to current week, preserving day of week and time of day
  // Relative distance from timestamp to currentTimeMarker will not remain the same
  preserveDayOfWeekTimeOfDay: Joi.boolean().default(false),
});

export const sampleDataSchema = {
  id: Joi.string().regex(/^[a-zA-Z0-9-]+$/).required(),
  name: Joi.string().required(),
  description: Joi.string().required(),
  previewImagePath: Joi.string().required(),

  // saved object id of main dashboard for sample data set
  overviewDashboard: Joi.string().required(),

  // saved object id of default index-pattern for sample data set
  defaultIndex: Joi.string().required(),

  // Kibana saved objects (index pattern, visualizations, dashboard, ...)
  // Should provide a nice demo of Kibana's functionality with the sample data set
  savedObjects: Joi.array().items(Joi.object()).required(),
  dataIndices: Joi.array().items(dataIndexSchema).required(),
};
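For reviewers tracking the schema change: a spec now nests all per-index settings (dataPath, fields, timeFields, currentTimeMarker, preserveDayOfWeekTimeOfDay) inside dataIndices entries. Below is a minimal sketch of a spec that validates against the new sampleDataSchema; every id, path, and saved object id in it is invented for illustration.

import Joi from 'joi';
import { sampleDataSchema } from './data_set_schema';

// Hypothetical spec: the ids, paths, and saved object ids below are made up.
const spec = {
  id: 'example',
  name: 'Example data set',
  description: 'Illustrates the multi-index sample data schema',
  previewImagePath: '/plugins/kibana/home/sample_data_resources/example/dashboard.png',
  overviewDashboard: 'example-dashboard-saved-object-id',
  defaultIndex: 'example-index-pattern-saved-object-id',
  savedObjects: [],
  dataIndices: [
    {
      id: 'example', // matches the data set id, so the index name stays kibana_sample_data_example
      dataPath: '/path/to/example.json.gz',
      fields: { timestamp: { type: 'date' } },
      timeFields: ['timestamp'],
      currentTimeMarker: '2018-01-01T00:00:00',
      preserveDayOfWeekTimeOfDay: false,
    },
  ],
};

const { error } = Joi.validate(spec, sampleDataSchema);
if (error) {
  throw new Error(`Invalid sample data spec: ${error}`);
}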
105 src/server/sample_data/data_sets/flights/field_mappings.js Normal file
@@ -0,0 +1,105 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/* eslint max-len: 0 */
/* eslint quotes: 0 */

export const fieldMappings = {
  timestamp: {
    type: 'date'
  },
  dayOfWeek: {
    type: 'integer'
  },
  Carrier: {
    type: 'keyword'
  },
  FlightNum: {
    type: 'keyword'
  },
  Origin: {
    type: 'keyword'
  },
  OriginAirportID: {
    type: 'keyword'
  },
  OriginCityName: {
    type: 'keyword'
  },
  OriginRegion: {
    type: 'keyword'
  },
  OriginCountry: {
    type: 'keyword'
  },
  OriginLocation: {
    type: 'geo_point'
  },
  Dest: {
    type: 'keyword'
  },
  DestAirportID: {
    type: 'keyword'
  },
  DestCityName: {
    type: 'keyword'
  },
  DestRegion: {
    type: 'keyword'
  },
  DestCountry: {
    type: 'keyword'
  },
  DestLocation: {
    type: 'geo_point'
  },
  AvgTicketPrice: {
    type: 'float'
  },
  OriginWeather: {
    type: 'keyword'
  },
  DestWeather: {
    type: 'keyword'
  },
  Cancelled: {
    type: 'boolean'
  },
  DistanceMiles: {
    type: 'float'
  },
  DistanceKilometers: {
    type: 'float'
  },
  FlightDelayMin: {
    type: 'integer'
  },
  FlightDelay: {
    type: 'boolean'
  },
  FlightDelayType: {
    type: 'keyword'
  },
  FlightTimeMin: {
    type: 'float'
  },
  FlightTimeHour: {
    type: 'keyword'
  }
};
@@ -19,6 +19,7 @@
import path from 'path';
import { savedObjects } from './saved_objects';
import { fieldMappings } from './field_mappings';

export function flightsSpecProvider() {
  return {
@@ -28,93 +29,16 @@ export function flightsSpecProvider() {
    previewImagePath: '/plugins/kibana/home/sample_data_resources/flights/dashboard.png',
    overviewDashboard: '7adfa750-4c81-11e8-b3d7-01146121b73d',
    defaultIndex: 'd3d7af60-4c81-11e8-b3d7-01146121b73d',
    dataPath: path.join(__dirname, './flights.json.gz'),
    fields: {
      timestamp: {
        type: 'date'
      },
      dayOfWeek: {
        type: 'integer'
      },
      Carrier: {
        type: 'keyword'
      },
      FlightNum: {
        type: 'keyword'
      },
      Origin: {
        type: 'keyword'
      },
      OriginAirportID: {
        type: 'keyword'
      },
      OriginCityName: {
        type: 'keyword'
      },
      OriginRegion: {
        type: 'keyword'
      },
      OriginCountry: {
        type: 'keyword'
      },
      OriginLocation: {
        type: 'geo_point'
      },
      Dest: {
        type: 'keyword'
      },
      DestAirportID: {
        type: 'keyword'
      },
      DestCityName: {
        type: 'keyword'
      },
      DestRegion: {
        type: 'keyword'
      },
      DestCountry: {
        type: 'keyword'
      },
      DestLocation: {
        type: 'geo_point'
      },
      AvgTicketPrice: {
        type: 'float'
      },
      OriginWeather: {
        type: 'keyword'
      },
      DestWeather: {
        type: 'keyword'
      },
      Cancelled: {
        type: 'boolean'
      },
      DistanceMiles: {
        type: 'float'
      },
      DistanceKilometers: {
        type: 'float'
      },
      FlightDelayMin: {
        type: 'integer'
      },
      FlightDelay: {
        type: 'boolean'
      },
      FlightDelayType: {
        type: 'keyword'
      },
      FlightTimeMin: {
        type: 'float'
      },
      FlightTimeHour: {
        type: 'keyword'
      }
    },
    timeFields: ['timestamp'],
    currentTimeMarker: '2018-01-09T00:00:00',
    preserveDayOfWeekTimeOfDay: true,
    savedObjects: savedObjects,
    dataIndices: [
      {
        id: 'flights',
        dataPath: path.join(__dirname, './flights.json.gz'),
        fields: fieldMappings,
        timeFields: ['timestamp'],
        currentTimeMarker: '2018-01-09T00:00:00',
        preserveDayOfWeekTimeOfDay: true,
      }
    ]
  };
}
152 src/server/sample_data/data_sets/logs/field_mappings.js Normal file
@@ -0,0 +1,152 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/* eslint max-len: 0 */
/* eslint quotes: 0 */

export const fieldMappings = {
  request: {
    type: 'text',
    fields: {
      keyword: {
        type: 'keyword',
        ignore_above: 256
      }
    }
  },
  geo: {
    properties: {
      srcdest: {
        type: 'keyword'
      },
      src: {
        type: 'keyword'
      },
      dest: {
        type: 'keyword'
      },
      coordinates: {
        type: 'geo_point'
      }
    }
  },
  utc_time: {
    type: 'date'
  },
  url: {
    type: 'text',
    fields: {
      keyword: {
        type: 'keyword',
        ignore_above: 256
      }
    }
  },
  message: {
    type: 'text',
    fields: {
      keyword: {
        type: 'keyword',
        ignore_above: 256
      }
    }
  },
  host: {
    type: 'text',
    fields: {
      keyword: {
        type: 'keyword',
        ignore_above: 256
      }
    }
  },
  clientip: {
    type: 'ip'
  },
  response: {
    type: 'text',
    fields: {
      keyword: {
        type: 'keyword',
        ignore_above: 256
      }
    }
  },
  machine: {
    properties: {
      ram: {
        type: 'long'
      },
      os: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      }
    }
  },
  agent: {
    type: 'text',
    fields: {
      keyword: {
        type: 'keyword',
        ignore_above: 256
      }
    }
  },
  bytes: {
    type: 'long'
  },
  tags: {
    type: 'text',
    fields: {
      keyword: {
        type: 'keyword',
        ignore_above: 256
      }
    }
  },
  referer: {
    type: 'keyword'
  },
  ip: {
    type: 'ip'
  },
  timestamp: {
    type: 'date'
  },
  phpmemory: {
    type: 'long'
  },
  memory: {
    type: 'double'
  },
  extension: {
    type: 'text',
    fields: {
      keyword: {
        type: 'keyword',
        ignore_above: 256
      }
    }
  }
};
@@ -19,6 +19,7 @@
import path from 'path';
import { savedObjects } from './saved_objects';
import { fieldMappings } from './field_mappings';

export function logsSpecProvider() {
  return {
@@ -28,140 +29,16 @@ export function logsSpecProvider() {
    previewImagePath: '/plugins/kibana/home/sample_data_resources/logs/dashboard.png',
    overviewDashboard: 'edf84fe0-e1a0-11e7-b6d5-4dc382ef7f5b',
    defaultIndex: '90943e30-9a47-11e8-b64d-95841ca0b247',
    dataPath: path.join(__dirname, './logs.json.gz'),
    fields: {
      request: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      },
      geo: {
        properties: {
          srcdest: {
            type: 'keyword'
          },
          src: {
            type: 'keyword'
          },
          dest: {
            type: 'keyword'
          },
          coordinates: {
            type: 'geo_point'
          }
        }
      },
      utc_time: {
        type: 'date'
      },
      url: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      },
      message: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      },
      host: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      },
      clientip: {
        type: 'ip'
      },
      response: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      },
      machine: {
        properties: {
          ram: {
            type: 'long'
          },
          os: {
            type: 'text',
            fields: {
              keyword: {
                type: 'keyword',
                ignore_above: 256
              }
            }
          }
        }
      },
      agent: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      },
      bytes: {
        type: 'long'
      },
      tags: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      },
      referer: {
        type: 'keyword'
      },
      ip: {
        type: 'ip'
      },
      timestamp: {
        type: 'date'
      },
      phpmemory: {
        type: 'long'
      },
      memory: {
        type: 'double'
      },
      extension: {
        type: 'text',
        fields: {
          keyword: {
            type: 'keyword',
            ignore_above: 256
          }
        }
      }
    },
    timeFields: ['timestamp'],
    currentTimeMarker: '2018-08-01T00:00:00',
    preserveDayOfWeekTimeOfDay: true,
    savedObjects: savedObjects,
    dataIndices: [
      {
        id: 'logs',
        dataPath: path.join(__dirname, './logs.json.gz'),
        fields: fieldMappings,
        timeFields: ['timestamp'],
        currentTimeMarker: '2018-08-01T00:00:00',
        preserveDayOfWeekTimeOfDay: true,
      }
    ]
  };
}
@@ -27,6 +27,42 @@ import {
  translateTimeRelativeToWeek
} from './lib/translate_timestamp';

function insertDataIntoIndex(dataIndexConfig, index, nowReference, request, server, callWithRequest) {
  const bulkInsert = async (docs) => {
    function updateTimestamps(doc) {
      dataIndexConfig.timeFields.forEach(timeFieldName => {
        if (doc[timeFieldName]) {
          doc[timeFieldName] = dataIndexConfig.preserveDayOfWeekTimeOfDay
            ? translateTimeRelativeToWeek(doc[timeFieldName], dataIndexConfig.currentTimeMarker, nowReference)
            : translateTimeRelativeToDifference(doc[timeFieldName], dataIndexConfig.currentTimeMarker, nowReference);
        }
      });
      return doc;
    }

    const insertCmd = {
      index: {
        _index: index
      }
    };

    const bulk = [];
    docs.forEach(doc => {
      bulk.push(insertCmd);
      bulk.push(updateTimestamps(doc));
    });
    const resp = await callWithRequest(request, 'bulk', { body: bulk });
    if (resp.errors) {
      server.log(
        ['warning'],
        `sample_data install errors while bulk inserting. Elasticsearch response: ${JSON.stringify(resp, null, '')}`);
      return Promise.reject(new Error(`Unable to load sample data into index "${index}", see kibana logs for details`));
    }
  };

  return loadData(dataIndexConfig.dataPath, bulkInsert);
}

export const createInstallRoute = () => ({
  path: '/api/sample_data/{id}',
  method: 'POST',
@@ -49,87 +85,66 @@ export const createInstallRoute = () => ({
    }

    const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
    const index = createIndexName(server, sampleDataset.id);
    const insertCmd = {
      index: {
        _index: index
      }
    };

    // clean up any old installation of dataset
    try {
      await callWithRequest(request, 'indices.delete', { index: index });
    } catch (err) {
      // ignore delete errors
    }

    try {
      const createIndexParams = {
        index: index,
        body: {
          settings: {
            index: {
              number_of_shards: 1,
              number_of_replicas: 0
            }
          },
          mappings: {
            _doc: {
              properties: sampleDataset.fields
            }
          }
        }
      };
      await callWithRequest(request, 'indices.create', createIndexParams);
    } catch (err) {
      const errMsg = `Unable to create sample data index "${index}", error: ${err.message}`;
      server.log(['warning'], errMsg);
      return reply(errMsg).code(err.status);
    }

    const now = request.query.now ? request.query.now : new Date();
    const nowReference = dateToIso8601IgnoringTime(now);
    function updateTimestamps(doc) {
      sampleDataset.timeFields.forEach(timeFieldName => {
        if (doc[timeFieldName]) {
          doc[timeFieldName] = sampleDataset.preserveDayOfWeekTimeOfDay
            ? translateTimeRelativeToWeek(doc[timeFieldName], sampleDataset.currentTimeMarker, nowReference)
            : translateTimeRelativeToDifference(doc[timeFieldName], sampleDataset.currentTimeMarker, nowReference);
        }
      });
      return doc;
    }
    const bulkInsert = async (docs) => {
      const bulk = [];
      docs.forEach(doc => {
        bulk.push(insertCmd);
        bulk.push(updateTimestamps(doc));
      });
      const resp = await callWithRequest(request, 'bulk', { body: bulk });
      if (resp.errors) {
        server.log(
          ['warning'],
          `sample_data install errors while bulk inserting. Elasticsearch response: ${JSON.stringify(resp, null, ' ')}`);
        return Promise.reject(new Error(`Unable to load sample data into index "${index}", see kibana logs for details`));

    const counts = {};
    for (let i = 0; i < sampleDataset.dataIndices.length; i++) {
      const dataIndexConfig = sampleDataset.dataIndices[i];
      const index = createIndexName(sampleDataset.id, dataIndexConfig.id);

      // clean up any old installation of dataset
      try {
        await callWithRequest(request, 'indices.delete', { index: index });
      } catch (err) {
        // ignore delete errors
      }
    };
    loadData(sampleDataset.dataPath, bulkInsert, async (err, count) => {
      if (err) {

      try {
        const createIndexParams = {
          index: index,
          body: {
            settings: {
              index: {
                number_of_shards: 1,
                number_of_replicas: 0
              }
            },
            mappings: {
              _doc: {
                properties: dataIndexConfig.fields
              }
            }
          }
        };
        await callWithRequest(request, 'indices.create', createIndexParams);
      } catch (err) {
        const errMsg = `Unable to create sample data index "${index}", error: ${err.message}`;
        server.log(['warning'], errMsg);
        return reply(errMsg).code(err.status);
      }

      try {
        const count = await insertDataIntoIndex(
          dataIndexConfig, index, nowReference, request, server, callWithRequest);
        counts[index] = count;
      } catch (err) {
        server.log(['warning'], `sample_data install errors while loading data. Error: ${err}`);
        return reply(err.message).code(500);
      }
    }

    const createResults = await request.getSavedObjectsClient().bulkCreate(sampleDataset.savedObjects, { overwrite: true });
    const errors = createResults.saved_objects.filter(savedObjectCreateResult => {
      return savedObjectCreateResult.hasOwnProperty('error');
    });
    if (errors.length > 0) {
      server.log(['warning'], `sample_data install errors while loading saved objects. Errors: ${errors.join(',')}`);
      return reply(`Unable to load kibana saved objects, see kibana logs for details`).code(403);
    }

    return reply({ docsLoaded: count, kibanaSavedObjectsLoaded: sampleDataset.savedObjects.length });
    const createResults = await request.getSavedObjectsClient().bulkCreate(sampleDataset.savedObjects, { overwrite: true });
    const errors = createResults.saved_objects.filter(savedObjectCreateResult => {
      return savedObjectCreateResult.hasOwnProperty('error');
    });
    if (errors.length > 0) {
      server.log(['warning'], `sample_data install errors while loading saved objects. Errors: ${errors.join(',')}`);
      return reply(`Unable to load kibana saved objects, see kibana logs for details`).code(403);
    }

    return reply({ elasticsearchIndicesCreated: counts, kibanaSavedObjectsLoaded: sampleDataset.savedObjects.length });
  }
}
});
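The bulk body assembled in insertDataIntoIndex follows the Elasticsearch bulk API convention of alternating action and source entries, which is why insertCmd is pushed before every document. A minimal sketch of what callWithRequest(request, 'bulk', { body: bulk }) receives for a two-document batch; the documents themselves are invented for illustration.

// Hypothetical two-document batch destined for one sample data index.
const index = 'kibana_sample_data_flights';
const insertCmd = { index: { _index: index } };

const docs = [
  { FlightNum: 'AB1234', timestamp: '2018-01-01T18:27:00' }, // invented doc
  { FlightNum: 'CD5678', timestamp: '2018-01-01T18:31:00' }, // invented doc
];

const bulk = [];
docs.forEach(doc => {
  bulk.push(insertCmd); // action entry: which index to write to
  bulk.push(doc);       // source entry: the (timestamp-translated) document
});
// bulk is [insertCmd, docs[0], insertCmd, docs[1]], matching the route's loop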
@@ -17,6 +17,12 @@
 * under the License.
 */

export function createIndexName(server, sampleDataSetId) {
  return `kibana_sample_data_${sampleDataSetId}`;
export function createIndexName(sampleDataSetId, dataIndexId) {
  // Sample data schema was updated to support multiple indices in 6.5.
  // This if statement ensures that sample data sets that used a single index prior to the schema change
  // have the same index name to avoid orphaned indices when uninstalling.
  if (sampleDataSetId === dataIndexId) {
    return `kibana_sample_data_${sampleDataSetId}`;
  }
  return `kibana_sample_data_${sampleDataSetId}_${dataIndexId}`;
}
28 src/server/sample_data/routes/lib/create_index_name.test.js Normal file
@@ -0,0 +1,28 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { createIndexName } from './create_index_name';

test('should include sampleDataSetId and dataIndexId in elasticsearch index name', async () => {
  expect(createIndexName('mySampleDataSetId', 'myDataIndexId')).toBe('kibana_sample_data_mySampleDataSetId_myDataIndexId');
});

test('should only include sampleDataSetId when sampleDataSetId and dataIndexId are identical', async () => {
  expect(createIndexName('flights', 'flights')).toBe('kibana_sample_data_flights');
});
@@ -23,75 +23,77 @@ import zlib from 'zlib';

const BULK_INSERT_SIZE = 500;

export function loadData(path, bulkInsert, callback) {
  let count = 0;
  let docs = [];
  let isPaused = false;
export function loadData(path, bulkInsert) {
  return new Promise(function (resolve, reject) {
    let count = 0;
    let docs = [];
    let isPaused = false;

  const readStream = fs.createReadStream(path, {
    // pause does not stop lines already in buffer. Use smaller buffer size to avoid bulk inserting too many records
    highWaterMark: 1024 * 4
  });
  const lineStream = readline.createInterface({
    input: readStream.pipe(zlib.Unzip()) // eslint-disable-line new-cap
  });
    const readStream = fs.createReadStream(path, {
      // pause does not stop lines already in buffer. Use smaller buffer size to avoid bulk inserting too many records
      highWaterMark: 1024 * 4
    });
    const lineStream = readline.createInterface({
      input: readStream.pipe(zlib.Unzip()) // eslint-disable-line new-cap
    });

  const onClose = async () => {
    if (docs.length > 0) {
      try {
        await bulkInsert(docs);
      } catch (err) {
        callback(err);
    const onClose = async () => {
      if (docs.length > 0) {
        try {
          await bulkInsert(docs);
        } catch (err) {
          reject(err);
          return;
        }
      }
      resolve(count);
    };
    lineStream.on('close', onClose);

    function closeWithError(err) {
      lineStream.removeListener('close', onClose);
      lineStream.close();
      reject(err);
    }

    lineStream.on('line', async (line) => {
      if (line.length === 0 || line.charAt(0) === '#') {
        return;
      }
    }
    callback(null, count);
  };
  lineStream.on('close', onClose);

  function closeWithError(err) {
    lineStream.removeListener('close', onClose);
    lineStream.close();
    callback(err);
  }

  lineStream.on('line', async (line) => {
    if (line.length === 0 || line.charAt(0) === '#') {
      return;
    }

    let doc;
    try {
      doc = JSON.parse(line);
    } catch (err) {
      closeWithError(new Error(`Unable to parse line as JSON document, line: """${line}""", Error: ${err.message}`));
      return;
    }

    count++;
    docs.push(doc);

    if (docs.length >= BULK_INSERT_SIZE && !isPaused) {
      lineStream.pause();

      // readline pause is leaky and events in buffer still get sent after pause
      // need to clear buffer before async call
      const docstmp = docs.slice();
      docs = [];
      let doc;
      try {
        await bulkInsert(docstmp);
        lineStream.resume();
        doc = JSON.parse(line);
      } catch (err) {
        closeWithError(err);
        closeWithError(new Error(`Unable to parse line as JSON document, line: """${line}""", Error: ${err.message}`));
        return;
      }
    }
  });

  lineStream.on('pause', async () => {
    isPaused = true;
  });
      count++;
      docs.push(doc);

  lineStream.on('resume', async () => {
    isPaused = false;
      if (docs.length >= BULK_INSERT_SIZE && !isPaused) {
        lineStream.pause();

        // readline pause is leaky and events in buffer still get sent after pause
        // need to clear buffer before async call
        const docstmp = docs.slice();
        docs = [];
        try {
          await bulkInsert(docstmp);
          lineStream.resume();
        } catch (err) {
          closeWithError(err);
        }
      }
    });

    lineStream.on('pause', async () => {
      isPaused = true;
    });

    lineStream.on('resume', async () => {
      isPaused = false;
    });
  });
}
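With the callback signature gone, loadData now returns a promise that resolves with the number of documents read and rejects on a JSON parse failure or a failed bulk insert. A minimal consumption sketch; the path and the empty handler are placeholders.

import { loadData } from './load_data';

async function countDocs() {
  // Placeholder handler: a real caller would send docs to Elasticsearch here.
  // Throwing (or rejecting) inside it rejects the loadData promise.
  const bulkInsert = async (docs) => {};

  try {
    const count = await loadData('./flights.json.gz', bulkInsert); // placeholder path
    console.log(`loaded ${count} docs`);
  } catch (err) {
    // parse errors and bulk-insert failures both land here
    console.error(err.message);
  }
}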
@@ -19,26 +19,22 @@

import { loadData } from './load_data';

test('load flight data', done => {
test('load flight data', async () => {
  let myDocsCount = 0;
  const bulkInsertMock = (docs) => {
    myDocsCount += docs.length;
  };
  loadData('./src/server/sample_data/data_sets/flights/flights.json.gz', bulkInsertMock, async (err, count) => {
    expect(myDocsCount).toBe(13059);
    expect(count).toBe(13059);
    done();
  });
  const count = await loadData('./src/server/sample_data/data_sets/flights/flights.json.gz', bulkInsertMock);
  expect(myDocsCount).toBe(13059);
  expect(count).toBe(13059);
});

test('load log data', done => {
test('load log data', async () => {
  let myDocsCount = 0;
  const bulkInsertMock = (docs) => {
    myDocsCount += docs.length;
  };
  loadData('./src/server/sample_data/data_sets/logs/logs.json.gz', bulkInsertMock, async (err, count) => {
    expect(myDocsCount).toBe(14005);
    expect(count).toBe(14005);
    done();
  });
  const count = await loadData('./src/server/sample_data/data_sets/logs/logs.json.gz', bulkInsertMock);
  expect(myDocsCount).toBe(14005);
  expect(count).toBe(14005);
});
@@ -39,27 +39,33 @@ export const createListRoute = () => ({
      previewImagePath: sampleDataset.previewImagePath,
      overviewDashboard: sampleDataset.overviewDashboard,
      defaultIndex: sampleDataset.defaultIndex,
      dataIndices: sampleDataset.dataIndices.map(dataIndexConfig => {
        return { id: dataIndexConfig.id };
      }),
    };
  });

  const isInstalledPromises = sampleDatasets.map(async sampleDataset => {
    const index = createIndexName(request.server, sampleDataset.id);
    try {
      const indexExists = await callWithRequest(request, 'indices.exists', { index: index });
      if (!indexExists) {
        sampleDataset.status = NOT_INSTALLED;
        return;
      }
    for (let i = 0; i < sampleDataset.dataIndices.length; i++) {
      const dataIndexConfig = sampleDataset.dataIndices[i];
      const index = createIndexName(sampleDataset.id, dataIndexConfig.id);
      try {
        const indexExists = await callWithRequest(request, 'indices.exists', { index: index });
        if (!indexExists) {
          sampleDataset.status = NOT_INSTALLED;
          return;
        }

      const { count } = await callWithRequest(request, 'count', { index: index });
      if (count === 0) {
        sampleDataset.status = NOT_INSTALLED;
        const { count } = await callWithRequest(request, 'count', { index: index });
        if (count === 0) {
          sampleDataset.status = NOT_INSTALLED;
          return;
        }
      } catch (err) {
        sampleDataset.status = UNKNOWN;
        sampleDataset.statusMsg = err.message;
        return;
      }
    } catch (err) {
      sampleDataset.status = UNKNOWN;
      sampleDataset.statusMsg = err.message;
      return;
    }

    try {
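Combined with the dataIndices ids added above, one entry of the GET /api/sample_data response now looks roughly like the sketch below; the values are illustrative, not captured output.

// Illustrative shape of one element of the list response.
const listEntry = {
  id: 'flights',
  name: 'Sample flight data', // display name is illustrative
  description: '...',
  previewImagePath: '/plugins/kibana/home/sample_data_resources/flights/dashboard.png',
  overviewDashboard: '7adfa750-4c81-11e8-b3d7-01146121b73d',
  defaultIndex: 'd3d7af60-4c81-11e8-b3d7-01146121b73d',
  dataIndices: [{ id: 'flights' }],
  status: 'installed', // or 'not_installed' / 'unknown' with a statusMsg
};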
@@ -43,12 +43,16 @@ export const createUninstallRoute = () => ({
    }

    const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
    const index = createIndexName(server, sampleDataset.id);

    try {
      await callWithRequest(request, 'indices.delete', { index: index });
    } catch (err) {
      return reply(`Unable to delete sample data index "${index}", error: ${err.message}`).code(err.status);
    for (let i = 0; i < sampleDataset.dataIndices.length; i++) {
      const dataIndexConfig = sampleDataset.dataIndices[i];
      const index = createIndexName(sampleDataset.id, dataIndexConfig.id);

      try {
        await callWithRequest(request, 'indices.delete', { index: index });
      } catch (err) {
        return reply(`Unable to delete sample data index "${index}", error: ${err.message}`).code(err.status);
      }
    }

    const deletePromises = sampleDataset.savedObjects.map((savedObjectJson) => {
@@ -18,7 +18,7 @@
 */

import Joi from 'joi';
import { dataSetSchema } from './data_set_schema';
import { sampleDataSchema } from './data_set_schema';
import {
  createListRoute,
  createInstallRoute,
@@ -41,7 +41,7 @@ export function sampleDataMixin(kbnServer, server) {
  });

  server.decorate('server', 'registerSampleDataset', (specProvider) => {
    const { error, value } = Joi.validate(specProvider(server), dataSetSchema);
    const { error, value } = Joi.validate(specProvider(server), sampleDataSchema);

    if (error) {
      throw new Error(`Unable to register sample dataset spec because it's invalid. ${error}`);
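For reference, a plugin registers a data set by handing its spec provider to the decorated method; a sketch of registering the flights provider shown earlier, assuming sampleDataMixin has already decorated server (the import path is illustrative).

import { flightsSpecProvider } from './data_sets/flights'; // illustrative path

// registerSampleDataset validates specProvider(server) against sampleDataSchema
// and throws if the spec is invalid.
server.registerSampleDataset(flightsSpecProvider);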
@@ -26,6 +26,19 @@ export default function ({ getService }) {

  describe('sample data apis', () => {

    describe('list', () => {
      it('should return list of sample data sets with installed status', async () => {
        const resp = await supertest
          .get(`/api/sample_data`)
          .set('kbn-xsrf', 'kibana')
          .expect(200);

        expect(resp.body).to.be.an('array');
        expect(resp.body.length).to.be.above(0);
        expect(resp.body[0].status).to.be('not_installed');
      });
    });

    describe('install', () => {
      it('should return 404 if id does not match any sample data sets', async () => {
        await supertest
@@ -40,7 +53,7 @@ export default function ({ getService }) {
          .set('kbn-xsrf', 'kibana')
          .expect(200);

        expect(resp.body).to.eql({ docsLoaded: 13059, kibanaSavedObjectsLoaded: 21 });
        expect(resp.body).to.eql({ elasticsearchIndicesCreated: { kibana_sample_data_flights: 13059 }, kibanaSavedObjectsLoaded: 21 });
      });

      it('should load elasticsearch index containing sample data with dates relative to current time', async () => {