Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 01:38:56 -04:00
[Sample Data] run prettier on a few sample data files (#33075)
* run prettier on sample data routes
* run prettier, and hoist out a mapping function
* straight up prettier
* use closure over factory fn
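Note: the formatting throughout this diff (single quotes, ES5-style trailing commas, no parens around single arrow parameters, lines wrapped near 100 columns) is consistent with prettier's pre-2.x defaults plus a wider print width. A minimal sketch of options matching that output, inferred from the diff rather than taken from the repository's actual prettier configuration:

// .prettierrc.js: hypothetical options inferred from the output in this diff,
// not necessarily Kibana's real configuration at the time.
module.exports = {
  singleQuote: true, // 'boom' rather than "boom"
  trailingComma: 'es5', // trailing commas in multiline literals, none after function parameters
  printWidth: 100, // long template literals and call chains wrap near 100 columns
  arrowParens: 'avoid', // async docs => rather than async (docs) =>
};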
parent 20c704dcc3
commit 0b7715bc76
4 changed files with 105 additions and 78 deletions
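Note on the last bullet: one concrete reading visible below is bulkInsert inside insertDataIntoIndex, an async closure that takes only docs and captures index, dataIndexConfig, and nowReference from the enclosing function rather than receiving them on every call. A minimal standalone sketch of that factory-plus-closure shape, with hypothetical names (this is not Kibana's API):

// makeInserter is a hypothetical factory; the returned closure captures
// `index` and `callBulk`, so callers pass only the documents to insert.
function makeInserter(index, callBulk) {
  const insertCmd = { index: { _index: index } }; // same bulk action header as in the diff
  return async docs => {
    // interleave the bulk action header with each document, as bulkInsert does below
    const body = docs.flatMap(doc => [insertCmd, doc]);
    return callBulk({ body });
  };
}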
@@ -19,33 +19,43 @@
 import Boom from 'boom';
 import Joi from 'joi';

 import { loadData } from './lib/load_data';
 import { createIndexName } from './lib/create_index_name';
 import {
   dateToIso8601IgnoringTime,
   translateTimeRelativeToDifference,
-  translateTimeRelativeToWeek
+  translateTimeRelativeToWeek,
 } from './lib/translate_timestamp';

-function insertDataIntoIndex(dataIndexConfig, index, nowReference, request, server, callWithRequest) {
-  const bulkInsert = async (docs) => {
+function insertDataIntoIndex(
+  dataIndexConfig,
+  index,
+  nowReference,
+  request,
+  server,
+  callWithRequest
+) {
+  const bulkInsert = async docs => {
     function updateTimestamps(doc) {
       dataIndexConfig.timeFields.forEach(timeFieldName => {
         if (doc[timeFieldName]) {
           doc[timeFieldName] = dataIndexConfig.preserveDayOfWeekTimeOfDay
-            ? translateTimeRelativeToWeek(doc[timeFieldName], dataIndexConfig.currentTimeMarker, nowReference)
-            : translateTimeRelativeToDifference(doc[timeFieldName], dataIndexConfig.currentTimeMarker, nowReference);
+            ? translateTimeRelativeToWeek(
+                doc[timeFieldName],
+                dataIndexConfig.currentTimeMarker,
+                nowReference
+              )
+            : translateTimeRelativeToDifference(
+                doc[timeFieldName],
+                dataIndexConfig.currentTimeMarker,
+                nowReference
+              );
         }
       });
       return doc;
     }

-    const insertCmd = {
-      index: {
-        _index: index
-      }
-    };
+    const insertCmd = { index: { _index: index } };

     const bulk = [];
     docs.forEach(doc => {
@@ -56,8 +66,15 @@ function insertDataIntoIndex(dataIndexConfig, index, nowReference, request, serv
       if (resp.errors) {
         server.log(
           ['warning'],
-          `sample_data install errors while bulk inserting. Elasticsearch response: ${JSON.stringify(resp, null, '')}`);
-        return Promise.reject(new Error(`Unable to load sample data into index "${index}", see kibana logs for details`));
+          `sample_data install errors while bulk inserting. Elasticsearch response: ${JSON.stringify(
+            resp,
+            null,
+            ''
+          )}`
+        );
+        return Promise.reject(
+          new Error(`Unable to load sample data into index "${index}", see kibana logs for details`)
+        );
       }
     };

@@ -69,25 +86,22 @@ export const createInstallRoute = () => ({
   method: 'POST',
   config: {
     validate: {
-      query: Joi.object().keys({
-        now: Joi.date().iso()
-      }),
-      params: Joi.object().keys({
-        id: Joi.string().required(),
-      }).required()
+      query: Joi.object().keys({ now: Joi.date().iso() }),
+      params: Joi.object()
+        .keys({ id: Joi.string().required() })
+        .required(),
     },
     handler: async (request, h) => {
-      const server = request.server;
-      const sampleDataset = server.getSampleDatasets().find(sampleDataset => {
-        return sampleDataset.id === request.params.id;
-      });
+      const { server, params, query } = request;
+
+      const sampleDataset = server.getSampleDatasets().find(({ id }) => id === params.id);
       if (!sampleDataset) {
         return h.response().code(404);
       }

       const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');

-      const now = request.query.now ? request.query.now : new Date();
+      const now = query.now ? query.now : new Date();
       const nowReference = dateToIso8601IgnoringTime(now);

       const counts = {};
@@ -97,7 +111,7 @@ export const createInstallRoute = () => ({

         // clean up any old installation of dataset
         try {
-          await callWithRequest(request, 'indices.delete', { index: index });
+          await callWithRequest(request, 'indices.delete', { index });
         } catch (err) {
           // ignore delete errors
         }
@@ -106,16 +120,9 @@ export const createInstallRoute = () => ({
         const createIndexParams = {
           index: index,
           body: {
-            settings: {
-              index: {
-                number_of_shards: 1,
-                number_of_replicas: 0
-              }
-            },
-            mappings: {
-              properties: dataIndexConfig.fields
-            }
-          }
+            settings: { index: { number_of_shards: 1, number_of_replicas: 0 } },
+            mappings: { properties: dataIndexConfig.fields },
+          },
         };
         await callWithRequest(request, 'indices.create', createIndexParams);
       } catch (err) {
@@ -126,7 +133,13 @@ export const createInstallRoute = () => ({

       try {
         const count = await insertDataIntoIndex(
-          dataIndexConfig, index, nowReference, request, server, callWithRequest);
+          dataIndexConfig,
+          index,
+          nowReference,
+          request,
+          server,
+          callWithRequest
+        );
         counts[index] = count;
       } catch (err) {
         server.log(['warning'], `sample_data install errors while loading data. Error: ${err}`);
@@ -136,20 +149,32 @@ export const createInstallRoute = () => ({

       let createResults;
       try {
-        createResults = await request.getSavedObjectsClient().bulkCreate(sampleDataset.savedObjects, { overwrite: true });
-      } catch (err) {
+        createResults = await request
+          .getSavedObjectsClient()
+          .bulkCreate(sampleDataset.savedObjects, { overwrite: true });
+      } catch (err) {
         server.log(['warning'], `bulkCreate failed, error: ${err.message}`);
-        return Boom.badImplementation(`Unable to load kibana saved objects, see kibana logs for details`);
+        return Boom.badImplementation(
+          `Unable to load kibana saved objects, see kibana logs for details`
+        );
       }
       const errors = createResults.saved_objects.filter(savedObjectCreateResult => {
         return savedObjectCreateResult.hasOwnProperty('error');
       });
       if (errors.length > 0) {
-        server.log(['warning'], `sample_data install errors while loading saved objects. Errors: ${errors.join(',')}`);
-        return h.response(`Unable to load kibana saved objects, see kibana logs for details`).code(403);
+        server.log(
+          ['warning'],
+          `sample_data install errors while loading saved objects. Errors: ${errors.join(',')}`
+        );
+        return h
+          .response(`Unable to load kibana saved objects, see kibana logs for details`)
+          .code(403);
       }

-      return h.response({ elasticsearchIndicesCreated: counts, kibanaSavedObjectsLoaded: sampleDataset.savedObjects.length });
-    }
-  }
+      return h.response({
+        elasticsearchIndicesCreated: counts,
+        kibanaSavedObjectsLoaded: sampleDataset.savedObjects.length,
+      });
+    },
+  },
 });

@@ -24,19 +24,15 @@ import zlib from 'zlib';
 const BULK_INSERT_SIZE = 500;

 export function loadData(path, bulkInsert) {
-  return new Promise(function (resolve, reject) {
+  return new Promise((resolve, reject) => {
     let count = 0;
     let docs = [];
     let isPaused = false;

-    const readStream = fs.createReadStream(path, {
-      // pause does not stop lines already in buffer. Use smaller buffer size to avoid bulk inserting to many records
-      highWaterMark: 1024 * 4
-    });
-    const lineStream = readline.createInterface({
-      input: readStream.pipe(zlib.Unzip()) // eslint-disable-line new-cap
-    });
-
+    // pause does not stop lines already in buffer. Use smaller buffer size to avoid bulk inserting to many records
+    const readStream = fs.createReadStream(path, { highWaterMark: 1024 * 4 });
+    // eslint-disable-next-line new-cap
+    const lineStream = readline.createInterface({ input: readStream.pipe(zlib.Unzip()) });
     const onClose = async () => {
       if (docs.length > 0) {
         try {
@@ -50,13 +46,13 @@ export function loadData(path, bulkInsert) {
     };
     lineStream.on('close', onClose);

-    function closeWithError(err) {
+    const closeWithError = err => {
       lineStream.removeListener('close', onClose);
       lineStream.close();
       reject(err);
-    }
+    };

-    lineStream.on('line', async (line) => {
+    lineStream.on('line', async line => {
       if (line.length === 0 || line.charAt(0) === '#') {
         return;
       }
@@ -65,7 +61,11 @@ export function loadData(path, bulkInsert) {
       try {
         doc = JSON.parse(line);
       } catch (err) {
-        closeWithError(new Error(`Unable to parse line as JSON document, line: """${line}""", Error: ${err.message}`));
+        closeWithError(
+          new Error(
+            `Unable to parse line as JSON document, line: """${line}""", Error: ${err.message}`
+          )
+        );
         return;
       }

@@ -28,7 +28,7 @@ export const createListRoute = () => ({
   path: '/api/sample_data',
   method: 'GET',
   config: {
-    handler: async (request) => {
+    handler: async request => {
       const { callWithRequest } = request.server.plugins.elasticsearch.getCluster('data');

       const sampleDatasets = request.server.getSampleDatasets().map(sampleDataset => {
@@ -40,9 +40,7 @@ export const createListRoute = () => ({
           darkPreviewImagePath: sampleDataset.darkPreviewImagePath,
           overviewDashboard: sampleDataset.overviewDashboard,
           defaultIndex: sampleDataset.defaultIndex,
-          dataIndices: sampleDataset.dataIndices.map(dataIndexConfig => {
-            return { id: dataIndexConfig.id };
-          }),
+          dataIndices: sampleDataset.dataIndices.map(({ id }) => ({ id })),
         };
       });
@@ -88,6 +86,6 @@ export const createListRoute = () => ({

       await Promise.all(isInstalledPromises);
       return sampleDatasets;
-    }
-  }
+    },
+  },
 });

@@ -19,7 +19,6 @@

 import _ from 'lodash';
 import Joi from 'joi';

 import { createIndexName } from './lib/create_index_name';

 export const createUninstallRoute = () => ({
@@ -27,15 +26,15 @@ export const createUninstallRoute = () => ({
   method: 'DELETE',
   config: {
     validate: {
-      params: Joi.object().keys({
-        id: Joi.string().required(),
-      }).required()
+      params: Joi.object()
+        .keys({
+          id: Joi.string().required(),
+        })
+        .required(),
     },
     handler: async (request, h) => {
-      const server = request.server;
-      const sampleDataset = server.getSampleDatasets().find(({ id }) => {
-        return id === request.params.id;
-      });
+      const { server, params } = request;
+      const sampleDataset = server.getSampleDatasets().find(({ id }) => id === params.id);

       if (!sampleDataset) {
         return h.response().code(404);
@@ -50,23 +49,28 @@ export const createUninstallRoute = () => ({
         try {
           await callWithRequest(request, 'indices.delete', { index: index });
         } catch (err) {
-          return h.response(`Unable to delete sample data index "${index}", error: ${err.message}`).code(err.status);
+          return h
+            .response(`Unable to delete sample data index "${index}", error: ${err.message}`)
+            .code(err.status);
         }
       }

-      const deletePromises = sampleDataset.savedObjects.map((savedObjectJson) => {
-        return request.getSavedObjectsClient().delete(savedObjectJson.type, savedObjectJson.id);
-      });
+      const deletePromises = sampleDataset.savedObjects.map(({ type, id }) =>
+        request.getSavedObjectsClient().delete(type, id)
+      );

       try {
         await Promise.all(deletePromises);
       } catch (err) {
         // ignore 404s since users could have deleted some of the saved objects via the UI
         if (_.get(err, 'output.statusCode') !== 404) {
-          return h.response(`Unable to delete sample dataset saved objects, error: ${err.message}`).code(403);
+          return h
+            .response(`Unable to delete sample dataset saved objects, error: ${err.message}`)
+            .code(403);
         }
       }

       return {};
-    }
-  }
+    },
+  },
 });
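Note: the uninstall handler's 404 check depends on Boom-style errors exposing output.statusCode, which is why _.get is used instead of direct property access (a plain Error has no output object, and optional chaining was not available in this codebase at the time). A small illustration, assuming only documented boom and lodash behavior:

// _.get reads the nested status code safely whether or not err is a Boom error.
const _ = require('lodash');
const Boom = require('boom');

const notFound = Boom.notFound('saved object not found');
console.log(_.get(notFound, 'output.statusCode')); // 404, so the error is ignored

const plain = new Error('network down');
console.log(_.get(plain, 'output.statusCode')); // undefined, so the 403 path runs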