mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
NP Migration: Sample data (#52753)
This commit is contained in:
parent
97ed566245
commit
c75cd9de19
62 changed files with 1277 additions and 1117 deletions
|
@ -11,6 +11,7 @@
|
|||
"embeddableApi": "src/plugins/embeddable",
|
||||
"embeddableExamples": "examples/embeddable_examples",
|
||||
"share": "src/plugins/share",
|
||||
"home": "src/plugins/home",
|
||||
"esUi": "src/plugins/es_ui_shared",
|
||||
"devTools": "src/plugins/dev_tools",
|
||||
"expressions": "src/plugins/expressions",
|
||||
|
|
2
src/legacy/server/kbn_server.d.ts
vendored
2
src/legacy/server/kbn_server.d.ts
vendored
|
@ -44,6 +44,7 @@ import { UsageCollectionSetup } from '../../plugins/usage_collection/server';
|
|||
import { IndexPatternsServiceFactory } from './index_patterns';
|
||||
import { Capabilities } from '../../core/server';
|
||||
import { UiSettingsServiceFactoryOptions } from '../../legacy/ui/ui_settings/ui_settings_service_factory';
|
||||
import { HomeServerPluginSetup } from '../../plugins/home/server';
|
||||
|
||||
// lot of legacy code was assuming this type only had these two methods
|
||||
export type KibanaConfig = Pick<LegacyConfig, 'get' | 'has'>;
|
||||
|
@ -99,6 +100,7 @@ type KbnMixinFunc = (kbnServer: KbnServer, server: Server, config: any) => Promi
|
|||
|
||||
export interface PluginsSetup {
|
||||
usageCollection: UsageCollectionSetup;
|
||||
home: HomeServerPluginSetup;
|
||||
[key: string]: object;
|
||||
}
|
||||
|
||||
|
|
|
@ -35,7 +35,6 @@ import optimizeMixin from '../../optimize';
|
|||
import * as Plugins from './plugins';
|
||||
import { indexPatternsMixin } from './index_patterns';
|
||||
import { savedObjectsMixin } from './saved_objects/saved_objects_mixin';
|
||||
import { sampleDataMixin } from './sample_data';
|
||||
import { capabilitiesMixin } from './capabilities';
|
||||
import { urlShorteningMixin } from './url_shortening';
|
||||
import { serverExtensionsMixin } from './server_extensions';
|
||||
|
@ -112,9 +111,6 @@ export default class KbnServer {
|
|||
// setup capabilities routes
|
||||
capabilitiesMixin,
|
||||
|
||||
// setup routes for installing/uninstalling sample data sets
|
||||
sampleDataMixin,
|
||||
|
||||
// setup routes for short urls
|
||||
urlShorteningMixin,
|
||||
|
||||
|
|
|
@ -1,20 +0,0 @@
|
|||
### What happens when a user installs a sample data set?
|
||||
1) Kibana deletes any existing Elasticsearch indices for the sample data set left over from previous installs.
|
||||
2) Kibana creates Elasticsearch indices with the provided field mappings.
|
||||
3) Kibana uses bulk insert to ingest the new-line delimited json into the Elasticsearch index. Kibana migrates timestamps provided in new-line delimited json to the current time frame for any date field defined in `timeFields`
|
||||
4) Kibana will install all saved objects for the sample data set. This will override any saved objects previously installed for the sample data set.
|
||||
|
||||
Elasticsearch index names are prefixed with `kibana_sample_data_`. For more details see [createIndexName](/src/legacy/server/sample_data/routes/lib/create_index_name.js)
|
||||
|
||||
Sample data sets typically provide data that spans 5 weeks from the past and 5 weeks into the future so users see data relative to `now` for a few weeks after installing sample data sets.
|
||||
|
||||
### Adding new sample data sets
|
||||
Use [existing sample data sets](/src/legacy/server/sample_data/data_sets) as examples.
|
||||
To avoid bloating the Kibana distribution, keep data set size to a minimum.
|
||||
|
||||
Follow the steps below to add new Sample data sets to Kibana.
|
||||
1) Create new-line delimited json containing sample data.
|
||||
2) Create file with Elasticsearch field mappings for sample data indices.
|
||||
3) Create Kibana saved objects for sample data including index-patterns, visualizations, and dashboards. The best way to extract the saved objects is from the Kibana management -> saved objects [export UI](https://www.elastic.co/guide/en/kibana/current/managing-saved-objects.html#_export)
|
||||
4) Define sample data spec conforming to [Data Set Schema](/src/legacy/server/sample_data/data_set_schema.js).
|
||||
5) Register sample data by calling `server.registerSampleDataset(yourSpecProvider)` where `yourSpecProvider` is a function that returns an object containing your sample data spec from step 4.
|
|
@ -1,184 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import Boom from 'boom';
|
||||
import Joi from 'joi';
|
||||
import { usage } from '../usage';
|
||||
import { loadData } from './lib/load_data';
|
||||
import { createIndexName } from './lib/create_index_name';
|
||||
import {
|
||||
dateToIso8601IgnoringTime,
|
||||
translateTimeRelativeToDifference,
|
||||
translateTimeRelativeToWeek,
|
||||
} from './lib/translate_timestamp';
|
||||
|
||||
/**
 * Loads a sample-data archive and bulk-inserts its documents into `index`.
 *
 * Timestamps in each doc are shifted relative to `nowReference` so the data
 * appears current: either preserving day-of-week/time-of-day or by plain
 * offset, depending on the data index config.
 *
 * Returns the promise from `loadData` (resolves with the number of docs
 * loaded); rejects if Elasticsearch reports bulk errors.
 */
function insertDataIntoIndex(
  dataIndexConfig,
  index,
  nowReference,
  request,
  server,
  callWithRequest
) {
  // Translate one timestamp value according to the data set's configuration.
  const translateTimestamp = timestamp =>
    dataIndexConfig.preserveDayOfWeekTimeOfDay
      ? translateTimeRelativeToWeek(
          timestamp,
          dataIndexConfig.currentTimeMarker,
          nowReference
        )
      : translateTimeRelativeToDifference(
          timestamp,
          dataIndexConfig.currentTimeMarker,
          nowReference
        );

  // Mutates (and returns) the doc, shifting every configured time field.
  const updateTimestamps = doc => {
    for (const timeFieldName of dataIndexConfig.timeFields) {
      if (doc[timeFieldName]) {
        doc[timeFieldName] = translateTimestamp(doc[timeFieldName]);
      }
    }
    return doc;
  };

  const bulkInsert = async docs => {
    const action = { index: { _index: index } };

    const body = [];
    for (const doc of docs) {
      // Bulk API expects alternating action/document pairs.
      body.push(action, updateTimestamps(doc));
    }

    const resp = await callWithRequest(request, 'bulk', { body });
    if (resp.errors) {
      server.log(
        ['warning'],
        `sample_data install errors while bulk inserting. Elasticsearch response: ${JSON.stringify(
          resp,
          null,
          ''
        )}`
      );
      // Throwing inside an async function rejects the returned promise.
      throw new Error(`Unable to load sample data into index "${index}", see kibana logs for details`);
    }
  };

  return loadData(dataIndexConfig.dataPath, bulkInsert);
}
|
||||
|
||||
/**
 * Hapi route definition for installing a sample data set.
 *
 * POST /api/sample_data/{id}
 *   - optional `now` query param (ISO date) anchors the time-shifted data.
 *
 * For each data index in the set: deletes any previous index, recreates it
 * with the configured mappings, then bulk-loads the docs. Finally bulk-creates
 * the set's saved objects (overwriting existing ones) and records usage.
 */
export const createInstallRoute = () => ({
  path: '/api/sample_data/{id}',
  method: 'POST',
  config: {
    validate: {
      query: Joi.object().keys({ now: Joi.date().iso() }),
      params: Joi.object()
        .keys({ id: Joi.string().required() })
        .required(),
    },
    handler: async (request, h) => {
      const { server, params, query } = request;

      const sampleDataset = server.getSampleDatasets().find(({ id }) => id === params.id);
      if (!sampleDataset) {
        return h.response().code(404);
      }

      const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');

      const now = query.now ? query.now : new Date();
      const nowReference = dateToIso8601IgnoringTime(now);

      const counts = {};
      for (let i = 0; i < sampleDataset.dataIndices.length; i++) {
        const dataIndexConfig = sampleDataset.dataIndices[i];
        const index = createIndexName(sampleDataset.id, dataIndexConfig.id);

        // clean up any old installation of dataset
        try {
          await callWithRequest(request, 'indices.delete', { index });
        } catch (err) {
          // ignore delete errors
        }

        try {
          const createIndexParams = {
            index: index,
            body: {
              settings: { index: { number_of_shards: 1, auto_expand_replicas: '0-1' } },
              mappings: { properties: dataIndexConfig.fields },
            },
          };
          await callWithRequest(request, 'indices.create', createIndexParams);
        } catch (err) {
          const errMsg = `Unable to create sample data index "${index}", error: ${err.message}`;
          server.log(['warning'], errMsg);
          // FIX: err.status may be undefined for non-HTTP errors; fall back to 500
          // so we never call h.response().code(undefined).
          return h.response(errMsg).code(err.status || 500);
        }

        try {
          const count = await insertDataIntoIndex(
            dataIndexConfig,
            index,
            nowReference,
            request,
            server,
            callWithRequest
          );
          counts[index] = count;
        } catch (err) {
          server.log(['warning'], `sample_data install errors while loading data. Error: ${err}`);
          return h.response(err.message).code(500);
        }
      }

      let createResults;
      try {
        createResults = await request
          .getSavedObjectsClient()
          .bulkCreate(sampleDataset.savedObjects, { overwrite: true });
      } catch (err) {
        server.log(['warning'], `bulkCreate failed, error: ${err.message}`);
        return Boom.badImplementation(
          `Unable to load kibana saved objects, see kibana logs for details`
        );
      }
      const errors = createResults.saved_objects.filter(savedObjectCreateResult => {
        return Boolean(savedObjectCreateResult.error);
      });
      if (errors.length > 0) {
        // FIX: joining the result objects directly produced "[object Object]";
        // serialize them so the log message is actually actionable.
        server.log(
          ['warning'],
          `sample_data install errors while loading saved objects. Errors: ${JSON.stringify(
            errors
          )}`
        );
        return h
          .response(`Unable to load kibana saved objects, see kibana logs for details`)
          .code(403);
      }

      // track the usage operation in a non-blocking way
      usage(request).addInstall(params.id);

      return h.response({
        elasticsearchIndicesCreated: counts,
        kibanaSavedObjectsLoaded: sampleDataset.savedObjects.length,
      });
    },
  },
});
|
|
@ -1,30 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { createIndexName } from './create_index_name';
|
||||
|
||||
// createIndexName() prefixes sample-data indices with "kibana_sample_data_",
// joining the data set id and data index id, and collapsing the pair when
// they are identical.
test('should include sampleDataSetId and dataIndexId in elasticsearch index name', async () => {
  const indexName = createIndexName('mySampleDataSetId', 'myDataIndexId');
  expect(indexName).toBe('kibana_sample_data_mySampleDataSetId_myDataIndexId');
});

test('should only include sampleDataSetId when sampleDataSetId and dataIndexId are identical', async () => {
  const indexName = createIndexName('flights', 'flights');
  expect(indexName).toBe('kibana_sample_data_flights');
});
|
|
@ -1,59 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { loadData } from './load_data';
|
||||
|
||||
// Loads a sample-data archive and reports both the number of docs handed to
// the bulk-insert callback and the count returned by loadData, so each test
// can verify the two totals agree with the known archive size.
const loadAndCount = async dataPath => {
  let docsSeen = 0;
  const returnedCount = await loadData(dataPath, docs => {
    docsSeen += docs.length;
  });
  return { docsSeen, returnedCount };
};

test('load flight data', async () => {
  const { docsSeen, returnedCount } = await loadAndCount(
    './src/legacy/server/sample_data/data_sets/flights/flights.json.gz'
  );
  expect(docsSeen).toBe(13059);
  expect(returnedCount).toBe(13059);
});

test('load log data', async () => {
  const { docsSeen, returnedCount } = await loadAndCount(
    './src/legacy/server/sample_data/data_sets/logs/logs.json.gz'
  );
  expect(docsSeen).toBe(14074);
  expect(returnedCount).toBe(14074);
});

test('load ecommerce data', async () => {
  const { docsSeen, returnedCount } = await loadAndCount(
    './src/legacy/server/sample_data/data_sets/ecommerce/ecommerce.json.gz'
  );
  expect(docsSeen).toBe(4675);
  expect(returnedCount).toBe(4675);
});
|
|
@ -1,105 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { translateTimeRelativeToWeek } from './translate_timestamp';
|
||||
|
||||
describe('translateTimeRelativeToWeek', () => {
  const sourceReference = '2018-01-02T00:00:00'; // Tuesday
  const targetReference = '2018-04-25T18:24:58.650'; // Wednesday

  // Each inner test checks a source day that falls before / on / after the
  // targetReference's day of week; titles are fixed per position.
  const titles = [
    'should properly adjust timestamp when day is before targetReference day of week',
    'should properly adjust timestamp when day is same as targetReference day of week',
    'should properly adjust timestamp when day is after targetReference day of week',
  ];

  // [group title, [[source, expected translation], ...]] — sources are
  // Monday / Wednesday / Friday relative to sourceReference's week.
  const groups = [
    [
      '2 weeks before',
      [
        ['2017-12-18T23:50:00', '2018-04-09T23:50:00'],
        ['2017-12-20T23:50:00', '2018-04-11T23:50:00'],
        ['2017-12-22T16:16:50', '2018-04-13T16:16:50'],
      ],
    ],
    [
      'week before',
      [
        ['2017-12-25T23:50:00', '2018-04-16T23:50:00'],
        ['2017-12-27T23:50:00', '2018-04-18T23:50:00'],
        ['2017-12-29T16:16:50', '2018-04-20T16:16:50'],
      ],
    ],
    [
      'same week',
      [
        ['2018-01-01T23:50:00', '2018-04-23T23:50:00'],
        ['2018-01-03T23:50:00', '2018-04-25T23:50:00'],
        ['2018-01-05T16:16:50', '2018-04-27T16:16:50'],
      ],
    ],
    [
      'week after',
      [
        ['2018-01-08T23:50:00', '2018-04-30T23:50:00'],
        ['2018-01-10T23:50:00', '2018-05-02T23:50:00'],
        ['2018-01-12T16:16:50', '2018-05-04T16:16:50'],
      ],
    ],
  ];

  for (const [groupTitle, cases] of groups) {
    describe(groupTitle, () => {
      cases.forEach(([source, expected], position) => {
        test(titles[position], () => {
          const timestamp = translateTimeRelativeToWeek(source, sourceReference, targetReference);
          expect(timestamp).toBe(expected);
        });
      });
    });
  }
});
|
|
@ -1,92 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import { createIndexName } from './lib/create_index_name';
|
||||
|
||||
const NOT_INSTALLED = 'not_installed';
|
||||
const INSTALLED = 'installed';
|
||||
const UNKNOWN = 'unknown';
|
||||
|
||||
/**
 * Hapi route definition listing registered sample data sets.
 *
 * GET /api/sample_data
 *
 * Returns a public summary of every registered data set, each annotated with
 * an install status: INSTALLED when all data indices exist with docs and the
 * overview dashboard saved object is present; NOT_INSTALLED when any piece is
 * missing; UNKNOWN (plus statusMsg) when a check fails unexpectedly.
 */
export const createListRoute = () => ({
  path: '/api/sample_data',
  method: 'GET',
  config: {
    handler: async request => {
      const { callWithRequest } = request.server.plugins.elasticsearch.getCluster('data');

      // Expose only the public fields of each registered data set (drops the
      // raw documents, field mappings, and saved objects themselves).
      const sampleDatasets = request.server.getSampleDatasets().map(registeredDataset => ({
        id: registeredDataset.id,
        name: registeredDataset.name,
        description: registeredDataset.description,
        previewImagePath: registeredDataset.previewImagePath,
        darkPreviewImagePath: registeredDataset.darkPreviewImagePath,
        overviewDashboard: registeredDataset.overviewDashboard,
        appLinks: registeredDataset.appLinks,
        defaultIndex: registeredDataset.defaultIndex,
        dataIndices: registeredDataset.dataIndices.map(({ id }) => ({ id })),
      }));

      // Determine install status for every data set concurrently; each
      // callback mutates its own summary object in place.
      const isInstalledPromises = sampleDatasets.map(async sampleDataset => {
        for (const dataIndexConfig of sampleDataset.dataIndices) {
          const index = createIndexName(sampleDataset.id, dataIndexConfig.id);
          try {
            const indexExists = await callWithRequest(request, 'indices.exists', { index: index });
            if (!indexExists) {
              sampleDataset.status = NOT_INSTALLED;
              return;
            }

            // An empty index also counts as not installed.
            const { count } = await callWithRequest(request, 'count', { index: index });
            if (count === 0) {
              sampleDataset.status = NOT_INSTALLED;
              return;
            }
          } catch (err) {
            sampleDataset.status = UNKNOWN;
            sampleDataset.statusMsg = err.message;
            return;
          }
        }

        try {
          await request.getSavedObjectsClient().get('dashboard', sampleDataset.overviewDashboard);
        } catch (err) {
          // savedObjectClient.get() throws a Boom error when object is not found.
          if (_.get(err, 'output.statusCode') === 404) {
            sampleDataset.status = NOT_INSTALLED;
            return;
          }

          sampleDataset.status = UNKNOWN;
          sampleDataset.statusMsg = err.message;
          return;
        }

        sampleDataset.status = INSTALLED;
      });

      await Promise.all(isInstalledPromises);
      return sampleDatasets;
    },
  },
});
|
|
@ -1,80 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import Joi from 'joi';
|
||||
import { usage } from '../usage';
|
||||
import { createIndexName } from './lib/create_index_name';
|
||||
|
||||
/**
 * Hapi route definition for uninstalling a sample data set.
 *
 * DELETE /api/sample_data/{id}
 *
 * Deletes every Elasticsearch data index belonging to the set, then deletes
 * the set's saved objects (tolerating 404s for objects the user already
 * removed via the UI), and records the uninstall for usage telemetry.
 */
export const createUninstallRoute = () => ({
  path: '/api/sample_data/{id}',
  method: 'DELETE',
  config: {
    validate: {
      params: Joi.object()
        .keys({
          id: Joi.string().required(),
        })
        .required(),
    },
    handler: async (request, h) => {
      const { server, params } = request;
      const sampleDataset = server.getSampleDatasets().find(({ id }) => id === params.id);

      if (!sampleDataset) {
        return h.response().code(404);
      }

      const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');

      for (const dataIndexConfig of sampleDataset.dataIndices) {
        const index = createIndexName(sampleDataset.id, dataIndexConfig.id);

        try {
          await callWithRequest(request, 'indices.delete', { index: index });
        } catch (err) {
          return h
            .response(`Unable to delete sample data index "${index}", error: ${err.message}`)
            .code(err.status);
        }
      }

      const deletePromises = sampleDataset.savedObjects.map(({ type, id }) =>
        request.getSavedObjectsClient().delete(type, id)
      );

      try {
        await Promise.all(deletePromises);
      } catch (err) {
        // ignore 404s since users could have deleted some of the saved objects via the UI
        if (_.get(err, 'output.statusCode') !== 404) {
          return h
            .response(`Unable to delete sample dataset saved objects, error: ${err.message}`)
            .code(403);
        }
      }

      // track the usage operation in a non-blocking way
      usage(request).addUninstall(params.id);

      return {};
    },
  },
});
|
|
@ -1,146 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import Joi from 'joi';
|
||||
import { sampleDataSchema } from './data_set_schema';
|
||||
import { createListRoute, createInstallRoute, createUninstallRoute } from './routes';
|
||||
import { flightsSpecProvider, logsSpecProvider, ecommerceSpecProvider } from './data_sets';
|
||||
import { makeSampleDataUsageCollector } from './usage';
|
||||
|
||||
/**
 * Legacy server mixin for sample data.
 *
 * Registers the list/install/uninstall HTTP routes, decorates the server with
 * the sample-data registry API (register, query, and amend data sets), then
 * registers the three built-in data sets and the usage collector.
 */
export function sampleDataMixin(kbnServer, server) {
  server.route(createListRoute());
  server.route(createInstallRoute());
  server.route(createUninstallRoute());

  // In-memory registry of validated sample data set specs.
  const sampleDatasets = [];

  // Look up a registered spec by id; undefined when not registered.
  const findSampleDataset = datasetId => sampleDatasets.find(({ id }) => id === datasetId);

  server.decorate('server', 'getSampleDatasets', () => sampleDatasets);

  server.decorate('server', 'registerSampleDataset', specProvider => {
    const { error, value } = Joi.validate(specProvider(server), sampleDataSchema);

    if (error) {
      throw new Error(`Unable to register sample dataset spec because it's invalid. ${error}`);
    }

    // True when the spec's savedObjects list contains an object of the given
    // type and id — used to ensure the spec is internally consistent.
    const hasSavedObject = (type, id) =>
      value.savedObjects.some(
        savedObjectJson => savedObjectJson.type === type && savedObjectJson.id === id
      );

    if (!hasSavedObject('index-pattern', value.defaultIndex)) {
      throw new Error(
        `Unable to register sample dataset spec, defaultIndex: "${value.defaultIndex}" does not exist in savedObjects list.`
      );
    }

    if (!hasSavedObject('dashboard', value.overviewDashboard)) {
      throw new Error(
        `Unable to register sample dataset spec, overviewDashboard: "${value.overviewDashboard}" does not exist in savedObjects list.`
      );
    }

    sampleDatasets.push(value);
  });

  server.decorate('server', 'addSavedObjectsToSampleDataset', (id, savedObjects) => {
    const sampleDataset = findSampleDataset(id);

    if (!sampleDataset) {
      throw new Error(`Unable to find sample dataset with id: ${id}`);
    }

    sampleDataset.savedObjects = sampleDataset.savedObjects.concat(savedObjects);
  });

  server.decorate('server', 'addAppLinksToSampleDataset', (id, appLinks) => {
    const sampleDataset = findSampleDataset(id);

    if (!sampleDataset) {
      throw new Error(`Unable to find sample dataset with id: ${id}`);
    }

    sampleDataset.appLinks = sampleDataset.appLinks.concat(appLinks);
  });

  server.decorate(
    'server',
    'replacePanelInSampleDatasetDashboard',
    ({
      sampleDataId,
      dashboardId,
      oldEmbeddableId,
      embeddableId,
      embeddableType,
      embeddableConfig = {},
    }) => {
      const sampleDataset = findSampleDataset(sampleDataId);
      if (!sampleDataset) {
        throw new Error(`Unable to find sample dataset with id: ${sampleDataId}`);
      }

      const dashboard = sampleDataset.savedObjects.find(
        savedObject => savedObject.id === dashboardId && savedObject.type === 'dashboard'
      );
      if (!dashboard) {
        throw new Error(`Unable to find dashboard with id: ${dashboardId}`);
      }

      try {
        // Re-point the dashboard reference at the replacement embeddable…
        const reference = dashboard.references.find(({ id }) => id === oldEmbeddableId);
        if (!reference) {
          throw new Error(`Unable to find reference for embeddable: ${oldEmbeddableId}`);
        }
        reference.type = embeddableType;
        reference.id = embeddableId;

        // …then swap in the new embeddable config on the matching panel.
        const panels = JSON.parse(dashboard.attributes.panelsJSON);
        const panel = panels.find(({ panelRefName }) => panelRefName === reference.name);
        if (!panel) {
          throw new Error(`Unable to find panel for reference: ${reference.name}`);
        }
        panel.embeddableConfig = embeddableConfig;
        dashboard.attributes.panelsJSON = JSON.stringify(panels);
      } catch (error) {
        throw new Error(
          `Unable to replace panel with embeddable ${oldEmbeddableId}, error: ${error}`
        );
      }
    }
  );

  server.registerSampleDataset(flightsSpecProvider);
  server.registerSampleDataset(logsSpecProvider);
  server.registerSampleDataset(ecommerceSpecProvider);

  makeSampleDataUsageCollector(server);
}
|
|
@ -1,13 +1,13 @@
|
|||
# home plugin
|
||||
Moves the legacy `ui/registry/feature_catalogue` module for registering "features" that should be shown in the home page's feature catalogue to a service within a "home" plugin. The feature catalogue refered to here should not be confused with the "feature" plugin for registering features used to derive UI capabilities for feature controls.
|
||||
|
||||
# Feature catalogue (public service)
|
||||
## Feature catalogue (public service)
|
||||
|
||||
Replaces the legacy `ui/registry/feature_catalogue` module for registering "features" that should be showed in the home
|
||||
page's feature catalogue. This should not be confused with the "feature" plugin for registering features used to derive
|
||||
UI capabilities for feature controls.
|
||||
|
||||
## Example registration
|
||||
### Example registration
|
||||
|
||||
```ts
|
||||
// For legacy plugins
|
||||
|
@ -27,3 +27,28 @@ class MyPlugin {
|
|||
```
|
||||
|
||||
Note that the old module supported providing a Angular DI function to receive Angular dependencies. This is no longer supported as we migrate away from Angular and will be removed in 8.0.
|
||||
|
||||
## Sample data
|
||||
|
||||
Replaces the sample data mixin putting functions on the global `server` object.
|
||||
|
||||
### What happens when a user installs a sample data set?
|
||||
1) Kibana deletes existing Elastic search indicies for the sample data set if they exist from previous installs.
|
||||
2) Kibana creates Elasticsearch indicies with the provided field mappings.
|
||||
3) Kibana uses bulk insert to ingest the new-line delimited json into the Elasticsearch index. Kibana migrates timestamps provided in new-line delimited json to the current time frame for any date field defined in `timeFields`
|
||||
4) Kibana will install all saved objects for sample data set. This will override any saved objects previouslly installed for sample data set.
|
||||
|
||||
Elasticsearch index names are prefixed with `kibana_sample_data_`. For more details see [createIndexName](/src/plugins/home/server/services/sample_data/lib/create_index_name.js)
|
||||
|
||||
Sample data sets typically provide data that spans 5 weeks from the past and 5 weeks into the future so users see data relative to `now` for a few weeks after installing sample data sets.
|
||||
|
||||
### Adding new sample data sets
|
||||
Use [existing sample data sets](/src/plugins/home/server/services/sample_data/data_sets) as examples.
|
||||
To avoid bloating the Kibana distribution, keep data set size to a minimum.
|
||||
|
||||
Follow the steps below to add new Sample data sets to Kibana.
|
||||
1) Create new-line delimited json containing sample data.
|
||||
2) Create file with Elasticsearch field mappings for sample data indices.
|
||||
3) Create Kibana saved objects for sample data including index-patterns, visualizations, and dashboards. The best way to extract the saved objects is from the Kibana management -> saved objects [export UI](https://www.elastic.co/guide/en/kibana/current/managing-saved-objects.html#_export)
|
||||
4) Define sample data spec conforming to [Data Set Schema](/src/plugins/home/server/services/sample_data/lib/sample_dataset_registry_types.ts).
|
||||
5) Register sample data by calling `plguins.home.sampleData.registerSampleDataset(yourSpecProvider)` in your `setup` method where `yourSpecProvider` is a function that returns an object containing your sample data spec from step 4.
|
||||
|
|
|
@ -2,5 +2,6 @@
|
|||
"id": "home",
|
||||
"version": "kibana",
|
||||
"server": true,
|
||||
"ui": true
|
||||
"ui": true,
|
||||
"optionalPlugins": ["usage_collection"]
|
||||
}
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
export { HomeServerPluginSetup, HomeServerPluginStart } from './plugin';
|
||||
export { TutorialProvider } from './services';
|
||||
export { SampleDatasetProvider, SampleDataRegistrySetup } from './services';
|
||||
import { PluginInitializerContext } from 'src/core/server';
|
||||
import { HomeServerPlugin } from './plugin';
|
||||
|
||||
export const plugin = () => new HomeServerPlugin();
|
||||
export const plugin = (initContext: PluginInitializerContext) => new HomeServerPlugin(initContext);
|
||||
|
|
|
@ -17,8 +17,11 @@
|
|||
* under the License.
|
||||
*/
|
||||
import { tutorialsRegistryMock } from './services/tutorials/tutorials_registry.mock';
|
||||
import { sampleDataRegistryMock } from './services/sample_data/sample_data_registry.mock';
|
||||
|
||||
export const registryMock = tutorialsRegistryMock.create();
|
||||
export const registryForTutorialsMock = tutorialsRegistryMock.create();
|
||||
export const registryForSampleDataMock = sampleDataRegistryMock.create();
|
||||
jest.doMock('./services', () => ({
|
||||
TutorialsRegistry: jest.fn(() => registryMock),
|
||||
TutorialsRegistry: jest.fn(() => registryForTutorialsMock),
|
||||
SampleDataRegistry: jest.fn(() => registryForSampleDataMock),
|
||||
}));
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import { registryMock } from './plugin.test.mocks';
|
||||
import { registryForTutorialsMock, registryForSampleDataMock } from './plugin.test.mocks';
|
||||
import { HomeServerPlugin } from './plugin';
|
||||
import { coreMock } from '../../../core/server/mocks';
|
||||
import { CoreSetup } from '../../../core/server';
|
||||
|
@ -26,26 +26,41 @@ type MockedKeys<T> = { [P in keyof T]: jest.Mocked<T[P]> };
|
|||
|
||||
describe('HomeServerPlugin', () => {
|
||||
beforeEach(() => {
|
||||
registryMock.setup.mockClear();
|
||||
registryMock.start.mockClear();
|
||||
registryForTutorialsMock.setup.mockClear();
|
||||
registryForTutorialsMock.start.mockClear();
|
||||
registryForSampleDataMock.setup.mockClear();
|
||||
registryForSampleDataMock.start.mockClear();
|
||||
});
|
||||
|
||||
describe('setup', () => {
|
||||
const mockCoreSetup: MockedKeys<CoreSetup> = coreMock.createSetup();
|
||||
const initContext = coreMock.createPluginInitializerContext();
|
||||
|
||||
test('wires up and returns registerTutorial and addScopedTutorialContextFactory', () => {
|
||||
const setup = new HomeServerPlugin().setup(mockCoreSetup);
|
||||
test('wires up tutorials provider service and returns registerTutorial and addScopedTutorialContextFactory', () => {
|
||||
const setup = new HomeServerPlugin(initContext).setup(mockCoreSetup, {});
|
||||
expect(setup).toHaveProperty('tutorials');
|
||||
expect(setup.tutorials).toHaveProperty('registerTutorial');
|
||||
expect(setup.tutorials).toHaveProperty('addScopedTutorialContextFactory');
|
||||
});
|
||||
|
||||
test('wires up sample data provider service and returns registerTutorial and addScopedTutorialContextFactory', () => {
|
||||
const setup = new HomeServerPlugin(initContext).setup(mockCoreSetup, {});
|
||||
expect(setup).toHaveProperty('sampleData');
|
||||
expect(setup.sampleData).toHaveProperty('registerSampleDataset');
|
||||
expect(setup.sampleData).toHaveProperty('getSampleDatasets');
|
||||
expect(setup.sampleData).toHaveProperty('addSavedObjectsToSampleDataset');
|
||||
expect(setup.sampleData).toHaveProperty('addAppLinksToSampleDataset');
|
||||
expect(setup.sampleData).toHaveProperty('replacePanelInSampleDatasetDashboard');
|
||||
});
|
||||
});
|
||||
|
||||
describe('start', () => {
|
||||
const initContext = coreMock.createPluginInitializerContext();
|
||||
test('is defined', () => {
|
||||
const start = new HomeServerPlugin().start();
|
||||
const start = new HomeServerPlugin(initContext).start();
|
||||
expect(start).toBeDefined();
|
||||
expect(start).toHaveProperty('tutorials');
|
||||
expect(start).toHaveProperty('sampleData');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -16,21 +16,37 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { CoreSetup, Plugin } from 'src/core/server';
|
||||
import { TutorialsRegistry, TutorialsRegistrySetup, TutorialsRegistryStart } from './services';
|
||||
import { CoreSetup, Plugin, PluginInitializerContext } from 'src/core/server';
|
||||
import {
|
||||
TutorialsRegistry,
|
||||
TutorialsRegistrySetup,
|
||||
TutorialsRegistryStart,
|
||||
SampleDataRegistry,
|
||||
SampleDataRegistrySetup,
|
||||
SampleDataRegistryStart,
|
||||
} from './services';
|
||||
import { UsageCollectionSetup } from '../../usage_collection/server';
|
||||
|
||||
interface HomeServerPluginSetupDependencies {
|
||||
usage_collection?: UsageCollectionSetup;
|
||||
}
|
||||
|
||||
export class HomeServerPlugin implements Plugin<HomeServerPluginSetup, HomeServerPluginStart> {
|
||||
constructor(private readonly initContext: PluginInitializerContext) {}
|
||||
private readonly tutorialsRegistry = new TutorialsRegistry();
|
||||
private readonly sampleDataRegistry = new SampleDataRegistry(this.initContext);
|
||||
|
||||
public setup(core: CoreSetup) {
|
||||
public setup(core: CoreSetup, plugins: HomeServerPluginSetupDependencies): HomeServerPluginSetup {
|
||||
return {
|
||||
tutorials: { ...this.tutorialsRegistry.setup(core) },
|
||||
sampleData: { ...this.sampleDataRegistry.setup(core, plugins.usage_collection) },
|
||||
};
|
||||
}
|
||||
|
||||
public start() {
|
||||
public start(): HomeServerPluginStart {
|
||||
return {
|
||||
tutorials: { ...this.tutorialsRegistry.start() },
|
||||
sampleData: { ...this.sampleDataRegistry.start() },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@ -38,9 +54,11 @@ export class HomeServerPlugin implements Plugin<HomeServerPluginSetup, HomeServe
|
|||
/** @public */
|
||||
export interface HomeServerPluginSetup {
|
||||
tutorials: TutorialsRegistrySetup;
|
||||
sampleData: SampleDataRegistrySetup;
|
||||
}
|
||||
|
||||
/** @public */
|
||||
export interface HomeServerPluginStart {
|
||||
tutorials: TutorialsRegistryStart;
|
||||
sampleData: SampleDataRegistryStart;
|
||||
}
|
||||
|
|
|
@ -33,3 +33,11 @@ export {
|
|||
TutorialContextFactory,
|
||||
ScopedTutorialContextFactory,
|
||||
} from './tutorials';
|
||||
|
||||
export {
|
||||
SampleDataRegistry,
|
||||
SampleDataRegistrySetup,
|
||||
SampleDataRegistryStart,
|
||||
} from './sample_data';
|
||||
|
||||
export { SampleDatasetSchema, SampleDatasetProvider } from './sample_data';
|
||||
|
|
|
@ -21,19 +21,25 @@ import path from 'path';
|
|||
import { i18n } from '@kbn/i18n';
|
||||
import { getSavedObjects } from './saved_objects';
|
||||
import { fieldMappings } from './field_mappings';
|
||||
import { SampleDatasetSchema, AppLinkSchema } from '../../lib/sample_dataset_registry_types';
|
||||
|
||||
export function ecommerceSpecProvider() {
|
||||
const ecommerceName = i18n.translate('home.sampleData.ecommerceSpecTitle', {
|
||||
defaultMessage: 'Sample eCommerce orders',
|
||||
});
|
||||
const ecommerceDescription = i18n.translate('home.sampleData.ecommerceSpecDescription', {
|
||||
defaultMessage: 'Sample data, visualizations, and dashboards for tracking eCommerce orders.',
|
||||
});
|
||||
const initialAppLinks = [] as AppLinkSchema[];
|
||||
|
||||
export const ecommerceSpecProvider = function(): SampleDatasetSchema {
|
||||
return {
|
||||
id: 'ecommerce',
|
||||
name: i18n.translate('server.sampleData.ecommerceSpecTitle', {
|
||||
defaultMessage: 'Sample eCommerce orders',
|
||||
}),
|
||||
description: i18n.translate('server.sampleData.ecommerceSpecDescription', {
|
||||
defaultMessage: 'Sample data, visualizations, and dashboards for tracking eCommerce orders.',
|
||||
}),
|
||||
name: ecommerceName,
|
||||
description: ecommerceDescription,
|
||||
previewImagePath: '/plugins/kibana/home/sample_data_resources/ecommerce/dashboard.png',
|
||||
darkPreviewImagePath: '/plugins/kibana/home/sample_data_resources/ecommerce/dashboard_dark.png',
|
||||
overviewDashboard: '722b74f0-b882-11e8-a6d9-e546fe2bba5f',
|
||||
appLinks: initialAppLinks,
|
||||
defaultIndex: 'ff959d40-b880-11e8-a6d9-e546fe2bba5f',
|
||||
savedObjects: getSavedObjects(),
|
||||
dataIndices: [
|
||||
|
@ -46,5 +52,6 @@ export function ecommerceSpecProvider() {
|
|||
preserveDayOfWeekTimeOfDay: true,
|
||||
},
|
||||
],
|
||||
status: 'not_installed',
|
||||
};
|
||||
}
|
||||
};
|
File diff suppressed because one or more lines are too long
|
@ -21,19 +21,25 @@ import path from 'path';
|
|||
import { i18n } from '@kbn/i18n';
|
||||
import { getSavedObjects } from './saved_objects';
|
||||
import { fieldMappings } from './field_mappings';
|
||||
import { SampleDatasetSchema, AppLinkSchema } from '../../lib/sample_dataset_registry_types';
|
||||
|
||||
export function flightsSpecProvider() {
|
||||
const flightsName = i18n.translate('home.sampleData.flightsSpecTitle', {
|
||||
defaultMessage: 'Sample flight data',
|
||||
});
|
||||
const flightsDescription = i18n.translate('home.sampleData.flightsSpecDescription', {
|
||||
defaultMessage: 'Sample data, visualizations, and dashboards for monitoring flight routes.',
|
||||
});
|
||||
const initialAppLinks = [] as AppLinkSchema[];
|
||||
|
||||
export const flightsSpecProvider = function(): SampleDatasetSchema {
|
||||
return {
|
||||
id: 'flights',
|
||||
name: i18n.translate('server.sampleData.flightsSpecTitle', {
|
||||
defaultMessage: 'Sample flight data',
|
||||
}),
|
||||
description: i18n.translate('server.sampleData.flightsSpecDescription', {
|
||||
defaultMessage: 'Sample data, visualizations, and dashboards for monitoring flight routes.',
|
||||
}),
|
||||
name: flightsName,
|
||||
description: flightsDescription,
|
||||
previewImagePath: '/plugins/kibana/home/sample_data_resources/flights/dashboard.png',
|
||||
darkPreviewImagePath: '/plugins/kibana/home/sample_data_resources/flights/dashboard_dark.png',
|
||||
overviewDashboard: '7adfa750-4c81-11e8-b3d7-01146121b73d',
|
||||
appLinks: initialAppLinks,
|
||||
defaultIndex: 'd3d7af60-4c81-11e8-b3d7-01146121b73d',
|
||||
savedObjects: getSavedObjects(),
|
||||
dataIndices: [
|
||||
|
@ -46,5 +52,6 @@ export function flightsSpecProvider() {
|
|||
preserveDayOfWeekTimeOfDay: true,
|
||||
},
|
||||
],
|
||||
status: 'not_installed',
|
||||
};
|
||||
}
|
||||
};
|
|
@ -17,17 +17,21 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
/* eslint max-len: 0 */
|
||||
/* eslint-disable */
|
||||
|
||||
export const getSavedObjects = () => [
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { SavedObject } from 'kibana/server';
|
||||
|
||||
export const getSavedObjects = (): SavedObject[] => [
|
||||
{
|
||||
id: 'aeb212e0-4c84-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.controlsTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.controlsTitle', {
|
||||
defaultMessage: '[Flights] Controls',
|
||||
}),
|
||||
visState:
|
||||
|
@ -39,15 +43,16 @@ export const getSavedObjects = () => [
|
|||
searchSourceJSON: '{}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: 'c8fc3d30-4c87-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.flightCountAndAverageTicketPriceTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.flightCountAndAverageTicketPriceTitle', {
|
||||
defaultMessage: '[Flights] Flight Count and Average Ticket Price',
|
||||
}),
|
||||
visState:
|
||||
|
@ -61,15 +66,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '571aaf70-4c88-11e8-b3d7-01146121b73d',
|
||||
type: 'search',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.flightLogTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.flightLogTitle', {
|
||||
defaultMessage: '[Flights] Flight Log',
|
||||
}),
|
||||
description: '',
|
||||
|
@ -92,15 +98,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","highlightAll":true,"version":true,"query":{"language":"kuery","query":""},"filter":[]}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '8f4d0c00-4c86-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.airlineCarrierTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.airlineCarrierTitle', {
|
||||
defaultMessage: '[Flights] Airline Carrier',
|
||||
}),
|
||||
visState:
|
||||
|
@ -113,15 +120,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: 'f8290060-4c88-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.delayTypeTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.delayTypeTitle', {
|
||||
defaultMessage: '[Flights] Delay Type',
|
||||
}),
|
||||
visState:
|
||||
|
@ -134,15 +142,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: 'bcb63b50-4c89-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.delaysAndCancellationsTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.delaysAndCancellationsTitle', {
|
||||
defaultMessage: '[Flights] Delays & Cancellations',
|
||||
}),
|
||||
visState:
|
||||
|
@ -154,15 +163,16 @@ export const getSavedObjects = () => [
|
|||
searchSourceJSON: '{}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '9886b410-4c8b-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.delayBucketsTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.delayBucketsTitle', {
|
||||
defaultMessage: '[Flights] Delay Buckets',
|
||||
}),
|
||||
visState:
|
||||
|
@ -175,15 +185,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[{"meta":{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","negate":true,"disabled":false,"alias":null,"type":"phrase","key":"FlightDelayMin","value":"0","params":{"query":0,"type":"phrase"}},"query":{"match":{"FlightDelayMin":{"query":0,"type":"phrase"}}},"$state":{"store":"appState"}}],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '76e3c090-4c8c-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.flightDelaysTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.flightDelaysTitle', {
|
||||
defaultMessage: '[Flights] Flight Delays',
|
||||
}),
|
||||
visState:
|
||||
|
@ -196,15 +207,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '707665a0-4c8c-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.flightCancellationsTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.flightCancellationsTitle', {
|
||||
defaultMessage: '[Flights] Flight Cancellations',
|
||||
}),
|
||||
visState:
|
||||
|
@ -217,15 +229,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '293b5a30-4c8f-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.destinationWeatherTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.destinationWeatherTitle', {
|
||||
defaultMessage: '[Flights] Destination Weather',
|
||||
}),
|
||||
visState:
|
||||
|
@ -238,15 +251,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '129be430-4c93-11e8-b3d7-01146121b73d',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.markdownInstructionsTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.markdownInstructionsTitle', {
|
||||
defaultMessage: '[Flights] Markdown Instructions',
|
||||
}),
|
||||
visState:
|
||||
|
@ -258,15 +272,16 @@ export const getSavedObjects = () => [
|
|||
searchSourceJSON: '{}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '334084f0-52fd-11e8-a160-89cc2ad9e8e2',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.originCountryTicketPricesTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.originCountryTicketPricesTitle', {
|
||||
defaultMessage: '[Flights] Origin Country Ticket Prices',
|
||||
}),
|
||||
visState:
|
||||
|
@ -279,15 +294,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: 'f8283bf0-52fd-11e8-a160-89cc2ad9e8e2',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.totalFlightDelaysTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.totalFlightDelaysTitle', {
|
||||
defaultMessage: '[Flights] Total Flight Delays',
|
||||
}),
|
||||
visState:
|
||||
|
@ -301,15 +317,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[{"meta":{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","negate":false,"disabled":false,"alias":null,"type":"phrase","key":"FlightDelay","value":"true","params":{"query":true,"type":"phrase"}},"query":{"match":{"FlightDelay":{"query":true,"type":"phrase"}}},"$state":{"store":"appState"}}],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '08884800-52fe-11e8-a160-89cc2ad9e8e2',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.totalFlightCancellationsTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.totalFlightCancellationsTitle', {
|
||||
defaultMessage: '[Flights] Total Flight Cancellations',
|
||||
}),
|
||||
visState:
|
||||
|
@ -323,15 +340,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[{"meta":{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","negate":false,"disabled":false,"alias":null,"type":"phrase","key":"Cancelled","value":"true","params":{"query":true,"type":"phrase"}},"query":{"match":{"Cancelled":{"query":true,"type":"phrase"}}},"$state":{"store":"appState"}}],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: 'e6944e50-52fe-11e8-a160-89cc2ad9e8e2',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.originCountryTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.originCountryTitle', {
|
||||
defaultMessage: '[Flights] Origin Country vs. Destination Country',
|
||||
}),
|
||||
visState:
|
||||
|
@ -345,15 +363,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '01c413e0-5395-11e8-99bf-1ba7b1bdaa61',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.totalFlightsTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.totalFlightsTitle', {
|
||||
defaultMessage: '[Flights] Total Flights',
|
||||
}),
|
||||
visState:
|
||||
|
@ -366,15 +385,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '2edf78b0-5395-11e8-99bf-1ba7b1bdaa61',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.averageTicketPriceTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.averageTicketPriceTitle', {
|
||||
defaultMessage: '[Flights] Average Ticket Price',
|
||||
}),
|
||||
visState:
|
||||
|
@ -387,15 +407,16 @@ export const getSavedObjects = () => [
|
|||
'{"index":"d3d7af60-4c81-11e8-b3d7-01146121b73d","filter":[],"query":{"query":"","language":"kuery"}}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: 'ed78a660-53a0-11e8-acbd-0be0ad9d822b',
|
||||
type: 'visualization',
|
||||
updated_at: '2018-05-09T15:55:51.195Z',
|
||||
version: 3,
|
||||
version: '3',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.airportConnectionsTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.airportConnectionsTitle', {
|
||||
defaultMessage: '[Flights] Airport Connections (Hover Over Airport)',
|
||||
}),
|
||||
visState:
|
||||
|
@ -407,12 +428,13 @@ export const getSavedObjects = () => [
|
|||
searchSourceJSON: '{"query":{"query":"","language":"kuery"},"filter":[]}',
|
||||
},
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: 'd3d7af60-4c81-11e8-b3d7-01146121b73d',
|
||||
type: 'index-pattern',
|
||||
updated_at: '2018-05-09T15:49:03.736Z',
|
||||
version: 1,
|
||||
version: '1',
|
||||
migrationVersion: {},
|
||||
attributes: {
|
||||
title: 'kibana_sample_data_flights',
|
||||
|
@ -422,12 +444,13 @@ export const getSavedObjects = () => [
|
|||
fieldFormatMap:
|
||||
'{"hour_of_day":{"id":"number","params":{"pattern":"00"}},"AvgTicketPrice":{"id":"number","params":{"pattern":"$0,0.[00]"}}}',
|
||||
},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '7adfa750-4c81-11e8-b3d7-01146121b73d',
|
||||
type: 'dashboard',
|
||||
updated_at: '2018-05-09T15:59:04.578Z',
|
||||
version: 4,
|
||||
version: '4',
|
||||
references: [
|
||||
{
|
||||
name: 'panel_0',
|
||||
|
@ -524,12 +547,12 @@ export const getSavedObjects = () => [
|
|||
dashboard: '7.0.0',
|
||||
},
|
||||
attributes: {
|
||||
title: i18n.translate('server.sampleData.flightsSpec.globalFlightDashboardTitle', {
|
||||
title: i18n.translate('home.sampleData.flightsSpec.globalFlightDashboardTitle', {
|
||||
defaultMessage: '[Flights] Global Flight Dashboard',
|
||||
}),
|
||||
hits: 0,
|
||||
description: i18n.translate(
|
||||
'server.sampleData.flightsSpec.globalFlightDashboardDescription',
|
||||
'home.sampleData.flightsSpec.globalFlightDashboardDescription',
|
||||
{
|
||||
defaultMessage:
|
||||
'Analyze mock flight data for ES-Air, Logstash Airways, Kibana Airlines and JetBeats',
|
|
@ -21,19 +21,25 @@ import path from 'path';
|
|||
import { i18n } from '@kbn/i18n';
|
||||
import { getSavedObjects } from './saved_objects';
|
||||
import { fieldMappings } from './field_mappings';
|
||||
import { SampleDatasetSchema, AppLinkSchema } from '../../lib/sample_dataset_registry_types';
|
||||
|
||||
export function logsSpecProvider() {
|
||||
const logsName = i18n.translate('home.sampleData.logsSpecTitle', {
|
||||
defaultMessage: 'Sample web logs',
|
||||
});
|
||||
const logsDescription = i18n.translate('home.sampleData.logsSpecDescription', {
|
||||
defaultMessage: 'Sample data, visualizations, and dashboards for monitoring web logs.',
|
||||
});
|
||||
const initialAppLinks = [] as AppLinkSchema[];
|
||||
|
||||
export const logsSpecProvider = function(): SampleDatasetSchema {
|
||||
return {
|
||||
id: 'logs',
|
||||
name: i18n.translate('server.sampleData.logsSpecTitle', {
|
||||
defaultMessage: 'Sample web logs',
|
||||
}),
|
||||
description: i18n.translate('server.sampleData.logsSpecDescription', {
|
||||
defaultMessage: 'Sample data, visualizations, and dashboards for monitoring web logs.',
|
||||
}),
|
||||
name: logsName,
|
||||
description: logsDescription,
|
||||
previewImagePath: '/plugins/kibana/home/sample_data_resources/logs/dashboard.png',
|
||||
darkPreviewImagePath: '/plugins/kibana/home/sample_data_resources/logs/dashboard_dark.png',
|
||||
overviewDashboard: 'edf84fe0-e1a0-11e7-b6d5-4dc382ef7f5b',
|
||||
appLinks: initialAppLinks,
|
||||
defaultIndex: '90943e30-9a47-11e8-b64d-95841ca0b247',
|
||||
savedObjects: getSavedObjects(),
|
||||
dataIndices: [
|
||||
|
@ -46,5 +52,6 @@ export function logsSpecProvider() {
|
|||
preserveDayOfWeekTimeOfDay: true,
|
||||
},
|
||||
],
|
||||
status: 'not_installed',
|
||||
};
|
||||
}
|
||||
};
|
File diff suppressed because one or more lines are too long
|
@ -16,5 +16,10 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
export {
|
||||
SampleDataRegistry,
|
||||
SampleDataRegistrySetup,
|
||||
SampleDataRegistryStart,
|
||||
} from './sample_data_registry';
|
||||
|
||||
export { sampleDataMixin } from './sample_data_mixin';
|
||||
export { SampleDatasetSchema, SampleDatasetProvider } from './lib/sample_dataset_registry_types';
|
|
@ -17,7 +17,7 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
export function createIndexName(sampleDataSetId, dataIndexId) {
|
||||
export const createIndexName = function(sampleDataSetId: string, dataIndexId: string): string {
|
||||
// Sample data schema was updated to support multiple indices in 6.5.
|
||||
// This if statement ensures that sample data sets that used a single index prior to the schema change
|
||||
// have the same index name to avoid orphaned indices when uninstalling.
|
||||
|
@ -25,4 +25,4 @@ export function createIndexName(sampleDataSetId, dataIndexId) {
|
|||
return `kibana_sample_data_${sampleDataSetId}`;
|
||||
}
|
||||
return `kibana_sample_data_${sampleDataSetId}_${dataIndexId}`;
|
||||
}
|
||||
};
|
|
@ -19,20 +19,20 @@
|
|||
|
||||
import readline from 'readline';
|
||||
import fs from 'fs';
|
||||
import zlib from 'zlib';
|
||||
import { createUnzip } from 'zlib';
|
||||
|
||||
const BULK_INSERT_SIZE = 500;
|
||||
|
||||
export function loadData(path, bulkInsert) {
|
||||
export function loadData(path: any, bulkInsert: (docs: any[]) => Promise<void>) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let count = 0;
|
||||
let docs = [];
|
||||
let isPaused = false;
|
||||
let count: number = 0;
|
||||
let docs: any[] = [];
|
||||
let isPaused: boolean = false;
|
||||
|
||||
// pause does not stop lines already in buffer. Use smaller buffer size to avoid bulk inserting to many records
|
||||
const readStream = fs.createReadStream(path, { highWaterMark: 1024 * 4 });
|
||||
// eslint-disable-next-line new-cap
|
||||
const lineStream = readline.createInterface({ input: readStream.pipe(zlib.Unzip()) });
|
||||
|
||||
const lineStream = readline.createInterface({ input: readStream.pipe(createUnzip()) });
|
||||
const onClose = async () => {
|
||||
if (docs.length > 0) {
|
||||
try {
|
||||
|
@ -46,7 +46,7 @@ export function loadData(path, bulkInsert) {
|
|||
};
|
||||
lineStream.on('close', onClose);
|
||||
|
||||
const closeWithError = err => {
|
||||
const closeWithError = (err: any) => {
|
||||
lineStream.removeListener('close', onClose);
|
||||
lineStream.close();
|
||||
reject(err);
|
|
@ -0,0 +1,92 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { SavedObject } from 'src/core/server';
|
||||
|
||||
export enum DatasetStatusTypes {
|
||||
NOT_INSTALLED = 'not_installed',
|
||||
INSTALLED = 'installed',
|
||||
UNKNOWN = 'unknown',
|
||||
}
|
||||
export interface SampleDatasetDashboardPanel {
|
||||
sampleDataId: string;
|
||||
dashboardId: string;
|
||||
oldEmbeddableId: string;
|
||||
embeddableId: string;
|
||||
embeddableType: EmbeddableTypes;
|
||||
embeddableConfig: object;
|
||||
}
|
||||
export enum EmbeddableTypes {
|
||||
MAP_SAVED_OBJECT_TYPE = 'map',
|
||||
SEARCH_EMBEDDABLE_TYPE = 'search',
|
||||
VISUALIZE_EMBEDDABLE_TYPE = 'visualization',
|
||||
}
|
||||
export interface DataIndexSchema {
|
||||
id: string;
|
||||
|
||||
// path to newline delimented JSON file containing data relative to KIBANA_HOME
|
||||
dataPath: string;
|
||||
|
||||
// Object defining Elasticsearch field mappings (contents of index.mappings.type.properties)
|
||||
fields: object;
|
||||
|
||||
// times fields that will be updated relative to now when data is installed
|
||||
timeFields: string[];
|
||||
|
||||
// Reference to now in your test data set.
|
||||
// When data is installed, timestamps are converted to the present time.
|
||||
// The distance between a timestamp and currentTimeMarker is preserved but the date and time will change.
|
||||
// For example:
|
||||
// sample data set: timestamp: 2018-01-01T00:00:00Z, currentTimeMarker: 2018-01-01T12:00:00Z
|
||||
// installed data set: timestamp: 2018-04-18T20:33:14Z, currentTimeMarker: 2018-04-19T08:33:14Z
|
||||
currentTimeMarker: string;
|
||||
|
||||
// Set to true to move timestamp to current week, preserving day of week and time of day
|
||||
// Relative distance from timestamp to currentTimeMarker will not remain the same
|
||||
preserveDayOfWeekTimeOfDay: boolean;
|
||||
}
|
||||
|
||||
export interface AppLinkSchema {
|
||||
path: string;
|
||||
icon: string;
|
||||
label: string;
|
||||
}
|
||||
|
||||
export interface SampleDatasetSchema {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
previewImagePath: string;
|
||||
darkPreviewImagePath: string;
|
||||
|
||||
// saved object id of main dashboard for sample data set
|
||||
overviewDashboard: string;
|
||||
appLinks: AppLinkSchema[];
|
||||
|
||||
// saved object id of default index-pattern for sample data set
|
||||
defaultIndex: string;
|
||||
|
||||
// Kibana saved objects (index patter, visualizations, dashboard, ...)
|
||||
// Should provide a nice demo of Kibana's functionality with the sample data set
|
||||
savedObjects: SavedObject[];
|
||||
dataIndices: DataIndexSchema[];
|
||||
status?: string | undefined;
|
||||
statusMsg?: unknown;
|
||||
}
|
||||
|
||||
export type SampleDatasetProvider = () => SampleDatasetSchema;
|
|
@ -19,29 +19,34 @@
|
|||
|
||||
const MILLISECONDS_IN_DAY = 86400000;
|
||||
|
||||
function iso8601ToDateIgnoringTime(iso8601) {
|
||||
function iso8601ToDateIgnoringTime(iso8601: string) {
|
||||
const split = iso8601.split('-');
|
||||
if (split.length < 3) {
|
||||
throw new Error('Unexpected timestamp format, expecting YYYY-MM-DDTHH:mm:ss');
|
||||
}
|
||||
const year = parseInt(split[0]);
|
||||
const month = parseInt(split[1]) - 1; // javascript months are zero-based indexed
|
||||
const date = parseInt(split[2]);
|
||||
const year = parseInt(split[0], 10);
|
||||
const month = parseInt(split[1], 10) - 1; // javascript months are zero-based indexed
|
||||
const date = parseInt(split[2], 10);
|
||||
return new Date(year, month, date);
|
||||
}
|
||||
|
||||
export function dateToIso8601IgnoringTime(date) {
|
||||
export function dateToIso8601IgnoringTime(date: Date) {
|
||||
// not using "Date.toISOString" because only using Date methods that deal with local time
|
||||
const year = date.getFullYear();
|
||||
const month = date.getMonth() + 1;
|
||||
const dateItem = new Date(date);
|
||||
const year = dateItem.getFullYear();
|
||||
const month = dateItem.getMonth() + 1;
|
||||
const monthString = month < 10 ? `0${month}` : `${month}`;
|
||||
const dateString = date.getDate() < 10 ? `0${date.getDate()}` : `${date.getDate()}`;
|
||||
const dateString = dateItem.getDate() < 10 ? `0${dateItem.getDate()}` : `${dateItem.getDate()}`;
|
||||
return `${year}-${monthString}-${dateString}`;
|
||||
}
|
||||
|
||||
// Translate source timestamp by targetReference timestamp,
|
||||
// perserving the distance between source and sourceReference
|
||||
export function translateTimeRelativeToDifference(source, sourceReference, targetReference) {
|
||||
export function translateTimeRelativeToDifference(
|
||||
source: string,
|
||||
sourceReference: any,
|
||||
targetReference: any
|
||||
) {
|
||||
const sourceDate = iso8601ToDateIgnoringTime(source);
|
||||
const sourceReferenceDate = iso8601ToDateIgnoringTime(sourceReference);
|
||||
const targetReferenceDate = iso8601ToDateIgnoringTime(targetReference);
|
||||
|
@ -54,7 +59,11 @@ export function translateTimeRelativeToDifference(source, sourceReference, targe
|
|||
|
||||
// Translate source timestamp by targetReference timestamp,
|
||||
// perserving the week distance between source and sourceReference and day of week of the source timestamp
|
||||
export function translateTimeRelativeToWeek(source, sourceReference, targetReference) {
|
||||
export function translateTimeRelativeToWeek(
|
||||
source: string,
|
||||
sourceReference: any,
|
||||
targetReference: any
|
||||
) {
|
||||
const sourceReferenceDate = iso8601ToDateIgnoringTime(sourceReference);
|
||||
const targetReferenceDate = iso8601ToDateIgnoringTime(targetReference);
|
||||
|
|
@ -16,7 +16,6 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
export { createListRoute } from './list';
|
||||
export { createInstallRoute } from './install';
|
||||
export { createUninstallRoute } from './uninstall';
|
186
src/plugins/home/server/services/sample_data/routes/install.ts
Normal file
186
src/plugins/home/server/services/sample_data/routes/install.ts
Normal file
|
@ -0,0 +1,186 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { schema } from '@kbn/config-schema';
|
||||
import { IRouter, Logger, RequestHandlerContext } from 'src/core/server';
|
||||
import { SampleDatasetSchema } from '../lib/sample_dataset_registry_types';
|
||||
import { createIndexName } from '../lib/create_index_name';
|
||||
import {
|
||||
dateToIso8601IgnoringTime,
|
||||
translateTimeRelativeToDifference,
|
||||
translateTimeRelativeToWeek,
|
||||
} from '../lib/translate_timestamp';
|
||||
import { loadData } from '../lib/load_data';
|
||||
import { SampleDataUsageTracker } from '../usage/usage';
|
||||
|
||||
const insertDataIntoIndex = (
|
||||
dataIndexConfig: any,
|
||||
index: string,
|
||||
nowReference: string,
|
||||
context: RequestHandlerContext,
|
||||
logger: Logger
|
||||
) => {
|
||||
function updateTimestamps(doc: any) {
|
||||
dataIndexConfig.timeFields
|
||||
.filter((timeFieldName: string) => doc[timeFieldName])
|
||||
.forEach((timeFieldName: string) => {
|
||||
doc[timeFieldName] = dataIndexConfig.preserveDayOfWeekTimeOfDay
|
||||
? translateTimeRelativeToWeek(
|
||||
doc[timeFieldName],
|
||||
dataIndexConfig.currentTimeMarker,
|
||||
nowReference
|
||||
)
|
||||
: translateTimeRelativeToDifference(
|
||||
doc[timeFieldName],
|
||||
dataIndexConfig.currentTimeMarker,
|
||||
nowReference
|
||||
);
|
||||
});
|
||||
return doc;
|
||||
}
|
||||
|
||||
const bulkInsert = async (docs: any) => {
|
||||
const insertCmd = { index: { _index: index } };
|
||||
const bulk: any[] = [];
|
||||
docs.forEach((doc: any) => {
|
||||
bulk.push(insertCmd);
|
||||
bulk.push(updateTimestamps(doc));
|
||||
});
|
||||
const resp = await context.core.elasticsearch.adminClient.callAsCurrentUser('bulk', {
|
||||
body: bulk,
|
||||
});
|
||||
if (resp.errors) {
|
||||
const errMsg = `sample_data install errors while bulk inserting. Elasticsearch response: ${JSON.stringify(
|
||||
resp,
|
||||
null,
|
||||
''
|
||||
)}`;
|
||||
logger.warn(errMsg);
|
||||
return Promise.reject(
|
||||
new Error(`Unable to load sample data into index "${index}", see kibana logs for details`)
|
||||
);
|
||||
}
|
||||
};
|
||||
return loadData(dataIndexConfig.dataPath, bulkInsert); // this returns a Promise
|
||||
};
|
||||
|
||||
export function createInstallRoute(
|
||||
router: IRouter,
|
||||
sampleDatasets: SampleDatasetSchema[],
|
||||
logger: Logger,
|
||||
usageTracker: SampleDataUsageTracker
|
||||
): void {
|
||||
router.post(
|
||||
{
|
||||
path: '/api/sample_data/{id}',
|
||||
validate: {
|
||||
params: schema.object({ id: schema.string() }),
|
||||
// TODO validate now as date
|
||||
query: schema.object({ now: schema.maybe(schema.string()) }),
|
||||
},
|
||||
},
|
||||
async (context, req, res) => {
|
||||
const { params, query } = req;
|
||||
const sampleDataset = sampleDatasets.find(({ id }) => id === params.id);
|
||||
if (!sampleDataset) {
|
||||
return res.notFound();
|
||||
}
|
||||
// @ts-ignore Custom query validation used
|
||||
const now = query.now ? new Date(query.now) : new Date();
|
||||
const nowReference = dateToIso8601IgnoringTime(now);
|
||||
const counts = {};
|
||||
for (let i = 0; i < sampleDataset.dataIndices.length; i++) {
|
||||
const dataIndexConfig = sampleDataset.dataIndices[i];
|
||||
const index = createIndexName(sampleDataset.id, dataIndexConfig.id);
|
||||
|
||||
// clean up any old installation of dataset
|
||||
try {
|
||||
await context.core.elasticsearch.dataClient.callAsCurrentUser('indices.delete', {
|
||||
index,
|
||||
});
|
||||
} catch (err) {
|
||||
// ignore delete errors
|
||||
}
|
||||
|
||||
try {
|
||||
const createIndexParams = {
|
||||
index,
|
||||
body: {
|
||||
settings: { index: { number_of_shards: 1, auto_expand_replicas: '0-1' } },
|
||||
mappings: { properties: dataIndexConfig.fields },
|
||||
},
|
||||
};
|
||||
await context.core.elasticsearch.dataClient.callAsCurrentUser(
|
||||
'indices.create',
|
||||
createIndexParams
|
||||
);
|
||||
} catch (err) {
|
||||
const errMsg = `Unable to create sample data index "${index}", error: ${err.message}`;
|
||||
logger.warn(errMsg);
|
||||
return res.customError({ body: errMsg, statusCode: err.status });
|
||||
}
|
||||
|
||||
try {
|
||||
const count = await insertDataIntoIndex(
|
||||
dataIndexConfig,
|
||||
index,
|
||||
nowReference,
|
||||
context,
|
||||
logger
|
||||
);
|
||||
(counts as any)[index] = count;
|
||||
} catch (err) {
|
||||
const errMsg = `sample_data install errors while loading data. Error: ${err}`;
|
||||
logger.warn(errMsg);
|
||||
return res.internalError({ body: errMsg });
|
||||
}
|
||||
}
|
||||
|
||||
let createResults;
|
||||
try {
|
||||
createResults = await context.core.savedObjects.client.bulkCreate(
|
||||
sampleDataset.savedObjects,
|
||||
{ overwrite: true }
|
||||
);
|
||||
} catch (err) {
|
||||
const errMsg = `bulkCreate failed, error: ${err.message}`;
|
||||
logger.warn(errMsg);
|
||||
return res.internalError({ body: errMsg });
|
||||
}
|
||||
const errors = createResults.saved_objects.filter(savedObjectCreateResult => {
|
||||
return Boolean(savedObjectCreateResult.error);
|
||||
});
|
||||
if (errors.length > 0) {
|
||||
const errMsg = `sample_data install errors while loading saved objects. Errors: ${errors.join(
|
||||
','
|
||||
)}`;
|
||||
logger.warn(errMsg);
|
||||
return res.customError({ body: errMsg, statusCode: 403 });
|
||||
}
|
||||
usageTracker.addInstall(params.id);
|
||||
|
||||
// FINALLY
|
||||
return res.ok({
|
||||
body: {
|
||||
elasticsearchIndicesCreated: counts,
|
||||
kibanaSavedObjectsLoaded: sampleDataset.savedObjects.length,
|
||||
},
|
||||
});
|
||||
}
|
||||
);
|
||||
}
|
92
src/plugins/home/server/services/sample_data/routes/list.ts
Normal file
92
src/plugins/home/server/services/sample_data/routes/list.ts
Normal file
|
@ -0,0 +1,92 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { isBoom } from 'boom';
|
||||
import { IRouter } from 'src/core/server';
|
||||
import { SampleDatasetSchema } from '../lib/sample_dataset_registry_types';
|
||||
import { createIndexName } from '../lib/create_index_name';
|
||||
|
||||
const NOT_INSTALLED = 'not_installed';
|
||||
const INSTALLED = 'installed';
|
||||
const UNKNOWN = 'unknown';
|
||||
|
||||
export const createListRoute = (router: IRouter, sampleDatasets: SampleDatasetSchema[]) => {
|
||||
router.get({ path: '/api/sample_data', validate: false }, async (context, req, res) => {
|
||||
const registeredSampleDatasets = sampleDatasets.map(sampleDataset => {
|
||||
return {
|
||||
id: sampleDataset.id,
|
||||
name: sampleDataset.name,
|
||||
description: sampleDataset.description,
|
||||
previewImagePath: sampleDataset.previewImagePath,
|
||||
darkPreviewImagePath: sampleDataset.darkPreviewImagePath,
|
||||
overviewDashboard: sampleDataset.overviewDashboard,
|
||||
appLinks: sampleDataset.appLinks,
|
||||
defaultIndex: sampleDataset.defaultIndex,
|
||||
dataIndices: sampleDataset.dataIndices.map(({ id }) => ({ id })),
|
||||
status: sampleDataset.status,
|
||||
statusMsg: sampleDataset.statusMsg,
|
||||
};
|
||||
});
|
||||
const isInstalledPromises = registeredSampleDatasets.map(async sampleDataset => {
|
||||
for (let i = 0; i < sampleDataset.dataIndices.length; i++) {
|
||||
const dataIndexConfig = sampleDataset.dataIndices[i];
|
||||
const index = createIndexName(sampleDataset.id, dataIndexConfig.id);
|
||||
try {
|
||||
const indexExists = await context.core.elasticsearch.dataClient.callAsCurrentUser(
|
||||
'indices.exists',
|
||||
{ index }
|
||||
);
|
||||
if (!indexExists) {
|
||||
sampleDataset.status = NOT_INSTALLED;
|
||||
return;
|
||||
}
|
||||
|
||||
const { count } = await context.core.elasticsearch.dataClient.callAsCurrentUser('count', {
|
||||
index,
|
||||
});
|
||||
if (count === 0) {
|
||||
sampleDataset.status = NOT_INSTALLED;
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
sampleDataset.status = UNKNOWN;
|
||||
sampleDataset.statusMsg = err.message;
|
||||
return;
|
||||
}
|
||||
}
|
||||
try {
|
||||
await context.core.savedObjects.client.get('dashboard', sampleDataset.overviewDashboard);
|
||||
} catch (err) {
|
||||
// savedObjectClient.get() throws an boom error when object is not found.
|
||||
if (isBoom(err) && err.output.statusCode === 404) {
|
||||
sampleDataset.status = NOT_INSTALLED;
|
||||
return;
|
||||
}
|
||||
|
||||
sampleDataset.status = UNKNOWN;
|
||||
sampleDataset.statusMsg = err.message;
|
||||
return;
|
||||
}
|
||||
|
||||
sampleDataset.status = INSTALLED;
|
||||
});
|
||||
|
||||
await Promise.all(isInstalledPromises);
|
||||
return res.ok({ body: registeredSampleDatasets });
|
||||
});
|
||||
};
|
|
@ -0,0 +1,96 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { schema } from '@kbn/config-schema';
|
||||
import _ from 'lodash';
|
||||
import { IRouter } from 'src/core/server';
|
||||
import { SampleDatasetSchema } from '../lib/sample_dataset_registry_types';
|
||||
import { createIndexName } from '../lib/create_index_name';
|
||||
import { SampleDataUsageTracker } from '../usage/usage';
|
||||
|
||||
export function createUninstallRoute(
|
||||
router: IRouter,
|
||||
sampleDatasets: SampleDatasetSchema[],
|
||||
usageTracker: SampleDataUsageTracker
|
||||
): void {
|
||||
router.delete(
|
||||
{
|
||||
path: '/api/sample_data/{id}',
|
||||
validate: {
|
||||
params: schema.object({ id: schema.string() }),
|
||||
},
|
||||
},
|
||||
async (
|
||||
{
|
||||
core: {
|
||||
elasticsearch: {
|
||||
dataClient: { callAsCurrentUser },
|
||||
},
|
||||
savedObjects: { client: savedObjectsClient },
|
||||
},
|
||||
},
|
||||
request,
|
||||
response
|
||||
) => {
|
||||
const sampleDataset = sampleDatasets.find(({ id }) => id === request.params.id);
|
||||
|
||||
if (!sampleDataset) {
|
||||
return response.notFound();
|
||||
}
|
||||
|
||||
for (let i = 0; i < sampleDataset.dataIndices.length; i++) {
|
||||
const dataIndexConfig = sampleDataset.dataIndices[i];
|
||||
const index = createIndexName(sampleDataset.id, dataIndexConfig.id);
|
||||
|
||||
try {
|
||||
await callAsCurrentUser('indices.delete', { index });
|
||||
} catch (err) {
|
||||
return response.customError({
|
||||
statusCode: err.status,
|
||||
body: {
|
||||
message: `Unable to delete sample data index "${index}", error: ${err.message}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const deletePromises = sampleDataset.savedObjects.map(({ type, id }) =>
|
||||
savedObjectsClient.delete(type, id)
|
||||
);
|
||||
|
||||
try {
|
||||
await Promise.all(deletePromises);
|
||||
} catch (err) {
|
||||
// ignore 404s since users could have deleted some of the saved objects via the UI
|
||||
if (_.get(err, 'output.statusCode') !== 404) {
|
||||
return response.customError({
|
||||
statusCode: err.status,
|
||||
body: {
|
||||
message: `Unable to delete sample dataset saved objects, error: ${err.message}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// track the usage operation in a non-blocking way
|
||||
usageTracker.addUninstall(request.params.id);
|
||||
|
||||
return response.noContent();
|
||||
}
|
||||
);
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
SampleDataRegistrySetup,
|
||||
SampleDataRegistryStart,
|
||||
SampleDataRegistry,
|
||||
} from './sample_data_registry';
|
||||
|
||||
const createSetupMock = (): jest.Mocked<SampleDataRegistrySetup> => {
|
||||
const setup = {
|
||||
registerSampleDataset: jest.fn(),
|
||||
getSampleDatasets: jest.fn(),
|
||||
addSavedObjectsToSampleDataset: jest.fn(),
|
||||
addAppLinksToSampleDataset: jest.fn(),
|
||||
replacePanelInSampleDatasetDashboard: jest.fn(),
|
||||
};
|
||||
return setup;
|
||||
};
|
||||
|
||||
const createStartMock = (): jest.Mocked<SampleDataRegistryStart> => {
|
||||
const start = {};
|
||||
return start;
|
||||
};
|
||||
|
||||
const createMock = (): jest.Mocked<PublicMethodsOf<SampleDataRegistry>> => {
|
||||
const service = {
|
||||
setup: jest.fn(),
|
||||
start: jest.fn(),
|
||||
};
|
||||
service.setup.mockImplementation(createSetupMock);
|
||||
service.start.mockImplementation(createStartMock);
|
||||
return service;
|
||||
};
|
||||
|
||||
export const sampleDataRegistryMock = {
|
||||
createSetup: createSetupMock,
|
||||
createStart: createStartMock,
|
||||
create: createMock,
|
||||
};
|
|
@ -0,0 +1,182 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import Joi from 'joi';
|
||||
import { CoreSetup, PluginInitializerContext } from 'src/core/server';
|
||||
import { SavedObject } from 'src/core/public';
|
||||
import {
|
||||
SampleDatasetProvider,
|
||||
SampleDatasetSchema,
|
||||
AppLinkSchema,
|
||||
SampleDatasetDashboardPanel,
|
||||
} from './lib/sample_dataset_registry_types';
|
||||
import { sampleDataSchema } from './lib/sample_dataset_schema';
|
||||
|
||||
import { flightsSpecProvider, logsSpecProvider, ecommerceSpecProvider } from './data_sets';
|
||||
import { createListRoute, createInstallRoute } from './routes';
|
||||
import { UsageCollectionSetup } from '../../../../usage_collection/server';
|
||||
import { makeSampleDataUsageCollector, usage } from './usage';
|
||||
import { createUninstallRoute } from './routes/uninstall';
|
||||
|
||||
const flightsSampleDataset = flightsSpecProvider();
|
||||
const logsSampleDataset = logsSpecProvider();
|
||||
const ecommerceSampleDataset = ecommerceSpecProvider();
|
||||
|
||||
export class SampleDataRegistry {
|
||||
constructor(private readonly initContext: PluginInitializerContext) {}
|
||||
private readonly sampleDatasets: SampleDatasetSchema[] = [
|
||||
flightsSampleDataset,
|
||||
logsSampleDataset,
|
||||
ecommerceSampleDataset,
|
||||
];
|
||||
|
||||
public setup(core: CoreSetup, usageCollections: UsageCollectionSetup | undefined) {
|
||||
if (usageCollections) {
|
||||
makeSampleDataUsageCollector(usageCollections, this.initContext);
|
||||
}
|
||||
const usageTracker = usage(
|
||||
core.savedObjects,
|
||||
this.initContext.logger.get('sample_data', 'telemetry')
|
||||
);
|
||||
const router = core.http.createRouter();
|
||||
createListRoute(router, this.sampleDatasets);
|
||||
createInstallRoute(
|
||||
router,
|
||||
this.sampleDatasets,
|
||||
this.initContext.logger.get('sampleData'),
|
||||
usageTracker
|
||||
);
|
||||
createUninstallRoute(router, this.sampleDatasets, usageTracker);
|
||||
|
||||
return {
|
||||
registerSampleDataset: (specProvider: SampleDatasetProvider) => {
|
||||
const { error, value } = Joi.validate(specProvider(), sampleDataSchema);
|
||||
|
||||
if (error) {
|
||||
throw new Error(`Unable to register sample dataset spec because it's invalid. ${error}`);
|
||||
}
|
||||
const defaultIndexSavedObjectJson = value.savedObjects.find((savedObjectJson: any) => {
|
||||
return (
|
||||
savedObjectJson.type === 'index-pattern' && savedObjectJson.id === value.defaultIndex
|
||||
);
|
||||
});
|
||||
if (!defaultIndexSavedObjectJson) {
|
||||
throw new Error(
|
||||
`Unable to register sample dataset spec, defaultIndex: "${value.defaultIndex}" does not exist in savedObjects list.`
|
||||
);
|
||||
}
|
||||
|
||||
const dashboardSavedObjectJson = value.savedObjects.find((savedObjectJson: any) => {
|
||||
return (
|
||||
savedObjectJson.type === 'dashboard' && savedObjectJson.id === value.overviewDashboard
|
||||
);
|
||||
});
|
||||
if (!dashboardSavedObjectJson) {
|
||||
throw new Error(
|
||||
`Unable to register sample dataset spec, overviewDashboard: "${value.overviewDashboard}" does not exist in savedObject list.`
|
||||
);
|
||||
}
|
||||
this.sampleDatasets.push(value);
|
||||
},
|
||||
getSampleDatasets: () => this.sampleDatasets,
|
||||
|
||||
addSavedObjectsToSampleDataset: (id: string, savedObjects: SavedObject[]) => {
|
||||
const sampleDataset = this.sampleDatasets.find(dataset => {
|
||||
return dataset.id === id;
|
||||
});
|
||||
|
||||
if (!sampleDataset) {
|
||||
throw new Error(`Unable to find sample dataset with id: ${id}`);
|
||||
}
|
||||
|
||||
sampleDataset.savedObjects = sampleDataset.savedObjects.concat(savedObjects);
|
||||
},
|
||||
|
||||
addAppLinksToSampleDataset: (id: string, appLinks: AppLinkSchema[]) => {
|
||||
const sampleDataset = this.sampleDatasets.find(dataset => {
|
||||
return dataset.id === id;
|
||||
});
|
||||
|
||||
if (!sampleDataset) {
|
||||
throw new Error(`Unable to find sample dataset with id: ${id}`);
|
||||
}
|
||||
|
||||
sampleDataset.appLinks = sampleDataset.appLinks
|
||||
? sampleDataset.appLinks.concat(appLinks)
|
||||
: [];
|
||||
},
|
||||
|
||||
replacePanelInSampleDatasetDashboard: ({
|
||||
sampleDataId,
|
||||
dashboardId,
|
||||
oldEmbeddableId,
|
||||
embeddableId,
|
||||
embeddableType,
|
||||
embeddableConfig,
|
||||
}: SampleDatasetDashboardPanel) => {
|
||||
const sampleDataset = this.sampleDatasets.find(dataset => {
|
||||
return dataset.id === sampleDataId;
|
||||
});
|
||||
if (!sampleDataset) {
|
||||
throw new Error(`Unable to find sample dataset with id: ${sampleDataId}`);
|
||||
}
|
||||
|
||||
const dashboard = sampleDataset.savedObjects.find((savedObject: SavedObject) => {
|
||||
return savedObject.id === dashboardId && savedObject.type === 'dashboard';
|
||||
});
|
||||
if (!dashboard) {
|
||||
throw new Error(`Unable to find dashboard with id: ${dashboardId}`);
|
||||
}
|
||||
try {
|
||||
const reference = dashboard.references.find((referenceItem: any) => {
|
||||
return referenceItem.id === oldEmbeddableId;
|
||||
});
|
||||
if (!reference) {
|
||||
throw new Error(`Unable to find reference for embeddable: ${oldEmbeddableId}`);
|
||||
}
|
||||
reference.type = embeddableType;
|
||||
reference.id = embeddableId;
|
||||
|
||||
const panels = JSON.parse(dashboard.attributes.panelsJSON);
|
||||
const panel = panels.find((panelItem: any) => {
|
||||
return panelItem.panelRefName === reference.name;
|
||||
});
|
||||
if (!panel) {
|
||||
throw new Error(`Unable to find panel for reference: ${reference.name}`);
|
||||
}
|
||||
panel.embeddableConfig = embeddableConfig;
|
||||
dashboard.attributes.panelsJSON = JSON.stringify(panels);
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Unable to replace panel with embeddable ${oldEmbeddableId}, error: ${error}`
|
||||
);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
public start() {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
/** @public */
|
||||
export type SampleDataRegistrySetup = ReturnType<SampleDataRegistry['setup']>;
|
||||
|
||||
/** @public */
|
||||
export type SampleDataRegistryStart = ReturnType<SampleDataRegistry['start']>;
|
|
@ -17,17 +17,19 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import { Server } from 'hapi';
|
||||
import { PluginInitializerContext } from 'kibana/server';
|
||||
import { first } from 'rxjs/operators';
|
||||
import { fetchProvider } from './collector_fetch';
|
||||
import { UsageCollectionSetup } from '../../../../plugins/usage_collection/server';
|
||||
import { UsageCollectionSetup } from '../../../../../usage_collection/server';
|
||||
|
||||
export function makeSampleDataUsageCollector(
|
||||
export async function makeSampleDataUsageCollector(
|
||||
usageCollection: UsageCollectionSetup,
|
||||
server: Server
|
||||
context: PluginInitializerContext
|
||||
) {
|
||||
let index: string;
|
||||
try {
|
||||
index = server.config().get('kibana.index');
|
||||
const config = await context.config.legacy.globalConfig$.pipe(first()).toPromise();
|
||||
index = config.kibana.index;
|
||||
} catch (err) {
|
||||
return; // kibana plugin is not enabled (test environment)
|
||||
}
|
|
@ -17,40 +17,39 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import * as Hapi from 'hapi';
|
||||
import { Logger, SavedObjectsServiceSetup } from 'kibana/server';
|
||||
|
||||
const SAVED_OBJECT_ID = 'sample-data-telemetry';
|
||||
|
||||
export function usage(request: Hapi.Request) {
|
||||
const { server } = request;
|
||||
export interface SampleDataUsageTracker {
|
||||
addInstall(dataSet: string): void;
|
||||
addUninstall(dataSet: string): void;
|
||||
}
|
||||
|
||||
export function usage(
|
||||
savedObjects: SavedObjectsServiceSetup,
|
||||
logger: Logger
|
||||
): SampleDataUsageTracker {
|
||||
const handleIncrementError = (err: Error) => {
|
||||
if (err != null) {
|
||||
server.log(['debug', 'sample_data', 'telemetry'], err.stack);
|
||||
if (err && err.stack) {
|
||||
logger.debug(err.stack);
|
||||
}
|
||||
server.log(
|
||||
['warning', 'sample_data', 'telemetry'],
|
||||
`saved objects repository incrementCounter encountered an error: ${err}`
|
||||
);
|
||||
logger.warn(`saved objects repository incrementCounter encountered an error: ${err}`);
|
||||
};
|
||||
|
||||
const {
|
||||
savedObjects: { getSavedObjectsRepository },
|
||||
} = server;
|
||||
const { callWithInternalUser } = server.plugins.elasticsearch.getCluster('admin');
|
||||
const internalRepository = getSavedObjectsRepository(callWithInternalUser);
|
||||
const internalRepository = savedObjects.createInternalRepository();
|
||||
|
||||
return {
|
||||
addInstall: async (dataSet: string) => {
|
||||
try {
|
||||
internalRepository.incrementCounter(SAVED_OBJECT_ID, dataSet, `installCount`);
|
||||
await internalRepository.incrementCounter(SAVED_OBJECT_ID, dataSet, `installCount`);
|
||||
} catch (err) {
|
||||
handleIncrementError(err);
|
||||
}
|
||||
},
|
||||
addUninstall: async (dataSet: string) => {
|
||||
try {
|
||||
internalRepository.incrementCounter(SAVED_OBJECT_ID, dataSet, `unInstallCount`);
|
||||
await internalRepository.incrementCounter(SAVED_OBJECT_ID, dataSet, `unInstallCount`);
|
||||
} catch (err) {
|
||||
handleIncrementError(err);
|
||||
}
|
|
@ -26,7 +26,7 @@ export default function({ getService }: FtrProviderContext) {
|
|||
describe('prototype pollution smoke test', () => {
|
||||
it('prevents payloads with the "constructor.prototype" pollution vector from being accepted', async () => {
|
||||
await supertest
|
||||
.post('/api/sample_data/some_data_id')
|
||||
.post('/api/saved_objects/_log_legacy_import')
|
||||
.send([
|
||||
{
|
||||
constructor: {
|
||||
|
@ -44,7 +44,7 @@ export default function({ getService }: FtrProviderContext) {
|
|||
|
||||
it('prevents payloads with the "__proto__" pollution vector from being accepted', async () => {
|
||||
await supertest
|
||||
.post('/api/sample_data/some_data_id')
|
||||
.post('/api/saved_objects/_log_legacy_import')
|
||||
.send(JSON.parse(`{"foo": { "__proto__": {} } }`))
|
||||
.expect(400, {
|
||||
statusCode: 400,
|
||||
|
|
|
@ -96,7 +96,7 @@ export default function({ getService }) {
|
|||
await supertest
|
||||
.delete(`/api/sample_data/flights`)
|
||||
.set('kbn-xsrf', 'kibana')
|
||||
.expect(200);
|
||||
.expect(204);
|
||||
});
|
||||
|
||||
it('should remove elasticsearch index containing sample data', async () => {
|
||||
|
|
|
@ -63,8 +63,8 @@ export class Plugin {
|
|||
|
||||
registerCanvasUsageCollector(plugins.usageCollection, core);
|
||||
loadSampleData(
|
||||
plugins.sampleData.addSavedObjectsToSampleDataset,
|
||||
plugins.sampleData.addAppLinksToSampleDataset
|
||||
plugins.home.sampleData.addSavedObjectsToSampleDataset,
|
||||
plugins.home.sampleData.addAppLinksToSampleDataset
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,9 +7,12 @@
|
|||
import { CANVAS as label } from '../../i18n';
|
||||
// @ts-ignore Untyped local
|
||||
import { ecommerceSavedObjects, flightsSavedObjects, webLogsSavedObjects } from './index';
|
||||
import { SampleDataRegistrySetup } from '../../../../../../src/plugins/home/server';
|
||||
|
||||
// @ts-ignore: Untyped in Kibana
|
||||
export function loadSampleData(addSavedObjectsToSampleDataset, addAppLinksToSampleDataset) {
|
||||
export function loadSampleData(
|
||||
addSavedObjectsToSampleDataset: SampleDataRegistrySetup['addSavedObjectsToSampleDataset'],
|
||||
addAppLinksToSampleDataset: SampleDataRegistrySetup['addAppLinksToSampleDataset']
|
||||
) {
|
||||
const now = new Date();
|
||||
const nowTimestamp = now.toISOString();
|
||||
|
||||
|
@ -27,23 +30,29 @@ export function loadSampleData(addSavedObjectsToSampleDataset, addAppLinksToSamp
|
|||
}
|
||||
|
||||
addSavedObjectsToSampleDataset('ecommerce', updateCanvasWorkpadTimestamps(ecommerceSavedObjects));
|
||||
addAppLinksToSampleDataset('ecommerce', {
|
||||
path: '/app/canvas#/workpad/workpad-e08b9bdb-ec14-4339-94c4-063bddfd610e',
|
||||
icon: 'canvasApp',
|
||||
label,
|
||||
});
|
||||
addAppLinksToSampleDataset('ecommerce', [
|
||||
{
|
||||
path: '/app/canvas#/workpad/workpad-e08b9bdb-ec14-4339-94c4-063bddfd610e',
|
||||
icon: 'canvasApp',
|
||||
label,
|
||||
},
|
||||
]);
|
||||
|
||||
addSavedObjectsToSampleDataset('flights', updateCanvasWorkpadTimestamps(flightsSavedObjects));
|
||||
addAppLinksToSampleDataset('flights', {
|
||||
path: '/app/canvas#/workpad/workpad-a474e74b-aedc-47c3-894a-db77e62c41e0',
|
||||
icon: 'canvasApp',
|
||||
label,
|
||||
});
|
||||
addAppLinksToSampleDataset('flights', [
|
||||
{
|
||||
path: '/app/canvas#/workpad/workpad-a474e74b-aedc-47c3-894a-db77e62c41e0',
|
||||
icon: 'canvasApp',
|
||||
label,
|
||||
},
|
||||
]);
|
||||
|
||||
addSavedObjectsToSampleDataset('logs', updateCanvasWorkpadTimestamps(webLogsSavedObjects));
|
||||
addAppLinksToSampleDataset('logs', {
|
||||
path: '/app/canvas#/workpad/workpad-ad72a4e9-b422-480c-be6d-a64a0b79541d',
|
||||
icon: 'canvasApp',
|
||||
label,
|
||||
});
|
||||
addAppLinksToSampleDataset('logs', [
|
||||
{
|
||||
path: '/app/canvas#/workpad/workpad-ad72a4e9-b422-480c-be6d-a64a0b79541d',
|
||||
icon: 'canvasApp',
|
||||
label,
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
import { ElasticsearchPlugin } from 'src/legacy/core_plugins/elasticsearch';
|
||||
import { Legacy } from 'kibana';
|
||||
|
||||
import { CoreSetup as ExistingCoreSetup } from 'src/core/server';
|
||||
import { HomeServerPluginSetup } from 'src/plugins/home/server';
|
||||
import { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
|
||||
import { PluginSetupContract } from '../../../../plugins/features/server';
|
||||
|
||||
|
@ -23,6 +23,7 @@ export interface CoreSetup {
|
|||
|
||||
export interface PluginsSetup {
|
||||
features: PluginSetupContract;
|
||||
home: HomeServerPluginSetup;
|
||||
interpreter: {
|
||||
register: (specs: any) => any;
|
||||
};
|
||||
|
@ -39,9 +40,7 @@ export interface PluginsSetup {
|
|||
export async function createSetupShim(
|
||||
server: Legacy.Server
|
||||
): Promise<{ coreSetup: CoreSetup; pluginsSetup: PluginsSetup }> {
|
||||
// @ts-ignore: New Platform object not typed
|
||||
const setup: ExistingCoreSetup = server.newPlatform.setup.core;
|
||||
|
||||
const setup = server.newPlatform.setup.core;
|
||||
return {
|
||||
coreSetup: {
|
||||
...setup,
|
||||
|
@ -58,17 +57,12 @@ export async function createSetupShim(
|
|||
pluginsSetup: {
|
||||
// @ts-ignore: New Platform not typed
|
||||
features: server.newPlatform.setup.plugins.features,
|
||||
home: server.newPlatform.setup.plugins.home,
|
||||
// @ts-ignore Interpreter plugin not typed on legacy server
|
||||
interpreter: server.plugins.interpreter,
|
||||
kibana: {
|
||||
injectedUiAppVars: await server.getInjectedUiAppVars('kibana'),
|
||||
},
|
||||
sampleData: {
|
||||
// @ts-ignore: Missing from Legacy Server Type
|
||||
addSavedObjectsToSampleDataset: server.addSavedObjectsToSampleDataset,
|
||||
// @ts-ignore: Missing from Legacy Server Type
|
||||
addAppLinksToSampleDataset: server.addAppLinksToSampleDataset,
|
||||
},
|
||||
usageCollection: server.newPlatform.setup.plugins.usageCollection,
|
||||
},
|
||||
};
|
||||
|
|
|
@ -19,10 +19,7 @@ import { SpacesPluginSetup } from '../../../plugins/spaces/server';
|
|||
import { VisTypeTimeseriesSetup } from '../../../../src/plugins/vis_type_timeseries/server';
|
||||
import { APMPluginContract } from '../../../plugins/apm/server';
|
||||
|
||||
const APP_ID = 'infra';
|
||||
const logsSampleDataLinkLabel = i18n.translate('xpack.infra.sampleDataLinkLabel', {
|
||||
defaultMessage: 'Logs',
|
||||
});
|
||||
export const APP_ID = 'infra';
|
||||
|
||||
export function infra(kibana: any) {
|
||||
return new kibana.Plugin({
|
||||
|
@ -89,6 +86,7 @@ export function infra(kibana: any) {
|
|||
} as unknown) as PluginInitializerContext;
|
||||
// NP_TODO: Use real types from the other plugins as they are migrated
|
||||
const pluginDeps: InfraServerPluginDeps = {
|
||||
home: legacyServer.newPlatform.setup.plugins.home,
|
||||
usageCollection: plugins.usageCollection as UsageCollectionSetup,
|
||||
indexPatterns: {
|
||||
indexPatternsServiceFactory: legacyServer.indexPatternsServiceFactory,
|
||||
|
@ -111,15 +109,6 @@ export function infra(kibana: any) {
|
|||
'defineInternalSourceConfiguration',
|
||||
libs.sources.defineInternalSourceConfiguration.bind(libs.sources)
|
||||
);
|
||||
|
||||
// NP_TODO: How do we move this to new platform?
|
||||
legacyServer.addAppLinksToSampleDataset('logs', [
|
||||
{
|
||||
path: `/app/${APP_ID}#/logs`,
|
||||
label: logsSampleDataLinkLabel,
|
||||
icon: 'logsApp',
|
||||
},
|
||||
]);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
|
@ -12,9 +12,11 @@ import { PluginSetupContract as FeaturesPluginSetup } from '../../../../../../..
|
|||
import { SpacesPluginSetup } from '../../../../../../../plugins/spaces/server';
|
||||
import { VisTypeTimeseriesSetup } from '../../../../../../../../src/plugins/vis_type_timeseries/server';
|
||||
import { APMPluginContract } from '../../../../../../../plugins/apm/server';
|
||||
import { HomeServerPluginSetup } from '../../../../../../../../src/plugins/home/server';
|
||||
|
||||
// NP_TODO: Compose real types from plugins we depend on, no "any"
|
||||
export interface InfraServerPluginDeps {
|
||||
home: HomeServerPluginSetup;
|
||||
spaces: SpacesPluginSetup;
|
||||
usageCollection: UsageCollectionSetup;
|
||||
metrics: VisTypeTimeseriesSetup;
|
||||
|
|
|
@ -4,6 +4,7 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import { CoreSetup, PluginInitializerContext } from 'src/core/server';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { Server } from 'hapi';
|
||||
import { InfraConfig } from '../../../../plugins/infra/server';
|
||||
import { initInfraServer } from './infra_server';
|
||||
|
@ -23,12 +24,17 @@ import { InfraSources } from './lib/sources';
|
|||
import { InfraServerPluginDeps } from './lib/adapters/framework';
|
||||
import { METRICS_FEATURE, LOGS_FEATURE } from './features';
|
||||
import { UsageCollector } from './usage/usage_collector';
|
||||
import { APP_ID } from '../index';
|
||||
import { InfraStaticSourceConfiguration } from './lib/sources/types';
|
||||
|
||||
export interface KbnServer extends Server {
|
||||
usage: any;
|
||||
}
|
||||
|
||||
const logsSampleDataLinkLabel = i18n.translate('xpack.infra.sampleDataLinkLabel', {
|
||||
defaultMessage: 'Logs',
|
||||
});
|
||||
|
||||
export interface InfraPluginSetup {
|
||||
defineInternalSourceConfiguration: (
|
||||
sourceId: string,
|
||||
|
@ -107,6 +113,14 @@ export class InfraServerPlugin {
|
|||
plugins.features.registerFeature(METRICS_FEATURE);
|
||||
plugins.features.registerFeature(LOGS_FEATURE);
|
||||
|
||||
plugins.home.sampleData.addAppLinksToSampleDataset('logs', [
|
||||
{
|
||||
path: `/app/${APP_ID}#/logs`,
|
||||
label: logsSampleDataLinkLabel,
|
||||
icon: 'logsApp',
|
||||
},
|
||||
]);
|
||||
|
||||
initInfraServer(this.libs);
|
||||
|
||||
// Telemetry
|
||||
|
|
|
@ -102,6 +102,7 @@ export function maps(kibana) {
|
|||
const pluginsSetup = {
|
||||
featuresPlugin: newPlatformPlugins.features,
|
||||
licensing: newPlatformPlugins.licensing,
|
||||
home: newPlatformPlugins.home,
|
||||
};
|
||||
|
||||
// legacy dependencies
|
||||
|
@ -117,9 +118,6 @@ export function maps(kibana) {
|
|||
savedObjects: {
|
||||
getSavedObjectsRepository: server.savedObjects.getSavedObjectsRepository,
|
||||
},
|
||||
addSavedObjectsToSampleDataset: server.addSavedObjectsToSampleDataset,
|
||||
addAppLinksToSampleDataset: server.addAppLinksToSampleDataset,
|
||||
replacePanelInSampleDatasetDashboard: server.replacePanelInSampleDatasetDashboard,
|
||||
injectUiAppVars: server.injectUiAppVars,
|
||||
getInjectedUiAppVars: server.getInjectedUiAppVars,
|
||||
};
|
||||
|
|
|
@ -13,7 +13,7 @@ import { initRoutes } from './routes';
|
|||
|
||||
export class MapPlugin {
|
||||
setup(core, plugins, __LEGACY) {
|
||||
const { featuresPlugin, licensing } = plugins;
|
||||
const { featuresPlugin, home, licensing } = plugins;
|
||||
let routesInitialized = false;
|
||||
|
||||
featuresPlugin.registerFeature({
|
||||
|
@ -54,66 +54,68 @@ export class MapPlugin {
|
|||
const sampleDataLinkLabel = i18n.translate('xpack.maps.sampleDataLinkLabel', {
|
||||
defaultMessage: 'Map',
|
||||
});
|
||||
__LEGACY.addSavedObjectsToSampleDataset('ecommerce', getEcommerceSavedObjects());
|
||||
if (home) {
|
||||
home.sampleData.addSavedObjectsToSampleDataset('ecommerce', getEcommerceSavedObjects());
|
||||
|
||||
__LEGACY.addAppLinksToSampleDataset('ecommerce', [
|
||||
{
|
||||
path: createMapPath('2c9c1f60-1909-11e9-919b-ffe5949a18d2'),
|
||||
label: sampleDataLinkLabel,
|
||||
icon: APP_ICON,
|
||||
},
|
||||
]);
|
||||
home.sampleData.addAppLinksToSampleDataset('ecommerce', [
|
||||
{
|
||||
path: createMapPath('2c9c1f60-1909-11e9-919b-ffe5949a18d2'),
|
||||
label: sampleDataLinkLabel,
|
||||
icon: APP_ICON,
|
||||
},
|
||||
]);
|
||||
|
||||
__LEGACY.replacePanelInSampleDatasetDashboard({
|
||||
sampleDataId: 'ecommerce',
|
||||
dashboardId: '722b74f0-b882-11e8-a6d9-e546fe2bba5f',
|
||||
oldEmbeddableId: '9c6f83f0-bb4d-11e8-9c84-77068524bcab',
|
||||
embeddableId: '2c9c1f60-1909-11e9-919b-ffe5949a18d2',
|
||||
embeddableType: 'map',
|
||||
embeddableConfig: {
|
||||
isLayerTOCOpen: false,
|
||||
},
|
||||
});
|
||||
home.sampleData.replacePanelInSampleDatasetDashboard({
|
||||
sampleDataId: 'ecommerce',
|
||||
dashboardId: '722b74f0-b882-11e8-a6d9-e546fe2bba5f',
|
||||
oldEmbeddableId: '9c6f83f0-bb4d-11e8-9c84-77068524bcab',
|
||||
embeddableId: '2c9c1f60-1909-11e9-919b-ffe5949a18d2',
|
||||
embeddableType: 'map',
|
||||
embeddableConfig: {
|
||||
isLayerTOCOpen: false,
|
||||
},
|
||||
});
|
||||
|
||||
__LEGACY.addSavedObjectsToSampleDataset('flights', getFlightsSavedObjects());
|
||||
home.sampleData.addSavedObjectsToSampleDataset('flights', getFlightsSavedObjects());
|
||||
|
||||
__LEGACY.addAppLinksToSampleDataset('flights', [
|
||||
{
|
||||
path: createMapPath('5dd88580-1906-11e9-919b-ffe5949a18d2'),
|
||||
label: sampleDataLinkLabel,
|
||||
icon: APP_ICON,
|
||||
},
|
||||
]);
|
||||
home.sampleData.addAppLinksToSampleDataset('flights', [
|
||||
{
|
||||
path: createMapPath('5dd88580-1906-11e9-919b-ffe5949a18d2'),
|
||||
label: sampleDataLinkLabel,
|
||||
icon: APP_ICON,
|
||||
},
|
||||
]);
|
||||
|
||||
__LEGACY.replacePanelInSampleDatasetDashboard({
|
||||
sampleDataId: 'flights',
|
||||
dashboardId: '7adfa750-4c81-11e8-b3d7-01146121b73d',
|
||||
oldEmbeddableId: '334084f0-52fd-11e8-a160-89cc2ad9e8e2',
|
||||
embeddableId: '5dd88580-1906-11e9-919b-ffe5949a18d2',
|
||||
embeddableType: MAP_SAVED_OBJECT_TYPE,
|
||||
embeddableConfig: {
|
||||
isLayerTOCOpen: true,
|
||||
},
|
||||
});
|
||||
home.sampleData.replacePanelInSampleDatasetDashboard({
|
||||
sampleDataId: 'flights',
|
||||
dashboardId: '7adfa750-4c81-11e8-b3d7-01146121b73d',
|
||||
oldEmbeddableId: '334084f0-52fd-11e8-a160-89cc2ad9e8e2',
|
||||
embeddableId: '5dd88580-1906-11e9-919b-ffe5949a18d2',
|
||||
embeddableType: MAP_SAVED_OBJECT_TYPE,
|
||||
embeddableConfig: {
|
||||
isLayerTOCOpen: true,
|
||||
},
|
||||
});
|
||||
|
||||
__LEGACY.addSavedObjectsToSampleDataset('logs', getWebLogsSavedObjects());
|
||||
__LEGACY.addAppLinksToSampleDataset('logs', [
|
||||
{
|
||||
path: createMapPath('de71f4f0-1902-11e9-919b-ffe5949a18d2'),
|
||||
label: sampleDataLinkLabel,
|
||||
icon: APP_ICON,
|
||||
},
|
||||
]);
|
||||
__LEGACY.replacePanelInSampleDatasetDashboard({
|
||||
sampleDataId: 'logs',
|
||||
dashboardId: 'edf84fe0-e1a0-11e7-b6d5-4dc382ef7f5b',
|
||||
oldEmbeddableId: '06cf9c40-9ee8-11e7-8711-e7a007dcef99',
|
||||
embeddableId: 'de71f4f0-1902-11e9-919b-ffe5949a18d2',
|
||||
embeddableType: MAP_SAVED_OBJECT_TYPE,
|
||||
embeddableConfig: {
|
||||
isLayerTOCOpen: false,
|
||||
},
|
||||
});
|
||||
home.sampleData.addSavedObjectsToSampleDataset('logs', getWebLogsSavedObjects());
|
||||
home.sampleData.addAppLinksToSampleDataset('logs', [
|
||||
{
|
||||
path: createMapPath('de71f4f0-1902-11e9-919b-ffe5949a18d2'),
|
||||
label: sampleDataLinkLabel,
|
||||
icon: APP_ICON,
|
||||
},
|
||||
]);
|
||||
home.sampleData.replacePanelInSampleDatasetDashboard({
|
||||
sampleDataId: 'logs',
|
||||
dashboardId: 'edf84fe0-e1a0-11e7-b6d5-4dc382ef7f5b',
|
||||
oldEmbeddableId: '06cf9c40-9ee8-11e7-8711-e7a007dcef99',
|
||||
embeddableId: 'de71f4f0-1902-11e9-919b-ffe5949a18d2',
|
||||
embeddableType: MAP_SAVED_OBJECT_TYPE,
|
||||
embeddableConfig: {
|
||||
isLayerTOCOpen: false,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
__LEGACY.injectUiAppVars(APP_ID, async () => {
|
||||
return await __LEGACY.getInjectedUiAppVars('kibana');
|
||||
|
|
|
@ -78,17 +78,17 @@ export const ml = (kibana: any) => {
|
|||
};
|
||||
|
||||
const core: MlCoreSetup = {
|
||||
addAppLinksToSampleDataset: server.addAppLinksToSampleDataset,
|
||||
injectUiAppVars: server.injectUiAppVars,
|
||||
http: mlHttpService,
|
||||
savedObjects: server.savedObjects,
|
||||
};
|
||||
const { usageCollection, cloud } = kbnServer.newPlatform.setup.plugins;
|
||||
const { usageCollection, cloud, home } = kbnServer.newPlatform.setup.plugins;
|
||||
const plugins = {
|
||||
elasticsearch: server.plugins.elasticsearch,
|
||||
security: server.plugins.security,
|
||||
xpackMain: server.plugins.xpack_main,
|
||||
spaces: server.plugins.spaces,
|
||||
home,
|
||||
usageCollection: usageCollection as UsageCollectionSetup,
|
||||
cloud: cloud as CloudSetup,
|
||||
ml: this,
|
||||
|
|
|
@ -55,6 +55,7 @@ import { jobAuditMessagesRoutes } from '../routes/job_audit_messages';
|
|||
// @ts-ignore: could not find declaration file for module
|
||||
import { fileDataVisualizerRoutes } from '../routes/file_data_visualizer';
|
||||
import { initMlServerLog, LogInitialization } from '../client/log';
|
||||
import { HomeServerPluginSetup } from '../../../../../../src/plugins/home/server';
|
||||
|
||||
type CoreHttpSetup = CoreSetup['http'];
|
||||
export interface MlHttpServiceSetup extends CoreHttpSetup {
|
||||
|
@ -66,7 +67,6 @@ export interface MlXpackMainPlugin extends XPackMainPlugin {
|
|||
}
|
||||
|
||||
export interface MlCoreSetup {
|
||||
addAppLinksToSampleDataset: () => any;
|
||||
injectUiAppVars: (id: string, callback: () => {}) => any;
|
||||
http: MlHttpServiceSetup;
|
||||
savedObjects: SavedObjectsLegacyService;
|
||||
|
@ -82,6 +82,7 @@ export interface PluginsSetup {
|
|||
spaces: any;
|
||||
usageCollection?: UsageCollectionSetup;
|
||||
cloud?: CloudSetup;
|
||||
home?: HomeServerPluginSetup;
|
||||
// TODO: this is temporary for `mirrorPluginStatus`
|
||||
ml: any;
|
||||
}
|
||||
|
@ -112,7 +113,7 @@ export class Plugin {
|
|||
|
||||
public setup(core: MlCoreSetup, plugins: PluginsSetup) {
|
||||
const xpackMainPlugin: MlXpackMainPlugin = plugins.xpackMain;
|
||||
const { addAppLinksToSampleDataset, http, injectUiAppVars } = core;
|
||||
const { http, injectUiAppVars } = core;
|
||||
const pluginId = this.pluginId;
|
||||
|
||||
mirrorPluginStatus(xpackMainPlugin, plugins.ml);
|
||||
|
@ -124,10 +125,12 @@ export class Plugin {
|
|||
|
||||
// Add links to the Kibana sample data sets if ml is enabled
|
||||
// and there is a full license (trial or platinum).
|
||||
if (mlFeature.isEnabled() === true) {
|
||||
if (mlFeature.isEnabled() === true && plugins.home) {
|
||||
const licenseCheckResults = mlFeature.getLicenseCheckResults();
|
||||
if (licenseCheckResults.licenseType === LICENSE_TYPE.FULL) {
|
||||
addLinksToSampleDatasets({ addAppLinksToSampleDataset });
|
||||
addLinksToSampleDatasets({
|
||||
addAppLinksToSampleDataset: plugins.home.sampleData.addAppLinksToSampleDataset,
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@ -2755,59 +2755,59 @@
|
|||
"regionMap.visParams.colorSchemaLabel": "カラー図表",
|
||||
"regionMap.visParams.layerSettingsTitle": "レイヤー設定",
|
||||
"regionMap.visParams.outlineWeightLabel": "境界の太さ",
|
||||
"server.sampleData.ecommerceSpec.averageSalesPerRegionTitle": "[e コマース] 地域ごとの平均売上",
|
||||
"server.sampleData.ecommerceSpec.averageSalesPriceTitle": "[e コマース] 平均販売価格",
|
||||
"server.sampleData.ecommerceSpec.averageSoldQuantityTitle": "[e コマース] 平均販売数",
|
||||
"server.sampleData.ecommerceSpec.controlsTitle": "[e コマース] コントロール",
|
||||
"server.sampleData.ecommerceSpec.markdownTitle": "[e コマース] マークダウン",
|
||||
"server.sampleData.ecommerceSpec.ordersTitle": "[e コマース] 注文",
|
||||
"server.sampleData.ecommerceSpec.promotionTrackingTitle": "[e コマース] プロモーショントラッキング",
|
||||
"server.sampleData.ecommerceSpec.revenueDashboardDescription": "サンプルの e コマースの注文と収益を分析します",
|
||||
"server.sampleData.ecommerceSpec.revenueDashboardTitle": "[e コマース] 収益ダッシュボード",
|
||||
"server.sampleData.ecommerceSpec.salesByCategoryTitle": "[e コマース] カテゴリーごとの売上",
|
||||
"server.sampleData.ecommerceSpec.salesByGenderTitle": "[e コマース] 性別ごとの売上",
|
||||
"server.sampleData.ecommerceSpec.soldProductsPerDayTitle": "[e コマース] 1 日の販売製品",
|
||||
"server.sampleData.ecommerceSpec.topSellingProductsTitle": "[e コマース] トップセラー製品",
|
||||
"server.sampleData.ecommerceSpec.totalRevenueTitle": "[e コマース] 合計収益",
|
||||
"server.sampleData.ecommerceSpecDescription": "e コマースの注文をトラッキングするサンプルデータ、ビジュアライゼーション、ダッシュボードです。",
|
||||
"server.sampleData.ecommerceSpecTitle": "サンプル e コマース注文",
|
||||
"server.sampleData.flightsSpec.airlineCarrierTitle": "[フライト] 航空会社",
|
||||
"server.sampleData.flightsSpec.airportConnectionsTitle": "[フライト] 空港乗り継ぎ (空港にカーソルを合わせてください)",
|
||||
"server.sampleData.flightsSpec.averageTicketPriceTitle": "[フライト] 平均運賃",
|
||||
"server.sampleData.flightsSpec.controlsTitle": "[フライト] コントロール",
|
||||
"server.sampleData.flightsSpec.delayBucketsTitle": "[フライト] 遅延バケット",
|
||||
"server.sampleData.flightsSpec.delaysAndCancellationsTitle": "[フライト] 遅延・欠航",
|
||||
"server.sampleData.flightsSpec.delayTypeTitle": "[フライト] 遅延タイプ",
|
||||
"server.sampleData.flightsSpec.destinationWeatherTitle": "[フライト] 目的地の天候",
|
||||
"server.sampleData.flightsSpec.flightCancellationsTitle": "[フライト] フライト欠航",
|
||||
"server.sampleData.flightsSpec.flightCountAndAverageTicketPriceTitle": "[フライト] カウントと平均運賃",
|
||||
"server.sampleData.flightsSpec.flightDelaysTitle": "[フライト] フライトの遅延",
|
||||
"server.sampleData.flightsSpec.flightLogTitle": "[フライト] 飛行記録",
|
||||
"server.sampleData.flightsSpec.globalFlightDashboardDescription": "ES-Air、Logstash Airways、Kibana Airlines、JetBeats のサンプル飛行データを分析します",
|
||||
"server.sampleData.flightsSpec.globalFlightDashboardTitle": "[フライト] グローバルフライトダッシュボード",
|
||||
"server.sampleData.flightsSpec.markdownInstructionsTitle": "[フライト] マークダウンの指示",
|
||||
"server.sampleData.flightsSpec.originCountryTicketPricesTitle": "[フライト] 出発国の運賃",
|
||||
"server.sampleData.flightsSpec.originCountryTitle": "[Flights] 出発国と到着国の比較",
|
||||
"server.sampleData.flightsSpec.totalFlightCancellationsTitle": "[フライト] フライト欠航合計",
|
||||
"server.sampleData.flightsSpec.totalFlightDelaysTitle": "[フライト] フライト遅延合計",
|
||||
"server.sampleData.flightsSpec.totalFlightsTitle": "[フライト] フライト合計",
|
||||
"server.sampleData.flightsSpecDescription": "飛行ルートを監視するサンプルデータ、ビジュアライゼーション、ダッシュボードです。",
|
||||
"server.sampleData.flightsSpecTitle": "サンプル飛行データ",
|
||||
"server.sampleData.logsSpec.fileTypeScatterPlotTitle": "[ログ] ファイルタイプ散布図",
|
||||
"server.sampleData.logsSpec.goalsTitle": "[ログ] 目標",
|
||||
"server.sampleData.logsSpec.heatmapTitle": "[ログ] ヒートマップ",
|
||||
"server.sampleData.logsSpec.hostVisitsBytesTableTitle": "[ログ] ホスト、訪問数、バイト表",
|
||||
"server.sampleData.logsSpec.inputControlsTitle": "[ログ] インプットコントロール",
|
||||
"server.sampleData.logsSpec.markdownInstructionsTitle": "[ログ] マークダウンの指示",
|
||||
"server.sampleData.logsSpec.responseCodesOverTimeTitle": "[ログ] 一定期間の応答コードと注釈",
|
||||
"server.sampleData.logsSpec.sourceAndDestinationSankeyChartTitle": "[ログ] ソースと行先のサンキーダイアグラム",
|
||||
"server.sampleData.logsSpec.uniqueVisitorsByCountryTitle": "[ログ] 国ごとのユニークビジター",
|
||||
"server.sampleData.logsSpec.uniqueVisitorsTitle": "[ログ] ユニークビジターと平均バイトの比較",
|
||||
"server.sampleData.logsSpec.visitorOSTitle": "[ログ] OS 別のビジター",
|
||||
"server.sampleData.logsSpec.webTrafficDescription": "Elastic Web サイトのサンプル Webトラフィックログデータを分析します",
|
||||
"server.sampleData.logsSpec.webTrafficTitle": "[ログ] Web トラフィック",
|
||||
"server.sampleData.logsSpecDescription": "Web ログを監視するサンプルデータ、ビジュアライゼーション、ダッシュボードです。",
|
||||
"server.sampleData.logsSpecTitle": "サンプル Web ログ",
|
||||
"home.sampleData.ecommerceSpec.averageSalesPerRegionTitle": "[e コマース] 地域ごとの平均売上",
|
||||
"home.sampleData.ecommerceSpec.averageSalesPriceTitle": "[e コマース] 平均販売価格",
|
||||
"home.sampleData.ecommerceSpec.averageSoldQuantityTitle": "[e コマース] 平均販売数",
|
||||
"home.sampleData.ecommerceSpec.controlsTitle": "[e コマース] コントロール",
|
||||
"home.sampleData.ecommerceSpec.markdownTitle": "[e コマース] マークダウン",
|
||||
"home.sampleData.ecommerceSpec.ordersTitle": "[e コマース] 注文",
|
||||
"home.sampleData.ecommerceSpec.promotionTrackingTitle": "[e コマース] プロモーショントラッキング",
|
||||
"home.sampleData.ecommerceSpec.revenueDashboardDescription": "サンプルの e コマースの注文と収益を分析します",
|
||||
"home.sampleData.ecommerceSpec.revenueDashboardTitle": "[e コマース] 収益ダッシュボード",
|
||||
"home.sampleData.ecommerceSpec.salesByCategoryTitle": "[e コマース] カテゴリーごとの売上",
|
||||
"home.sampleData.ecommerceSpec.salesByGenderTitle": "[e コマース] 性別ごとの売上",
|
||||
"home.sampleData.ecommerceSpec.soldProductsPerDayTitle": "[e コマース] 1 日の販売製品",
|
||||
"home.sampleData.ecommerceSpec.topSellingProductsTitle": "[e コマース] トップセラー製品",
|
||||
"home.sampleData.ecommerceSpec.totalRevenueTitle": "[e コマース] 合計収益",
|
||||
"home.sampleData.ecommerceSpecDescription": "e コマースの注文をトラッキングするサンプルデータ、ビジュアライゼーション、ダッシュボードです。",
|
||||
"home.sampleData.ecommerceSpecTitle": "サンプル e コマース注文",
|
||||
"home.sampleData.flightsSpec.airlineCarrierTitle": "[フライト] 航空会社",
|
||||
"home.sampleData.flightsSpec.airportConnectionsTitle": "[フライト] 空港乗り継ぎ (空港にカーソルを合わせてください)",
|
||||
"home.sampleData.flightsSpec.averageTicketPriceTitle": "[フライト] 平均運賃",
|
||||
"home.sampleData.flightsSpec.controlsTitle": "[フライト] コントロール",
|
||||
"home.sampleData.flightsSpec.delayBucketsTitle": "[フライト] 遅延バケット",
|
||||
"home.sampleData.flightsSpec.delaysAndCancellationsTitle": "[フライト] 遅延・欠航",
|
||||
"home.sampleData.flightsSpec.delayTypeTitle": "[フライト] 遅延タイプ",
|
||||
"home.sampleData.flightsSpec.destinationWeatherTitle": "[フライト] 目的地の天候",
|
||||
"home.sampleData.flightsSpec.flightCancellationsTitle": "[フライト] フライト欠航",
|
||||
"home.sampleData.flightsSpec.flightCountAndAverageTicketPriceTitle": "[フライト] カウントと平均運賃",
|
||||
"home.sampleData.flightsSpec.flightDelaysTitle": "[フライト] フライトの遅延",
|
||||
"home.sampleData.flightsSpec.flightLogTitle": "[フライト] 飛行記録",
|
||||
"home.sampleData.flightsSpec.globalFlightDashboardDescription": "ES-Air、Logstash Airways、Kibana Airlines、JetBeats のサンプル飛行データを分析します",
|
||||
"home.sampleData.flightsSpec.globalFlightDashboardTitle": "[フライト] グローバルフライトダッシュボード",
|
||||
"home.sampleData.flightsSpec.markdownInstructionsTitle": "[フライト] マークダウンの指示",
|
||||
"home.sampleData.flightsSpec.originCountryTicketPricesTitle": "[フライト] 出発国の運賃",
|
||||
"home.sampleData.flightsSpec.originCountryTitle": "[Flights] 出発国と到着国の比較",
|
||||
"home.sampleData.flightsSpec.totalFlightCancellationsTitle": "[フライト] フライト欠航合計",
|
||||
"home.sampleData.flightsSpec.totalFlightDelaysTitle": "[フライト] フライト遅延合計",
|
||||
"home.sampleData.flightsSpec.totalFlightsTitle": "[フライト] フライト合計",
|
||||
"home.sampleData.flightsSpecDescription": "飛行ルートを監視するサンプルデータ、ビジュアライゼーション、ダッシュボードです。",
|
||||
"home.sampleData.flightsSpecTitle": "サンプル飛行データ",
|
||||
"home.sampleData.logsSpec.fileTypeScatterPlotTitle": "[ログ] ファイルタイプ散布図",
|
||||
"home.sampleData.logsSpec.goalsTitle": "[ログ] 目標",
|
||||
"home.sampleData.logsSpec.heatmapTitle": "[ログ] ヒートマップ",
|
||||
"home.sampleData.logsSpec.hostVisitsBytesTableTitle": "[ログ] ホスト、訪問数、バイト表",
|
||||
"home.sampleData.logsSpec.inputControlsTitle": "[ログ] インプットコントロール",
|
||||
"home.sampleData.logsSpec.markdownInstructionsTitle": "[ログ] マークダウンの指示",
|
||||
"home.sampleData.logsSpec.responseCodesOverTimeTitle": "[ログ] 一定期間の応答コードと注釈",
|
||||
"home.sampleData.logsSpec.sourceAndDestinationSankeyChartTitle": "[ログ] ソースと行先のサンキーダイアグラム",
|
||||
"home.sampleData.logsSpec.uniqueVisitorsByCountryTitle": "[ログ] 国ごとのユニークビジター",
|
||||
"home.sampleData.logsSpec.uniqueVisitorsTitle": "[ログ] ユニークビジターと平均バイトの比較",
|
||||
"home.sampleData.logsSpec.visitorOSTitle": "[ログ] OS 別のビジター",
|
||||
"home.sampleData.logsSpec.webTrafficDescription": "Elastic Web サイトのサンプル Webトラフィックログデータを分析します",
|
||||
"home.sampleData.logsSpec.webTrafficTitle": "[ログ] Web トラフィック",
|
||||
"home.sampleData.logsSpecDescription": "Web ログを監視するサンプルデータ、ビジュアライゼーション、ダッシュボードです。",
|
||||
"home.sampleData.logsSpecTitle": "サンプル Web ログ",
|
||||
"server.stats.notReadyMessage": "まだ統計が準備できていません。後程再試行してください",
|
||||
"server.status.disabledTitle": "無効",
|
||||
"server.status.greenTitle": "緑",
|
||||
|
|
|
@ -2756,59 +2756,59 @@
|
|||
"regionMap.visParams.colorSchemaLabel": "颜色模式",
|
||||
"regionMap.visParams.layerSettingsTitle": "图层设置",
|
||||
"regionMap.visParams.outlineWeightLabel": "边框粗细",
|
||||
"server.sampleData.ecommerceSpec.averageSalesPerRegionTitle": "[电子商务] 每地区平均销售额",
|
||||
"server.sampleData.ecommerceSpec.averageSalesPriceTitle": "[电子商务] 平均销售价格",
|
||||
"server.sampleData.ecommerceSpec.averageSoldQuantityTitle": "[电子商务] 平均销售数量",
|
||||
"server.sampleData.ecommerceSpec.controlsTitle": "[电子商务] 控件",
|
||||
"server.sampleData.ecommerceSpec.markdownTitle": "[电子商务] Markdown",
|
||||
"server.sampleData.ecommerceSpec.ordersTitle": "[电子商务] 订单",
|
||||
"server.sampleData.ecommerceSpec.promotionTrackingTitle": "[电子商务] 促销追踪",
|
||||
"server.sampleData.ecommerceSpec.revenueDashboardDescription": "分析模拟的电子商务订单和收入",
|
||||
"server.sampleData.ecommerceSpec.revenueDashboardTitle": "[电子商务] 收入仪表板",
|
||||
"server.sampleData.ecommerceSpec.salesByCategoryTitle": "[电子商务] 按类别划分的销售额",
|
||||
"server.sampleData.ecommerceSpec.salesByGenderTitle": "[电子商务] 按性别划分的销售额",
|
||||
"server.sampleData.ecommerceSpec.soldProductsPerDayTitle": "[电子商务] 每天已售产品",
|
||||
"server.sampleData.ecommerceSpec.topSellingProductsTitle": "[电子商务] 热卖产品",
|
||||
"server.sampleData.ecommerceSpec.totalRevenueTitle": "[电子商务] 总收入",
|
||||
"server.sampleData.ecommerceSpecDescription": "用于追踪电子商务订单的样例数据、可视化和仪表板。",
|
||||
"server.sampleData.ecommerceSpecTitle": "样例电子商务订单",
|
||||
"server.sampleData.flightsSpec.airlineCarrierTitle": "[航班] 航空公司",
|
||||
"server.sampleData.flightsSpec.airportConnectionsTitle": "[航班] 机场航线(将鼠标悬停在机场上)",
|
||||
"server.sampleData.flightsSpec.averageTicketPriceTitle": "[航班] 平均票价",
|
||||
"server.sampleData.flightsSpec.controlsTitle": "[航班] 控件",
|
||||
"server.sampleData.flightsSpec.delayBucketsTitle": "[航班] 延误存储桶",
|
||||
"server.sampleData.flightsSpec.delaysAndCancellationsTitle": "[航班] 延误与取消",
|
||||
"server.sampleData.flightsSpec.delayTypeTitle": "[航班] 延误类型",
|
||||
"server.sampleData.flightsSpec.destinationWeatherTitle": "[航班] 到达地天气",
|
||||
"server.sampleData.flightsSpec.flightCancellationsTitle": "[航班] 航班取消",
|
||||
"server.sampleData.flightsSpec.flightCountAndAverageTicketPriceTitle": "[航班] 航班计数和平均票价",
|
||||
"server.sampleData.flightsSpec.flightDelaysTitle": "[航班] 航班延误",
|
||||
"server.sampleData.flightsSpec.flightLogTitle": "[航班] 飞行日志",
|
||||
"server.sampleData.flightsSpec.globalFlightDashboardDescription": "分析 ES-Air、Logstash Airways、Kibana Airlines 和 JetBeats 的模拟航班数据",
|
||||
"server.sampleData.flightsSpec.globalFlightDashboardTitle": "[航班] 全球航班仪表板",
|
||||
"server.sampleData.flightsSpec.markdownInstructionsTitle": "[航班] Markdown 说明",
|
||||
"server.sampleData.flightsSpec.originCountryTicketPricesTitle": "[航班] 始发国/地区票价",
|
||||
"server.sampleData.flightsSpec.originCountryTitle": "[航班] 始发国/地区与到达国/地区",
|
||||
"server.sampleData.flightsSpec.totalFlightCancellationsTitle": "[航班] 航班取消总数",
|
||||
"server.sampleData.flightsSpec.totalFlightDelaysTitle": "[航班] 航班延误总数",
|
||||
"server.sampleData.flightsSpec.totalFlightsTitle": "[航班] 航班总数",
|
||||
"server.sampleData.flightsSpecDescription": "用于监测航班路线的样例数据、可视化和仪表板。",
|
||||
"server.sampleData.flightsSpecTitle": "样例航班数据",
|
||||
"server.sampleData.logsSpec.fileTypeScatterPlotTitle": "[日志] 文件类型散点图",
|
||||
"server.sampleData.logsSpec.goalsTitle": "[日志] 目标",
|
||||
"server.sampleData.logsSpec.heatmapTitle": "[日志] 热图",
|
||||
"server.sampleData.logsSpec.hostVisitsBytesTableTitle": "[日志] 主机、访问和字节表",
|
||||
"server.sampleData.logsSpec.inputControlsTitle": "[日志] 输入控件",
|
||||
"server.sampleData.logsSpec.markdownInstructionsTitle": "[日志] Markdown 说明",
|
||||
"server.sampleData.logsSpec.responseCodesOverTimeTitle": "[日志] 时移响应代码 + 注释",
|
||||
"server.sampleData.logsSpec.sourceAndDestinationSankeyChartTitle": "[日志] 始发地和到达地 Sankey 图",
|
||||
"server.sampleData.logsSpec.uniqueVisitorsByCountryTitle": "[日志] 按国家/地区划分的独立访客",
|
||||
"server.sampleData.logsSpec.uniqueVisitorsTitle": "[日志] 独立访客与平均字节数",
|
||||
"server.sampleData.logsSpec.visitorOSTitle": "[日志] 按 OS 划分的访客",
|
||||
"server.sampleData.logsSpec.webTrafficDescription": "分析 Elastic 网站的模拟网络流量日志数据",
|
||||
"server.sampleData.logsSpec.webTrafficTitle": "[日志] 网络流量",
|
||||
"server.sampleData.logsSpecDescription": "用于监测 Web 日志的样例数据、可视化和仪表板。",
|
||||
"server.sampleData.logsSpecTitle": "样例 Web 日志",
|
||||
"home.sampleData.ecommerceSpec.averageSalesPerRegionTitle": "[电子商务] 每地区平均销售额",
|
||||
"home.sampleData.ecommerceSpec.averageSalesPriceTitle": "[电子商务] 平均销售价格",
|
||||
"home.sampleData.ecommerceSpec.averageSoldQuantityTitle": "[电子商务] 平均销售数量",
|
||||
"home.sampleData.ecommerceSpec.controlsTitle": "[电子商务] 控件",
|
||||
"home.sampleData.ecommerceSpec.markdownTitle": "[电子商务] Markdown",
|
||||
"home.sampleData.ecommerceSpec.ordersTitle": "[电子商务] 订单",
|
||||
"home.sampleData.ecommerceSpec.promotionTrackingTitle": "[电子商务] 促销追踪",
|
||||
"home.sampleData.ecommerceSpec.revenueDashboardDescription": "分析模拟的电子商务订单和收入",
|
||||
"home.sampleData.ecommerceSpec.revenueDashboardTitle": "[电子商务] 收入仪表板",
|
||||
"home.sampleData.ecommerceSpec.salesByCategoryTitle": "[电子商务] 按类别划分的销售额",
|
||||
"home.sampleData.ecommerceSpec.salesByGenderTitle": "[电子商务] 按性别划分的销售额",
|
||||
"home.sampleData.ecommerceSpec.soldProductsPerDayTitle": "[电子商务] 每天已售产品",
|
||||
"home.sampleData.ecommerceSpec.topSellingProductsTitle": "[电子商务] 热卖产品",
|
||||
"home.sampleData.ecommerceSpec.totalRevenueTitle": "[电子商务] 总收入",
|
||||
"home.sampleData.ecommerceSpecDescription": "用于追踪电子商务订单的样例数据、可视化和仪表板。",
|
||||
"home.sampleData.ecommerceSpecTitle": "样例电子商务订单",
|
||||
"home.sampleData.flightsSpec.airlineCarrierTitle": "[航班] 航空公司",
|
||||
"home.sampleData.flightsSpec.airportConnectionsTitle": "[航班] 机场航线(将鼠标悬停在机场上)",
|
||||
"home.sampleData.flightsSpec.averageTicketPriceTitle": "[航班] 平均票价",
|
||||
"home.sampleData.flightsSpec.controlsTitle": "[航班] 控件",
|
||||
"home.sampleData.flightsSpec.delayBucketsTitle": "[航班] 延误存储桶",
|
||||
"home.sampleData.flightsSpec.delaysAndCancellationsTitle": "[航班] 延误与取消",
|
||||
"home.sampleData.flightsSpec.delayTypeTitle": "[航班] 延误类型",
|
||||
"home.sampleData.flightsSpec.destinationWeatherTitle": "[航班] 到达地天气",
|
||||
"home.sampleData.flightsSpec.flightCancellationsTitle": "[航班] 航班取消",
|
||||
"home.sampleData.flightsSpec.flightCountAndAverageTicketPriceTitle": "[航班] 航班计数和平均票价",
|
||||
"home.sampleData.flightsSpec.flightDelaysTitle": "[航班] 航班延误",
|
||||
"home.sampleData.flightsSpec.flightLogTitle": "[航班] 飞行日志",
|
||||
"home.sampleData.flightsSpec.globalFlightDashboardDescription": "分析 ES-Air、Logstash Airways、Kibana Airlines 和 JetBeats 的模拟航班数据",
|
||||
"home.sampleData.flightsSpec.globalFlightDashboardTitle": "[航班] 全球航班仪表板",
|
||||
"home.sampleData.flightsSpec.markdownInstructionsTitle": "[航班] Markdown 说明",
|
||||
"home.sampleData.flightsSpec.originCountryTicketPricesTitle": "[航班] 始发国/地区票价",
|
||||
"home.sampleData.flightsSpec.originCountryTitle": "[航班] 始发国/地区与到达国/地区",
|
||||
"home.sampleData.flightsSpec.totalFlightCancellationsTitle": "[航班] 航班取消总数",
|
||||
"home.sampleData.flightsSpec.totalFlightDelaysTitle": "[航班] 航班延误总数",
|
||||
"home.sampleData.flightsSpec.totalFlightsTitle": "[航班] 航班总数",
|
||||
"home.sampleData.flightsSpecDescription": "用于监测航班路线的样例数据、可视化和仪表板。",
|
||||
"home.sampleData.flightsSpecTitle": "样例航班数据",
|
||||
"home.sampleData.logsSpec.fileTypeScatterPlotTitle": "[日志] 文件类型散点图",
|
||||
"home.sampleData.logsSpec.goalsTitle": "[日志] 目标",
|
||||
"home.sampleData.logsSpec.heatmapTitle": "[日志] 热图",
|
||||
"home.sampleData.logsSpec.hostVisitsBytesTableTitle": "[日志] 主机、访问和字节表",
|
||||
"home.sampleData.logsSpec.inputControlsTitle": "[日志] 输入控件",
|
||||
"home.sampleData.logsSpec.markdownInstructionsTitle": "[日志] Markdown 说明",
|
||||
"home.sampleData.logsSpec.responseCodesOverTimeTitle": "[日志] 时移响应代码 + 注释",
|
||||
"home.sampleData.logsSpec.sourceAndDestinationSankeyChartTitle": "[日志] 始发地和到达地 Sankey 图",
|
||||
"home.sampleData.logsSpec.uniqueVisitorsByCountryTitle": "[日志] 按国家/地区划分的独立访客",
|
||||
"home.sampleData.logsSpec.uniqueVisitorsTitle": "[日志] 独立访客与平均字节数",
|
||||
"home.sampleData.logsSpec.visitorOSTitle": "[日志] 按 OS 划分的访客",
|
||||
"home.sampleData.logsSpec.webTrafficDescription": "分析 Elastic 网站的模拟网络流量日志数据",
|
||||
"home.sampleData.logsSpec.webTrafficTitle": "[日志] 网络流量",
|
||||
"home.sampleData.logsSpecDescription": "用于监测 Web 日志的样例数据、可视化和仪表板。",
|
||||
"home.sampleData.logsSpecTitle": "样例 Web 日志",
|
||||
"server.stats.notReadyMessage": "统计尚未就绪。请稍后重试",
|
||||
"server.status.disabledTitle": "已禁用",
|
||||
"server.status.greenTitle": "绿",
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue