Mirror of https://github.com/elastic/kibana.git, synced 2025-06-27 18:51:07 -04:00
[APM] Fix Synthtrace script (#133620)
commit 702aa29e56 (parent 56f2c1cc9e)
6 changed files with 123 additions and 108 deletions
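
In short: the Synthtrace CLI previously started as a side effect of requiring its module; this commit moves the yargs program into an exported runSynthtrace() function and calls it explicitly from each entry point. Judging by the require paths visible in the hunks below, the six files are the package launcher (bin/synthtrace), the package.json, the package index (src/index.ts), the CLI itself (src/scripts/run_synthtrace.ts), the scenario-file lookup in the flag-parsing helper, and the Kibana-root wrapper script under scripts/.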

@@ -14,4 +14,4 @@ require('@babel/register')({
   presets: [['@babel/preset-env', { targets: { node: 'current' } }], '@babel/preset-typescript'],
 });
 
-require('../src/scripts/run_synthtrace');
+require('../src/scripts/run_synthtrace').runSynthtrace();
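
Before this change, merely requiring ../src/scripts/run_synthtrace started the CLI. The launcher now calls the exported runSynthtrace() explicitly, so the same module can also be imported elsewhere without side effects (see the sketch after the run_synthtrace.ts hunk below).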

@@ -3,6 +3,9 @@
   "version": "0.1.0",
   "description": "Elastic APM trace data generator",
   "license": "SSPL-1.0 OR Elastic License 2.0",
+  "bin": {
+    "synthtrace": "./bin/synthtrace"
+  },
   "main": "./target_node/index.js",
   "private": true
 }
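
The new bin map tells npm/yarn to expose ./bin/synthtrace as an executable named synthtrace wherever the package is installed or linked; in such a project, something like `npx synthtrace --help` would invoke the launcher (invocation shown for illustration only; it assumes the package is resolvable from node_modules).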

@@ -6,6 +6,7 @@
  * Side Public License, v 1.
  */
 
+export { runSynthtrace } from './scripts/run_synthtrace';
 export { timerange } from './lib/timerange';
 export { apm } from './lib/apm';
 export { stackMonitoring } from './lib/stack_monitoring';
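
Re-exporting runSynthtrace from the package index lets consumers start the CLI programmatically, next to the existing timerange/apm helpers. A minimal sketch, assuming the package resolves as @elastic/apm-synthtrace:

    // run.ts - illustrative only: starts the Synthtrace CLI from code
    import { runSynthtrace } from '@elastic/apm-synthtrace';

    runSynthtrace();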

@@ -124,117 +124,119 @@ function options(y: Argv) {
 
 export type RunCliFlags = ReturnType<typeof options>['argv'];
 
-yargs(process.argv.slice(2))
-  .command(
-    '*',
-    'Generate data and index into Elasticsearch',
-    options,
-    async (argv: RunCliFlags) => {
-      if (argv.local) {
-        argv.target = 'http://localhost:9200';
-      }
-      if (argv.kibana && !argv.target) {
-        const url = new URL(argv.kibana);
-        // super naive inference of ES target based on public kibana Cloud endpoint
-        if (url.hostname.match(/\.kb\./)) {
-          argv.target = argv.kibana.replace(/\.kb\./, '.es.');
-        }
-      }
+export function runSynthtrace() {
+  yargs(process.argv.slice(2))
+    .command(
+      '*',
+      'Generate data and index into Elasticsearch',
+      options,
+      async (argv: RunCliFlags) => {
+        if (argv.local) {
+          argv.target = 'http://localhost:9200';
+        }
+        if (argv.kibana && !argv.target) {
+          const url = new URL(argv.kibana);
+          // super naive inference of ES target based on public kibana Cloud endpoint
+          if (url.hostname.match(/\.kb\./)) {
+            argv.target = argv.kibana.replace(/\.kb\./, '.es.');
+          }
+        }
 
-      const runOptions = parseRunCliFlags(argv);
+        const runOptions = parseRunCliFlags(argv);
 
-      const { logger, apmEsClient } = getCommonServices(runOptions);
+        const { logger, apmEsClient } = getCommonServices(runOptions);
 
-      const toMs = datemath.parse(String(argv.to ?? 'now'))!.valueOf();
-      const to = new Date(toMs);
-      const defaultTimeRange = !runOptions.maxDocs ? '15m' : '520w';
-      const fromMs = argv.from
-        ? datemath.parse(String(argv.from))!.valueOf()
-        : toMs - intervalToMs(defaultTimeRange);
-      const from = new Date(fromMs);
+        const toMs = datemath.parse(String(argv.to ?? 'now'))!.valueOf();
+        const to = new Date(toMs);
+        const defaultTimeRange = !runOptions.maxDocs ? '15m' : '520w';
+        const fromMs = argv.from
+          ? datemath.parse(String(argv.from))!.valueOf()
+          : toMs - intervalToMs(defaultTimeRange);
+        const from = new Date(fromMs);
 
-      const live = argv.live;
+        const live = argv.live;
 
-      if (runOptions.dryRun) {
-        await startHistoricalDataUpload(apmEsClient, logger, runOptions, from, to, '8.0.0');
-        return;
-      }
+        if (runOptions.dryRun) {
+          await startHistoricalDataUpload(apmEsClient, logger, runOptions, from, to, '8.0.0');
+          return;
+        }
 
-      // we need to know the running version to generate events that satisfy the min version requirements
-      let version = await apmEsClient.runningVersion();
-      logger.info(`Discovered Elasticsearch running version: ${version}`);
-      version = version.replace('-SNAPSHOT', '');
+        // we need to know the running version to generate events that satisfy the min version requirements
+        let version = await apmEsClient.runningVersion();
+        logger.info(`Discovered Elasticsearch running version: ${version}`);
+        version = version.replace('-SNAPSHOT', '');
 
-      // We automatically set up managed APM either by migrating on cloud or installing the package locally
-      if (runOptions.cloudId || argv.local || argv.kibana) {
-        const kibanaClient = new ApmSynthtraceKibanaClient(logger);
-        if (runOptions.cloudId) {
-          await kibanaClient.migrateCloudToManagedApm(
-            runOptions.cloudId,
-            runOptions.username,
-            runOptions.password
-          );
-        } else {
-          let kibanaUrl: string | null = argv.kibana ?? null;
-          if (argv.local) {
-            kibanaUrl = await kibanaClient.discoverLocalKibana();
-          }
-          if (!kibanaUrl) throw Error('kibanaUrl could not be determined');
-          await kibanaClient.installApmPackage(
-            kibanaUrl,
-            version,
-            runOptions.username,
-            runOptions.password
-          );
-        }
-      }
+        // We automatically set up managed APM either by migrating on cloud or installing the package locally
+        if (runOptions.cloudId || argv.local || argv.kibana) {
+          const kibanaClient = new ApmSynthtraceKibanaClient(logger);
+          if (runOptions.cloudId) {
+            await kibanaClient.migrateCloudToManagedApm(
+              runOptions.cloudId,
+              runOptions.username,
+              runOptions.password
+            );
+          } else {
+            let kibanaUrl: string | null = argv.kibana ?? null;
+            if (argv.local) {
+              kibanaUrl = await kibanaClient.discoverLocalKibana();
+            }
+            if (!kibanaUrl) throw Error('kibanaUrl could not be determined');
+            await kibanaClient.installApmPackage(
+              kibanaUrl,
+              version,
+              runOptions.username,
+              runOptions.password
+            );
+          }
+        }
 
-      if (runOptions.cloudId && runOptions.numShards && runOptions.numShards > 0) {
-        await apmEsClient.updateComponentTemplates(runOptions.numShards);
-      }
-      const aggregators: StreamAggregator[] = [];
-      const registry = new Map<string, () => StreamAggregator[]>([
-        ['service', () => [new ServiceLatencyAggregator()]],
-      ]);
-      if (runOptions.streamProcessors && runOptions.streamProcessors.length > 0) {
-        for (const processorName of runOptions.streamProcessors) {
-          const factory = registry.get(processorName);
-          if (factory) {
-            aggregators.push(...factory());
-          } else {
-            throw new Error(
-              `No processor named ${processorName} configured on known processor registry`
-            );
-          }
-        }
-      }
-      if (argv.clean) {
-        await apmEsClient.clean(aggregators.map((a) => a.getDataStreamName() + '-*'));
-      }
-      if (runOptions.gcpRepository) {
-        await apmEsClient.registerGcpRepository(runOptions.gcpRepository);
-      }
+        if (runOptions.cloudId && runOptions.numShards && runOptions.numShards > 0) {
+          await apmEsClient.updateComponentTemplates(runOptions.numShards);
+        }
+        const aggregators: StreamAggregator[] = [];
+        const registry = new Map<string, () => StreamAggregator[]>([
+          ['service', () => [new ServiceLatencyAggregator()]],
+        ]);
+        if (runOptions.streamProcessors && runOptions.streamProcessors.length > 0) {
+          for (const processorName of runOptions.streamProcessors) {
+            const factory = registry.get(processorName);
+            if (factory) {
+              aggregators.push(...factory());
+            } else {
+              throw new Error(
+                `No processor named ${processorName} configured on known processor registry`
+              );
+            }
+          }
+        }
+        if (argv.clean) {
+          await apmEsClient.clean(aggregators.map((a) => a.getDataStreamName() + '-*'));
+        }
+        if (runOptions.gcpRepository) {
+          await apmEsClient.registerGcpRepository(runOptions.gcpRepository);
+        }
 
-      logger.info(
-        `Starting data generation\n: ${JSON.stringify(
-          {
-            ...runOptions,
-            from: from.toISOString(),
-            to: to.toISOString(),
-          },
-          null,
-          2
-        )}`
-      );
+        logger.info(
+          `Starting data generation\n: ${JSON.stringify(
+            {
+              ...runOptions,
+              from: from.toISOString(),
+              to: to.toISOString(),
+            },
+            null,
+            2
+          )}`
+        );
 
-      for (const aggregator of aggregators) await apmEsClient.createDataStream(aggregator);
+        for (const aggregator of aggregators) await apmEsClient.createDataStream(aggregator);
 
-      if (runOptions.maxDocs !== 0)
-        await startHistoricalDataUpload(apmEsClient, logger, runOptions, from, to, version);
+        if (runOptions.maxDocs !== 0)
+          await startHistoricalDataUpload(apmEsClient, logger, runOptions, from, to, version);
 
-      if (live) {
-        await startLiveDataUpload(apmEsClient, logger, runOptions, to, version);
-      }
-    }
-  )
-  .parse();
+        if (live) {
+          await startLiveDataUpload(apmEsClient, logger, runOptions, to, version);
+        }
+      }
+    )
+    .parse();
+}
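
The hunk above is almost entirely an indentation move: the only new lines are the export function runSynthtrace() { wrapper and its closing }, so the yargs program is now built and parsed when the function is called rather than when the module is loaded. A minimal sketch of the pattern, using illustrative names (runCli, the greeting command) that are not from the Kibana source:

    import yargs from 'yargs';

    // Nothing runs at import time; the caller decides when the CLI starts.
    export function runCli() {
      yargs(process.argv.slice(2))
        .command(
          '*',
          'Print a greeting',
          (y) => y.option('name', { type: 'string', default: 'world' }),
          (argv) => {
            console.log(`hello ${argv.name}`);
          }
        )
        .parse(); // explicit parse, as in run_synthtrace.ts above
    }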

@@ -20,15 +20,18 @@ function getParsedFile(flags: RunCliFlags) {
     throw new Error('Please specify a scenario to run');
   }
 
-  const result = [
+  const filepath = [
     path.resolve(parsedFile),
     path.resolve(`${parsedFile}.ts`),
     path.resolve(__dirname, '../../scenarios', parsedFile),
     path.resolve(__dirname, '../../scenarios', `${parsedFile}.ts`),
-  ].find((filepath) => existsSync(filepath));
+    path.resolve(__dirname, '../../scenarios', `${parsedFile}.js`),
+  ].find((p) => existsSync(p));
 
-  if (result) {
-    return result;
+  if (filepath) {
+    // eslint-disable-next-line no-console
+    console.log(`Loading scenario from ${filepath}`);
+    return filepath;
   }
 
   throw new Error(`Could not find scenario file: "${parsedFile}"`);
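
With the extra candidate path and the new log line, a scenario can be addressed by bare name, by name with a .ts or .js extension, or by file path, and the CLI prints which file it picked. For example, from the Kibana root (scenario name shown for illustration; --local and --live are among the flags handled in the hunk above):

    node scripts/synthtrace simple_trace.ts --local --live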

@@ -6,5 +6,11 @@
  * Side Public License, v 1.
  */
 
-require('../src/setup_node_env/node_version_validator');
-require('@elastic/apm-synthtrace/bin/synthtrace');
+require('../src/setup_node_env');
+
+// We have to import directly from package since scenarios and worker.js are imported dynamically,
+// If we import the package (require('@elastic/apm-synthtrace')) the program will be executed on the compiled files, and thus we need to
+// compile scenarios with `yarn kbn bootstrap` every time scenario changes.
+
+// eslint-disable-next-line @kbn/imports/uniform_imports
+require('../packages/elastic-apm-synthtrace/src/index').runSynthtrace();
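
Pointing the wrapper at the package source, rather than at the compiled target_node output referenced by "main" in the package.json hunk, means dynamically imported scenarios and worker.js are transpiled on the fly by the ../src/setup_node_env require above, so editing a scenario no longer requires re-running `yarn kbn bootstrap`.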