Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00
* [APM] Fix Synthtrace script (#133620)

  (cherry picked from commit 702aa29e56)

  # Conflicts:
  #   packages/elastic-apm-synthtrace/src/scripts/run_synthtrace.ts

* Update run_synthtrace.ts
parent 5baf51ae93
commit b13e4fa9c4

6 changed files with 119 additions and 103 deletions
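In short, loading the Synthtrace module no longer starts the CLI as a side effect; the package now exports a runSynthtrace() function that each entry point calls explicitly. A condensed sketch of the resulting wiring, with the file roles inferred from the hunks below (the exact file paths are my reading of the diff, not shown verbatim on this page):

// Package bin stub (presumably packages/elastic-apm-synthtrace/bin/synthtrace), after @babel/register is set up:
require('../src/scripts/run_synthtrace').runSynthtrace();

// Kibana wrapper script (presumably scripts/synthtrace.js at the repo root):
require('../src/setup_node_env');
require('../packages/elastic-apm-synthtrace/src/index').runSynthtrace();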
@@ -14,4 +14,4 @@ require('@babel/register')({
   presets: [['@babel/preset-env', { targets: { node: 'current' } }], '@babel/preset-typescript'],
 });

-require('../src/scripts/run_synthtrace');
+require('../src/scripts/run_synthtrace').runSynthtrace();
@@ -3,6 +3,9 @@
   "version": "0.1.0",
   "description": "Elastic APM trace data generator",
   "license": "SSPL-1.0 OR Elastic License 2.0",
+  "bin": {
+    "synthtrace": "./bin/synthtrace"
+  },
   "main": "./target_node/index.js",
   "private": true
 }
@@ -6,6 +6,7 @@
  * Side Public License, v 1.
  */

+export { runSynthtrace } from './scripts/run_synthtrace';
 export { timerange } from './lib/timerange';
 export { apm } from './lib/apm';
 export { stackMonitoring } from './lib/stack_monitoring';
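Because the package entry point now re-exports runSynthtrace alongside apm and timerange, the CLI can in principle also be started programmatically. A minimal sketch, assuming the package is consumed by its package name inside the repo (note that "main" in the package.json above points at the compiled ./target_node/index.js, which is exactly why the Kibana wrapper in the last hunk requires the TypeScript sources directly instead):

// Hypothetical programmatic consumer; resolves to the compiled target_node build via "main".
import { runSynthtrace } from '@elastic/apm-synthtrace';

runSynthtrace(); // parses process.argv and runs the default '*' command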
@@ -5,6 +5,7 @@
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import datemath from '@kbn/datemath';
import yargs from 'yargs/yargs';
import { Argv } from 'yargs';
@@ -119,103 +120,105 @@ function options(y: Argv) {

In this hunk the previously top-level yargs program (everything from yargs(process.argv.slice(2)) through .parse()) is wrapped in an exported runSynthtrace() function and re-indented; the command handler body is otherwise unchanged. The resulting code:

export type RunCliFlags = ReturnType<typeof options>['argv'];

export function runSynthtrace() {
  yargs(process.argv.slice(2))
    .command(
      '*',
      'Generate data and index into Elasticsearch',
      options,
      async (argv: RunCliFlags) => {
        if (argv.local) {
          argv.target = 'http://localhost:9200';
        }
        if (argv.kibana && !argv.target) {
          const url = new URL(argv.kibana);
          // super naive inference of ES target based on public kibana Cloud endpoint
          if (url.hostname.match(/\.kb\./)) {
            argv.target = argv.kibana.replace(/\.kb\./, '.es.');
          }
        }

        const runOptions = parseRunCliFlags(argv);

        const { logger, apmEsClient } = getCommonServices(runOptions);

        const toMs = datemath.parse(String(argv.to ?? 'now'))!.valueOf();
        const to = new Date(toMs);
        const defaultTimeRange = !runOptions.maxDocs ? '15m' : '520w';
        const fromMs = argv.from
          ? datemath.parse(String(argv.from))!.valueOf()
          : toMs - intervalToMs(defaultTimeRange);
        const from = new Date(fromMs);

        const live = argv.live;

        if (runOptions.dryRun) {
          await startHistoricalDataUpload(apmEsClient, logger, runOptions, from, to, '8.0.0');
          return;
        }

        // we need to know the running version to generate events that satisfy the min version requirements
        let version = await apmEsClient.runningVersion();
        logger.info(`Discovered Elasticsearch running version: ${version}`);
        version = version.replace('-SNAPSHOT', '');

        // We automatically set up managed APM either by migrating on cloud or installing the package locally
        if (runOptions.cloudId || argv.local || argv.kibana) {
          const kibanaClient = new ApmSynthtraceKibanaClient(logger);
          if (runOptions.cloudId) {
            await kibanaClient.migrateCloudToManagedApm(
              runOptions.cloudId,
              runOptions.username,
              runOptions.password
            );
          } else {
            let kibanaUrl: string | null = argv.kibana ?? null;
            if (argv.local) {
              kibanaUrl = await kibanaClient.discoverLocalKibana();
            }
            if (!kibanaUrl) throw Error('kibanaUrl could not be determined');
            await kibanaClient.installApmPackage(
              kibanaUrl,
              version,
              runOptions.username,
              runOptions.password
            );
          }
        }

        if (runOptions.cloudId && runOptions.numShards && runOptions.numShards > 0) {
          await apmEsClient.updateComponentTemplates(runOptions.numShards);
        }

        const aggregators: StreamAggregator[] = [new ServiceLatencyAggregator()];
        if (argv.clean) {
          await apmEsClient.clean(aggregators.map((a) => a.getDataStreamName() + '-*'));
        }
        if (runOptions.gcpRepository) {
          await apmEsClient.registerGcpRepository(runOptions.gcpRepository);
        }

        logger.info(
          `Starting data generation\n: ${JSON.stringify(
            {
              ...runOptions,
              from: from.toISOString(),
              to: to.toISOString(),
            },
            null,
            2
          )}`
        );

        for (const aggregator of aggregators) await apmEsClient.createDataStream(aggregator);

        if (runOptions.maxDocs !== 0)
          await startHistoricalDataUpload(apmEsClient, logger, runOptions, from, to, version);

        if (live) {
          await startLiveDataUpload(apmEsClient, logger, runOptions, to, version);
        }
      }
    )
    .parse();
}
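Two defaults in the handler are easy to miss: --local pins the Elasticsearch target to http://localhost:9200, and when only a Cloud Kibana URL is supplied the Elasticsearch endpoint is inferred by swapping the .kb. host segment for .es.. A small sketch of that inference, using an invented Cloud hostname purely for illustration:

// Illustrative only: the deployment hostname below is made up.
const kibana = 'https://my-deployment.kb.us-central1.gcp.cloud.es.io:9243';
const url = new URL(kibana);
const target = url.hostname.match(/\.kb\./) ? kibana.replace(/\.kb\./, '.es.') : undefined;
// target === 'https://my-deployment.es.us-central1.gcp.cloud.es.io:9243'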
@@ -20,15 +20,18 @@ function getParsedFile(flags: RunCliFlags) {
     throw new Error('Please specify a scenario to run');
   }

-  const result = [
+  const filepath = [
     path.resolve(parsedFile),
     path.resolve(`${parsedFile}.ts`),
     path.resolve(__dirname, '../../scenarios', parsedFile),
     path.resolve(__dirname, '../../scenarios', `${parsedFile}.ts`),
-  ].find((filepath) => existsSync(filepath));
+    path.resolve(__dirname, '../../scenarios', `${parsedFile}.js`),
+  ].find((p) => existsSync(p));

-  if (result) {
-    return result;
+  if (filepath) {
+    // eslint-disable-next-line no-console
+    console.log(`Loading scenario from ${filepath}`);
+    return filepath;
   }

   throw new Error(`Could not find scenario file: "${parsedFile}"`);
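For a bare scenario name the resolver now probes five candidate paths in order, so precompiled .js scenarios are picked up as well as .ts sources. A sketch with a hypothetical name (the comments approximate where each candidate lands; the scenarios directory is resolved relative to the script's own location):

import path from 'path';

// 'my_scenario' is a made-up name used only to show the probe order.
const parsedFile = 'my_scenario';
const candidates = [
  path.resolve(parsedFile),                                       // ./my_scenario (cwd)
  path.resolve(`${parsedFile}.ts`),                               // ./my_scenario.ts (cwd)
  path.resolve(__dirname, '../../scenarios', parsedFile),         // <synthtrace package>/scenarios/my_scenario
  path.resolve(__dirname, '../../scenarios', `${parsedFile}.ts`), // <synthtrace package>/scenarios/my_scenario.ts
  path.resolve(__dirname, '../../scenarios', `${parsedFile}.js`), // <synthtrace package>/scenarios/my_scenario.js (new)
];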
@@ -6,5 +6,11 @@
  * Side Public License, v 1.
  */

-require('../src/setup_node_env/node_version_validator');
-require('@elastic/apm-synthtrace/bin/synthtrace');
+require('../src/setup_node_env');
+
+// We have to import directly from package since scenarios and worker.js are imported dynamically,
+// If we import the package (require('@elastic/apm-synthtrace')) the program will be executed on the compiled files, and thus we need to
+// compile scenarios with `yarn kbn bootstrap` every time scenario changes.
+
+// eslint-disable-next-line @kbn/imports/uniform_imports
+require('../packages/elastic-apm-synthtrace/src/index').runSynthtrace();
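With the wrapper requiring src/setup_node_env, scenarios are transpiled on the fly rather than read from a prebuilt package, so editing a scenario no longer needs a yarn kbn bootstrap run. An illustrative invocation from a Kibana checkout (the scenario name is made up; --local, --live and --clean correspond to the flags visible in the CLI hunk above):

node scripts/synthtrace my_scenario.ts --local --live --clean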