[APM] Add scenario options for synthtrace (#121034) (#123128)

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
(cherry picked from commit 75cf64dca5)

Co-authored-by: Dario Gieselaar <dario.gieselaar@elastic.co>
Commit: 75d72b30a3 (parent: 75eea5dc34)
Committed by: Kibana Machine via GitHub, 2022-01-17 07:01:40 -05:00
6 changed files with 25 additions and 4 deletions
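In practice this change lets a scenario be parameterized from the command line: yargs' dot-notation parsing folds flags like --scenarioOpts.numServices into a single object, and the scenario falls back to its defaults (numServices = 3 in the first diff below) when the flag is omitted. A hypothetical invocation — the script path and scenario file name are assumptions, only the --scenarioOpts flag comes from this commit:

    node scripts/synthtrace 01_simple_trace.ts --target=http://localhost:9200 --scenarioOpts.numServices=5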

File 1 of 6

@@ -12,14 +12,14 @@ import { getApmWriteTargets } from '../../lib/apm/utils/get_apm_write_targets';
 import { Scenario } from '../scenario';
 import { getCommonServices } from '../utils/get_common_services';

-const scenario: Scenario = async ({ target, logLevel }) => {
+const scenario: Scenario = async ({ target, logLevel, scenarioOpts }) => {
   const { client, logger } = getCommonServices({ target, logLevel });
   const writeTargets = await getApmWriteTargets({ client });
+  const { numServices = 3 } = scenarioOpts || {};

   return {
     generate: ({ from, to }) => {
-      const numServices = 3;
       const range = timerange(from, to);

       const transactionName = '240rpm/75% 1000ms';
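Because scenarioOpts arrives as an untyped Record<string, any>, a scenario with more than one knob may want to normalize it once up front. A minimal sketch of that pattern, not code from this commit (the second option name is hypothetical):

    // Hypothetical helper: merge untyped CLI options over typed defaults.
    interface SimpleTraceOpts {
      numServices: number;
      txPerMinute: number; // hypothetical second knob, not in this commit
    }

    function readOpts(scenarioOpts: Record<string, any> | undefined): SimpleTraceOpts {
      // Spreading last lets CLI-supplied values win over the defaults.
      return { numServices: 3, txPerMinute: 240, ...scenarioOpts };
    }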

File 2 of 6

@@ -69,6 +69,12 @@ function options(y: Argv) {
       describe: 'Target to index',
       string: true,
     })
+    .option('scenarioOpts', {
+      describe: 'Options specific to the scenario',
+      coerce: (arg) => {
+        return arg as Record<string, any> | undefined;
+      },
+    })
     .conflicts('to', 'live');
 }
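yargs builds nested objects from dot-notation flags by default, which is what makes the coerce cast above reasonable: by the time coerce runs, the flag value is already an object. A self-contained sketch of that behavior (illustration only, not code from this commit):

    import yargs from 'yargs';

    // '--scenarioOpts.numServices 5' is folded into { numServices: 5 }
    // before coerce receives it.
    const argv = yargs(['--scenarioOpts.numServices', '5'])
      .option('scenarioOpts', {
        describe: 'Options specific to the scenario',
        coerce: (arg) => arg as Record<string, any> | undefined,
      }).argv;

    // argv.scenarioOpts => { numServices: 5 }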

File 3 of 6

@@ -47,7 +47,15 @@ export function parseRunCliFlags(flags: RunCliFlags) {
   }

   return {
-    ...pick(flags, 'target', 'workers', 'clientWorkers', 'batchSize', 'writeTarget'),
+    ...pick(
+      flags,
+      'target',
+      'workers',
+      'clientWorkers',
+      'batchSize',
+      'writeTarget',
+      'scenarioOpts'
+    ),
     intervalInMs,
     bucketSizeInMs,
     logLevel: parsedLogLevel,
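This hunk matters because lodash's pick whitelists keys: anything not listed is dropped here, so omitting 'scenarioOpts' would silently discard the flag before it ever reached the upload code. A quick illustration (values are made up):

    import { pick } from 'lodash';

    const flags = { target: 'http://localhost:9200', scenarioOpts: { numServices: 5 }, help: false };

    // Only the listed keys survive the pick.
    pick(flags, 'target', 'scenarioOpts');
    // => { target: 'http://localhost:9200', scenarioOpts: { numServices: 5 } }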

File 4 of 6

@@ -24,6 +24,7 @@ export async function startHistoricalDataUpload({
   target,
   file,
   writeTarget,
+  scenarioOpts,
 }: RunOptions & { from: number; to: number }) {
   let requestedUntil: number = from;
@@ -57,6 +58,7 @@
       target,
       workers,
       writeTarget,
+      scenarioOpts,
     };
     const worker = new Worker(Path.join(__dirname, './upload_next_batch.js'), {

File 5 of 6

@@ -24,6 +24,7 @@ export async function startLiveDataUpload({
   logLevel,
   workers,
   writeTarget,
+  scenarioOpts,
 }: RunOptions & { start: number }) {
   let queuedEvents: ElasticsearchOutput[] = [];
   let requestedUntil: number = start;
@@ -41,6 +42,7 @@
     target,
     workers,
     writeTarget,
+    scenarioOpts,
   });

   function uploadNextBatch() {

File 6 of 6

@@ -17,6 +17,7 @@ export interface WorkerData {
   bucketFrom: number;
   bucketTo: number;
   file: string;
+  scenarioOpts: Record<string, any> | undefined;
   logLevel: LogLevel;
   clientWorkers: number;
   batchSize: number;
@@ -39,6 +40,7 @@
   workers,
   target,
   writeTarget,
+  scenarioOpts,
 } = workerData as WorkerData;

 async function uploadNextBatch() {
@@ -63,6 +65,7 @@
     target,
     workers,
     writeTarget,
+    scenarioOpts,
   });

   const events = logger.perf('execute_scenario', () =>
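End to end, scenarioOpts crosses Node's worker_threads boundary: the parent process places it in workerData and the worker destructures it back out, which is why it must be a structured-cloneable value (a plain Record<string, any> is). A self-contained sketch of that round trip, independent of the synthtrace code:

    import { Worker, isMainThread, workerData } from 'worker_threads';

    if (isMainThread) {
      // Parent: the options object is structured-cloned into the worker.
      new Worker(__filename, { workerData: { scenarioOpts: { numServices: 5 } } });
    } else {
      // Worker: the cloned object comes back out of workerData.
      const { scenarioOpts } = workerData as { scenarioOpts: Record<string, any> };
      console.log(scenarioOpts.numServices); // 5
    }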