[APM] Synthtrace improvements (#133303)

Author: Søren Louv-Jansen, 2022-06-01 18:30:52 +02:00 (committed by GitHub)
parent 7958fa048c
commit 6cba3d786b
14 changed files with 120 additions and 77 deletions


@@ -91,12 +91,19 @@ const esEvents = toElasticsearchOutput([
### CLI
Via the CLI, you can upload scenarios, either using a fixed time range or continuously generating data. Some examples are available in in `src/scripts/examples`. Here's an example for live data:
Via the CLI, you can run scenarios, either using a fixed time range or continuously generating data. Scenarios are available in [`packages/elastic-apm-synthtrace/src/scenarios/`](https://github.com/elastic/kibana/blob/main/packages/elastic-apm-synthtrace/src/scenarios/).
`$ node packages/elastic-apm-synthtrace/src/scripts/run packages/elastic-apm-synthtrace/src/scripts/examples/01_simple_trace.ts --target=http://admin:changeme@localhost:9200 --live`
For live data ingestion:
```
node scripts/synthtrace simple_trace.ts --target=http://admin:changeme@localhost:9200 --live
```
For a fixed time window:
`$ node packages/elastic-apm-synthtrace/src/scripts/run packages/elastic-apm-synthtrace/src/scripts/examples/01_simple_trace.ts --target=http://admin:changeme@localhost:9200 --from=now-24h --to=now`
```
node scripts/synthtrace simple_trace.ts --target=http://admin:changeme@localhost:9200 --from=now-24h --to=now
```
The script will try to automatically find bootstrapped APM indices. **If these indices do not exist, the script will exit with an error. It will not bootstrap the indices itself.**
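Bundled scenarios can be referenced either by file name or by an explicit path; based on the scenario-file resolution added in this PR, the two invocations below should be equivalent (a sketch, the paths are illustrative):
```
# by name, resolved against packages/elastic-apm-synthtrace/src/scenarios/
node scripts/synthtrace simple_trace.ts --target=http://admin:changeme@localhost:9200 --from=now-24h --to=now

# by explicit path
node scripts/synthtrace packages/elastic-apm-synthtrace/src/scenarios/simple_trace.ts --target=http://admin:changeme@localhost:9200 --from=now-24h --to=now
```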
@@ -104,24 +111,26 @@ The following options are supported:
### Connection options
| Option | Type | Default | Description |
|------------------------|-----------|:-----------|--------------------------------------------------------------------------------------------------------------------------------------------|
| `--target` | [string] | | Elasticsearch target |
| `--kibana` | [string] | | Kibana target, used to bootstrap datastreams/mappings/templates/settings |
| `--cloudId` | [string] | | Provide connection information and will force APM on the cloud to migrate to run as a Fleet integration |
| `--local` | [boolean] | | Shortcut during development, assumes `yarn es snapshot` and `yarn start` are running |
| `--username` | [string] | `elastic` | Basic authentication username |
| `--password` | [string] | `changeme` | Basic authentication password |
| Option | Type | Default | Description |
| ------------ | --------- | :--------- | ------------------------------------------------------------------------------------------------------- |
| `--target` | [string] | | Elasticsearch target |
| `--kibana` | [string] | | Kibana target, used to bootstrap datastreams/mappings/templates/settings |
| `--cloudId` | [string] | | Provide connection information and will force APM on the cloud to migrate to run as a Fleet integration |
| `--local` | [boolean] | | Shortcut during development, assumes `yarn es snapshot` and `yarn start` are running |
| `--username` | [string] | `elastic` | Basic authentication username |
| `--password` | [string] | `changeme` | Basic authentication password |
Note:
- If you only specify `--target` Synthtrace can not automatically setup APM.
- If you specify both `--target` and `--kibana` the tool will automatically attempt to install the appropriate APM package
Note:
- If you only specify `--target` Synthtrace can not automatically setup APM.
- If you specify both `--target` and `--kibana` the tool will automatically attempt to install the appropriate APM package
- For Cloud its easiest to specify `--cloudId` as it will unpack the ES/Kibana targets and migrate cloud over to managed APM automatically.
- If you only specify `--kibana` and it's using a cloud hostname a very naive `--target` to Elasticsearch will be inferred.
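As a rough sketch of how these connection flags combine (hosts, ports and credentials below are illustrative, not defaults):
```
# explicit Elasticsearch and Kibana targets, so the APM package can be installed automatically
node scripts/synthtrace simple_trace.ts --target=http://admin:changeme@localhost:9200 --kibana=http://admin:changeme@localhost:5601

# on Cloud, a single --cloudId is unpacked into the ES/Kibana targets and APM is migrated to the managed integration
node scripts/synthtrace simple_trace.ts --cloudId "myname:<base64string>" --username elastic --password changeme
```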
### Scenario options
| Option | Type | Default | Description |
|------------------------|-----------|:--------|--------------------------------------------------------------------------------------------------------------------------------------------|
| ---------------------- | --------- | :------ | ------------------------------------------------------------------------------------------------------------------------------------------ |
| `--from` | [date] | `now()` | The start of the time window |
| `--to` | [date] | | The end of the time window |
| `--maxDocs` | [number] | | The maximum number of documents we are allowed to generate |
@@ -132,17 +141,17 @@ Note:
| `--forceLegacyIndices` | [boolean] | `false` | Force writing to legacy indices |
Note:
- The default `--to` is `15m` unless `--maxDocs` is specified in which case `--to` is calculated based on the scenario's TPM.
- You can combine `--from` `--maxDocs` and `--to` with `--live` to back-fill some data.
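For example, to back-fill the last 24 hours (capped by `--maxDocs`) and then keep generating live data, something like the following should work (a sketch combining the flags above):
```
node scripts/synthtrace simple_trace.ts --target=http://admin:changeme@localhost:9200 --from=now-24h --maxDocs 100000 --live
```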
### Setup options
| Option | Type | Default | Description |
|-------------------|-----------|:-----------|---------------------------------------------------------------------------------------------------------|
| `--numShards` | [number] | | Updates the component templates to update the number of primary shards, requires cloudId to be provided |
| `--clean` | [boolean] | `false` | Clean APM data before indexing new data |
| `--workers` | [number] | | Amount of Node.js worker threads |
| `--logLevel` | [enum] | `info` | Log level |
| `--gcpRepository` | [string] | | Allows you to register a GCP repository in <client_name>:<bucket>[:base_path] format |
| `-p` | [string] | | Specify multiple sets of streamaggregators to be included in the StreamProcessor |
| Option | Type | Default | Description |
| ----------------- | --------- | :------ | ------------------------------------------------------------------------------------------------------- |
| `--numShards` | [number] | | Updates the component templates to update the number of primary shards, requires cloudId to be provided |
| `--clean` | [boolean] | `false` | Clean APM data before indexing new data |
| `--workers` | [number] | | Amount of Node.js worker threads |
| `--logLevel` | [enum] | `info` | Log level |
| `--gcpRepository` | [string] | | Allows you to register a GCP repository in <client_name>:<bucket>[:base_path] format |
| `-p` | [string] | | Specify multiple sets of streamaggregators to be included in the StreamProcessor |
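A hedged example combining a few of the setup flags above (values are illustrative):
```
# clean existing APM data first, then index using 4 worker threads at the default log level
node scripts/synthtrace simple_trace.ts --target=http://admin:changeme@localhost:9200 --clean --workers 4 --logLevel info
```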


@@ -1,3 +1,5 @@
#!/usr/bin/env node
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
@@ -12,4 +14,4 @@ require('@babel/register')({
presets: [['@babel/preset-env', { targets: { node: 'current' } }], '@babel/preset-typescript'],
});
require('./run_synthtrace');
require('../src/scripts/run_synthtrace');


@@ -83,9 +83,13 @@ export class ApmSynthtraceKibanaClient {
},
body: '{"force":true}',
});
const responseJson = await response.json();
if (responseJson.statusCode) {
throw Error(`unable to install apm package ${packageVersion}`);
throw Error(
`unable to install apm package ${packageVersion}. Received status code: ${responseJson.statusCode} and message: ${responseJson.message}`
);
}
if (responseJson.items) {
this.logger.info(`Installed apm package ${packageVersion}`);


@@ -6,11 +6,11 @@
* Side Public License, v 1.
*/
import { observer, timerange } from '../..';
import { Scenario } from '../scenario';
import { getLogger } from '../utils/get_common_services';
import { RunOptions } from '../utils/parse_run_cli_flags';
import { AgentConfigFields } from '../../lib/agent_config/agent_config_fields';
import { observer, timerange } from '..';
import { Scenario } from '../scripts/scenario';
import { getLogger } from '../scripts/utils/get_common_services';
import { RunOptions } from '../scripts/utils/parse_run_cli_flags';
import { AgentConfigFields } from '../lib/agent_config/agent_config_fields';
const scenario: Scenario<AgentConfigFields> = async (runOptions: RunOptions) => {
const logger = getLogger(runOptions);


@@ -6,11 +6,11 @@
* Side Public License, v 1.
*/
import { stackMonitoring, timerange } from '../..';
import { Scenario } from '../scenario';
import { getLogger } from '../utils/get_common_services';
import { RunOptions } from '../utils/parse_run_cli_flags';
import { ApmFields } from '../../lib/apm/apm_fields';
import { stackMonitoring, timerange } from '..';
import { Scenario } from '../scripts/scenario';
import { getLogger } from '../scripts/utils/get_common_services';
import { RunOptions } from '../scripts/utils/parse_run_cli_flags';
import { ApmFields } from '../lib/apm/apm_fields';
const scenario: Scenario<ApmFields> = async (runOptions: RunOptions) => {
const logger = getLogger(runOptions);


@@ -7,12 +7,12 @@
*/
import { random } from 'lodash';
import { apm, timerange } from '../..';
import { ApmFields } from '../../lib/apm/apm_fields';
import { Instance } from '../../lib/apm/instance';
import { Scenario } from '../scenario';
import { getLogger } from '../utils/get_common_services';
import { RunOptions } from '../utils/parse_run_cli_flags';
import { apm, timerange } from '..';
import { ApmFields } from '../lib/apm/apm_fields';
import { Instance } from '../lib/apm/instance';
import { Scenario } from '../scripts/scenario';
import { getLogger } from '../scripts/utils/get_common_services';
import { RunOptions } from '../scripts/utils/parse_run_cli_flags';
const scenario: Scenario<ApmFields> = async (runOptions: RunOptions) => {
const logger = getLogger(runOptions);


@@ -7,12 +7,12 @@
*/
import { random } from 'lodash';
import { apm, timerange } from '../..';
import { Instance } from '../../lib/apm/instance';
import { Scenario } from '../scenario';
import { getLogger } from '../utils/get_common_services';
import { RunOptions } from '../utils/parse_run_cli_flags';
import { ApmFields } from '../../lib/apm/apm_fields';
import { apm, timerange } from '..';
import { Instance } from '../lib/apm/instance';
import { Scenario } from '../scripts/scenario';
import { getLogger } from '../scripts/utils/get_common_services';
import { RunOptions } from '../scripts/utils/parse_run_cli_flags';
import { ApmFields } from '../lib/apm/apm_fields';
const scenario: Scenario<ApmFields> = async (runOptions: RunOptions) => {
const logger = getLogger(runOptions);


@@ -8,11 +8,11 @@
// Run with: node ./src/scripts/run ./src/scripts/examples/03_monitoring.ts --target=http://elastic:changeme@localhost:9200
import { stackMonitoring, timerange } from '../..';
import { Scenario } from '../scenario';
import { getLogger } from '../utils/get_common_services';
import { RunOptions } from '../utils/parse_run_cli_flags';
import { StackMonitoringFields } from '../../lib/stack_monitoring/stack_monitoring_fields';
import { stackMonitoring, timerange } from '..';
import { Scenario } from '../scripts/scenario';
import { getLogger } from '../scripts/utils/get_common_services';
import { RunOptions } from '../scripts/utils/parse_run_cli_flags';
import { StackMonitoringFields } from '../lib/stack_monitoring/stack_monitoring_fields';
const scenario: Scenario<StackMonitoringFields> = async (runOptions: RunOptions) => {
const logger = getLogger(runOptions);


@@ -6,12 +6,12 @@
* Side Public License, v 1.
*/
import { apm, timerange } from '../..';
import { ApmFields } from '../../lib/apm/apm_fields';
import { Instance } from '../../lib/apm/instance';
import { Scenario } from '../scenario';
import { getLogger } from '../utils/get_common_services';
import { RunOptions } from '../utils/parse_run_cli_flags';
import { apm, timerange } from '..';
import { ApmFields } from '../lib/apm/apm_fields';
import { Instance } from '../lib/apm/instance';
import { Scenario } from '../scripts/scenario';
import { getLogger } from '../scripts/utils/get_common_services';
import { RunOptions } from '../scripts/utils/parse_run_cli_flags';
const scenario: Scenario<ApmFields> = async (runOptions: RunOptions) => {
const logger = getLogger(runOptions);


@@ -7,9 +7,9 @@
*/
import { compact, shuffle } from 'lodash';
import { apm, ApmFields, EntityArrayIterable, timerange } from '../..';
import { generateLongId, generateShortId } from '../../lib/utils/generate_id';
import { Scenario } from '../scenario';
import { apm, ApmFields, EntityArrayIterable, timerange } from '..';
import { generateLongId, generateShortId } from '../lib/utils/generate_id';
import { Scenario } from '../scripts/scenario';
function generateExternalSpanLinks() {
// randomly creates external span links 0 - 10


@@ -5,20 +5,18 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Path from 'path';
import { Logger } from '../../lib/utils/create_logger';
import { Scenario } from '../scenario';
import { Fields } from '../../lib/entity';
export function getScenario({ file, logger }: { file: unknown; logger: Logger }) {
const location = Path.join(process.cwd(), String(file));
export function getScenario({ file, logger }: { file: string; logger: Logger }) {
logger.debug(`Loading scenario from ${file}`);
logger.debug(`Loading scenario from ${location}`);
return import(location).then((m) => {
return import(file).then((m) => {
if (m && m.default) {
return m.default;
}
throw new Error(`Could not find scenario at ${location}`);
throw new Error(`Could not import scenario at ${file}`);
}) as Promise<Scenario<Fields>>;
}


@@ -6,14 +6,37 @@
* Side Public License, v 1.
*/
import { existsSync } from 'fs';
import { pick } from 'lodash';
import path from 'path';
import { LogLevel } from '../../lib/utils/create_logger';
import { RunCliFlags } from '../run_synthtrace';
export function parseRunCliFlags(flags: RunCliFlags) {
const { file, _, logLevel } = flags;
function getParsedFile(flags: RunCliFlags) {
const { file, _ } = flags;
const parsedFile = (file || _[0]) as string;
const parsedFile = String(file || _[0]);
if (!parsedFile) {
throw new Error('Please specify a scenario to run');
}
const result = [
path.resolve(parsedFile),
path.resolve(`${parsedFile}.ts`),
path.resolve(__dirname, '../../scenarios', parsedFile),
path.resolve(__dirname, '../../scenarios', `${parsedFile}.ts`),
].find((filepath) => existsSync(filepath));
if (result) {
return result;
}
throw new Error(`Could not find scenario file: "${parsedFile}"`);
}
export function parseRunCliFlags(flags: RunCliFlags) {
const { logLevel } = flags;
const parsedFile = getParsedFile(flags);
let parsedLogLevel = LogLevel.info;
switch (logLevel) {

scripts/synthtrace.js (new file)

@@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
require('../src/setup_node_env/node_version_validator');
require('@elastic/apm-synthtrace/bin/synthtrace');


@@ -30,8 +30,7 @@ yarn start
**Run Synthtrace**
```
node packages/elastic-apm-synthtrace/src/scripts/run packages/elastic-apm-synthtrace/src/scripts/examples/01_simple_trace.ts \
--local
node scripts/synthtrace simple_trace.ts --local
```
The `--local` flag is a shortcut to specifying `--target` and `--kibana`. It autodiscovers the current kibana basepath and installs the appropiate APM package.
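In other words, `--local` is roughly shorthand for spelling the targets out, along the lines of the sketch below (the exact URLs are assumptions based on the `yarn es snapshot` / `yarn start` defaults; `--local` additionally autodiscovers the Kibana basepath):
```
# assumed local defaults; --local also autodiscovers the current Kibana basepath
node scripts/synthtrace simple_trace.ts \
  --target=http://elastic:changeme@localhost:9200 \
  --kibana=http://elastic:changeme@localhost:5601
```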
@@ -56,9 +55,7 @@ Use the [oblt-cli](https://github.com/elastic/observability-test-environments/bl
If you want to bootstrap some data on a cloud instance you can also use the following
```
node packages/elastic-apm-synthtrace/src/scripts/run packages/elastic-apm-synthtrace/src/scripts/examples/01_simple_trace.ts \
--cloudId "myname:<base64string>" \
--maxDocs 100000
node scripts/synthtrace simple_trace.ts --cloudId "myname:<base64string>" --maxDocs 100000
```
## 3. Local ES Cluster