Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)
[APM] Generate stack monitoring data (#118302)
* Generate stack monitoring data
* Update file import
* Fix imports after bad merge from upstream
* A cluster stats generator
* Wiring kibana docs to ES docs
* Adding fields to get kibana cards rendering
* [apm-synthtrace] Export types Fields, ApmException, ApmSynthtraceEsClient
* [APM] Update integration tests with synthtrace changes
* [APM] Update Cypress E2E tests with synthtrace changes
* Fix lint errors

Co-authored-by: Milton Hultgren <milton.hultgren@elastic.co>
Co-authored-by: Mat Schaffer <mat@elastic.co>
This commit is contained in:
parent: a0650c7510
commit: 39a29cc769
72 changed files with 1019 additions and 676 deletions
@@ -6,17 +6,11 @@
  * Side Public License, v 1.
  */
 
-export type { Exception } from './lib/entity';
-export { service } from './lib/service';
-export { browser } from './lib/browser';
 export { timerange } from './lib/timerange';
-export { getTransactionMetrics } from './lib/utils/get_transaction_metrics';
-export { getSpanDestinationMetrics } from './lib/utils/get_span_destination_metrics';
-export { getObserverDefaults } from './lib/defaults/get_observer_defaults';
-export { getChromeUserAgentDefaults } from './lib/defaults/get_chrome_user_agent_defaults';
-export { toElasticsearchOutput } from './lib/output/to_elasticsearch_output';
-export { getBreakdownMetrics } from './lib/utils/get_breakdown_metrics';
+export { apm } from './lib/apm';
+export { stackMonitoring } from './lib/stack_monitoring';
 export { cleanWriteTargets } from './lib/utils/clean_write_targets';
-export { getWriteTargets } from './lib/utils/get_write_targets';
-export { SynthtraceEsClient } from './lib/client/synthtrace_es_client';
 export { createLogger, LogLevel } from './lib/utils/create_logger';
+
+export type { Fields } from './lib/entity';
+export type { ApmException, ApmSynthtraceEsClient } from './lib/apm';
@@ -6,12 +6,12 @@
  * Side Public License, v 1.
  */
 
-import { Fields } from './entity';
-import { Serializable } from './serializable';
-import { generateLongId, generateShortId } from './utils/generate_id';
+import { ApmFields } from './apm_fields';
+import { Serializable } from '../serializable';
+import { generateLongId, generateShortId } from '../utils/generate_id';
 
-export class ApmError extends Serializable {
-  constructor(fields: Fields) {
+export class ApmError extends Serializable<ApmFields> {
+  constructor(fields: ApmFields) {
     super({
       ...fields,
       'processor.event': 'error',
packages/elastic-apm-synthtrace/src/lib/apm/apm_fields.ts (new file, 78 lines)
@@ -0,0 +1,78 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Fields } from '../entity';
+
+export type ApmApplicationMetricFields = Partial<{
+  'system.process.memory.size': number;
+  'system.memory.actual.free': number;
+  'system.memory.total': number;
+  'system.cpu.total.norm.pct': number;
+  'system.process.memory.rss.bytes': number;
+  'system.process.cpu.total.norm.pct': number;
+}>;
+
+export type ApmUserAgentFields = Partial<{
+  'user_agent.original': string;
+  'user_agent.os.name': string;
+  'user_agent.name': string;
+  'user_agent.device.name': string;
+  'user_agent.version': number;
+}>;
+
+export interface ApmException {
+  message: string;
+}
+
+export type ApmFields = Fields &
+  Partial<{
+    'agent.name': string;
+    'agent.version': string;
+    'container.id': string;
+    'ecs.version': string;
+    'event.outcome': string;
+    'event.ingested': number;
+    'error.id': string;
+    'error.exception': ApmException[];
+    'error.grouping_name': string;
+    'error.grouping_key': string;
+    'host.name': string;
+    'kubernetes.pod.uid': string;
+    'metricset.name': string;
+    'observer.version': string;
+    'observer.version_major': number;
+    'parent.id': string;
+    'processor.event': string;
+    'processor.name': string;
+    'trace.id': string;
+    'transaction.name': string;
+    'transaction.type': string;
+    'transaction.id': string;
+    'transaction.duration.us': number;
+    'transaction.duration.histogram': {
+      values: number[];
+      counts: number[];
+    };
+    'transaction.sampled': true;
+    'service.name': string;
+    'service.environment': string;
+    'service.node.name': string;
+    'span.id': string;
+    'span.name': string;
+    'span.type': string;
+    'span.subtype': string;
+    'span.duration.us': number;
+    'span.destination.service.name': string;
+    'span.destination.service.resource': string;
+    'span.destination.service.type': string;
+    'span.destination.service.response_time.sum.us': number;
+    'span.destination.service.response_time.count': number;
+    'span.self_time.count': number;
+    'span.self_time.sum.us': number;
+  }> &
+  ApmApplicationMetricFields;
@@ -6,16 +6,16 @@
  * Side Public License, v 1.
  */
 
-import { Fields } from './entity';
-import { Serializable } from './serializable';
+import { Serializable } from '../serializable';
 import { Span } from './span';
 import { Transaction } from './transaction';
-import { generateLongId } from './utils/generate_id';
+import { generateLongId } from '../utils/generate_id';
+import { ApmFields } from './apm_fields';
 
-export class BaseSpan extends Serializable {
+export class BaseSpan extends Serializable<ApmFields> {
   private readonly _children: BaseSpan[] = [];
 
-  constructor(fields: Fields) {
+  constructor(fields: ApmFields) {
     super({
       ...fields,
       'event.outcome': 'unknown',
@@ -60,7 +60,7 @@ export class BaseSpan extends Serializable {
     return this;
   }
 
-  serialize(): Fields[] {
+  serialize(): ApmFields[] {
     return [this.fields, ...this._children.flatMap((child) => child.serialize())];
   }
 
@@ -6,11 +6,12 @@
  * Side Public License, v 1.
  */
 
-import { Entity, UserAgentFields } from './entity';
+import { ApmFields, ApmUserAgentFields } from './apm_fields';
+import { Entity } from '../entity';
 import { RumSpan } from './rum_span';
 import { RumTransaction } from './rum_transaction';
 
-export class Browser extends Entity {
+export class Browser extends Entity<ApmFields> {
   transaction(transactionName: string, transactionType: string = 'page-load') {
     return new RumTransaction({
       ...this.fields,
@@ -29,7 +30,7 @@ export class Browser extends Entity {
   }
 }
 
-export function browser(serviceName: string, production: string, userAgent: UserAgentFields) {
+export function browser(serviceName: string, production: string, userAgent: ApmUserAgentFields) {
   return new Browser({
     'agent.name': 'rum-js',
     'service.name': serviceName,
@@ -7,45 +7,52 @@
  */
 
 import { Client } from '@elastic/elasticsearch';
-import { uploadEvents } from '../../scripts/utils/upload_events';
-import { Fields } from '../entity';
-import { cleanWriteTargets } from '../utils/clean_write_targets';
+import { uploadEvents } from '../../../scripts/utils/upload_events';
+import { Fields } from '../../entity';
+import { cleanWriteTargets } from '../../utils/clean_write_targets';
 import { getBreakdownMetrics } from '../utils/get_breakdown_metrics';
 import { getSpanDestinationMetrics } from '../utils/get_span_destination_metrics';
 import { getTransactionMetrics } from '../utils/get_transaction_metrics';
-import { getWriteTargets } from '../utils/get_write_targets';
-import { Logger } from '../utils/logger';
+import { getApmWriteTargets } from '../utils/get_apm_write_targets';
+import { Logger } from '../../utils/create_logger';
+import { apmEventsToElasticsearchOutput } from '../utils/apm_events_to_elasticsearch_output';
 
-export class SynthtraceEsClient {
+export class ApmSynthtraceEsClient {
   constructor(private readonly client: Client, private readonly logger: Logger) {}
 
   private getWriteTargets() {
-    return getWriteTargets({ client: this.client });
+    return getApmWriteTargets({ client: this.client });
   }
 
   clean() {
     return this.getWriteTargets().then((writeTargets) =>
-      cleanWriteTargets({ client: this.client, writeTargets, logger: this.logger })
+      cleanWriteTargets({
+        client: this.client,
+        targets: Object.values(writeTargets),
+        logger: this.logger,
+      })
     );
   }
 
   async index(events: Fields[]) {
-    const eventsToIndex = [
-      ...events,
-      ...getTransactionMetrics(events),
-      ...getSpanDestinationMetrics(events),
-      ...getBreakdownMetrics(events),
-    ];
-
     const writeTargets = await this.getWriteTargets();
 
+    const eventsToIndex = apmEventsToElasticsearchOutput({
+      events: [
+        ...events,
+        ...getTransactionMetrics(events),
+        ...getSpanDestinationMetrics(events),
+        ...getBreakdownMetrics(events),
+      ],
+      writeTargets,
+    });
+
     await uploadEvents({
       batchSize: 1000,
       client: this.client,
       clientWorkers: 5,
       events: eventsToIndex,
       logger: this.logger,
-      writeTargets,
     });
 
     return this.client.indices.refresh({
@@ -6,9 +6,9 @@
  * Side Public License, v 1.
  */
 
-import { UserAgentFields } from '../entity';
+import { ApmUserAgentFields } from '../../apm/apm_fields';
 
-export function getChromeUserAgentDefaults(): UserAgentFields {
+export function getChromeUserAgentDefaults(): ApmUserAgentFields {
   return {
     'user_agent.original':
       'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.0.4638.54 Safari/537.36',
@@ -6,9 +6,9 @@
  * Side Public License, v 1.
  */
 
-import { Fields } from '../entity';
+import { ApmFields } from '../apm_fields';
 
-export function getObserverDefaults(): Fields {
+export function getObserverDefaults(): ApmFields {
   return {
     'observer.version': '7.16.0',
     'observer.version_major': 7,
packages/elastic-apm-synthtrace/src/lib/apm/index.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import { service } from './service';
+import { browser } from './browser';
+import { getTransactionMetrics } from './utils/get_transaction_metrics';
+import { getSpanDestinationMetrics } from './utils/get_span_destination_metrics';
+import { getObserverDefaults } from './defaults/get_observer_defaults';
+import { getChromeUserAgentDefaults } from './defaults/get_chrome_user_agent_defaults';
+import { apmEventsToElasticsearchOutput } from './utils/apm_events_to_elasticsearch_output';
+import { getBreakdownMetrics } from './utils/get_breakdown_metrics';
+import { getApmWriteTargets } from './utils/get_apm_write_targets';
+import { ApmSynthtraceEsClient } from './client/apm_synthtrace_es_client';
+
+import type { ApmException } from './apm_fields';
+
+export const apm = {
+  service,
+  browser,
+  getTransactionMetrics,
+  getSpanDestinationMetrics,
+  getObserverDefaults,
+  getChromeUserAgentDefaults,
+  apmEventsToElasticsearchOutput,
+  getBreakdownMetrics,
+  getApmWriteTargets,
+  ApmSynthtraceEsClient,
+};
+
+export type { ApmSynthtraceEsClient, ApmException };
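Note: a minimal sketch (not part of the diff) of how the consolidated `apm` facade reads after this change, distilled from the updated 01_simple_trace.ts scenario further down; the package import path, service name, and time window are illustrative assumptions.

import { apm, timerange } from '@elastic/apm-synthtrace';

// One service instance emitting one successful transaction per second.
const instance = apm.service('opbeans-go', 'production', 'go').instance('instance');

const events = timerange(Date.now() - 60_000, Date.now())
  .interval('1s')
  .rate(1)
  .flatMap((timestamp) =>
    instance.transaction('GET /api').timestamp(timestamp).duration(100).success().serialize()
  );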
@@ -7,12 +7,13 @@
  */
 
 import { ApmError } from './apm_error';
-import { ApplicationMetricFields, Entity } from './entity';
+import { Entity } from '../entity';
 import { Metricset } from './metricset';
 import { Span } from './span';
 import { Transaction } from './transaction';
+import { ApmApplicationMetricFields, ApmFields } from './apm_fields';
 
-export class Instance extends Entity {
+export class Instance extends Entity<ApmFields> {
   transaction(transactionName: string, transactionType = 'request') {
     return new Transaction({
       ...this.fields,
@@ -43,7 +44,7 @@ export class Instance extends Entity {
     return this;
   }
 
-  appMetrics(metrics: ApplicationMetricFields) {
+  appMetrics(metrics: ApmApplicationMetricFields) {
     return new Metricset({
       ...this.fields,
       'metricset.name': 'app',
@@ -6,11 +6,11 @@
  * Side Public License, v 1.
  */
 
-import { Fields } from './entity';
-import { Serializable } from './serializable';
+import { Serializable } from '../serializable';
+import { ApmFields } from './apm_fields';
 
-export class Metricset extends Serializable {
-  constructor(fields: Fields) {
+export class Metricset extends Serializable<ApmFields> {
+  constructor(fields: ApmFields) {
     super({
       'processor.event': 'metric',
       'processor.name': 'metric',
@@ -6,10 +6,11 @@
  * Side Public License, v 1.
  */
 
-import { Entity } from './entity';
+import { Entity } from '../entity';
+import { ApmFields } from './apm_fields';
 import { Instance } from './instance';
 
-export class Service extends Entity {
+export class Service extends Entity<ApmFields> {
   instance(instanceName: string) {
     return new Instance({
       ...this.fields,
@@ -7,11 +7,11 @@
  */
 
 import { BaseSpan } from './base_span';
-import { Fields } from './entity';
-import { generateShortId } from './utils/generate_id';
+import { generateShortId } from '../utils/generate_id';
+import { ApmFields } from './apm_fields';
 
 export class Span extends BaseSpan {
-  constructor(fields: Fields) {
+  constructor(fields: ApmFields) {
     super({
       ...fields,
       'processor.event': 'span',
@@ -8,14 +8,14 @@
 
 import { ApmError } from './apm_error';
 import { BaseSpan } from './base_span';
-import { Fields } from './entity';
-import { generateShortId } from './utils/generate_id';
+import { generateShortId } from '../utils/generate_id';
+import { ApmFields } from './apm_fields';
 
 export class Transaction extends BaseSpan {
   private _sampled: boolean = true;
   private readonly _errors: ApmError[] = [];
 
-  constructor(fields: Fields) {
+  constructor(fields: ApmFields) {
     super({
       ...fields,
       'processor.event': 'transaction',
@@ -8,15 +8,15 @@
 import moment from 'moment';
 import { pickBy } from 'lodash';
 import objectHash from 'object-hash';
-import { Fields } from '../entity';
+import { ApmFields } from '../apm_fields';
 import { createPicker } from './create_picker';
 
-export function aggregate(events: Fields[], fields: string[]) {
+export function aggregate(events: ApmFields[], fields: string[]) {
   const picker = createPicker(fields);
 
-  const metricsets = new Map<string, { key: Fields; events: Fields[] }>();
+  const metricsets = new Map<string, { key: ApmFields; events: ApmFields[] }>();
 
-  function getMetricsetKey(span: Fields) {
+  function getMetricsetKey(span: ApmFields) {
     const timestamp = moment(span['@timestamp']).valueOf();
     return {
       '@timestamp': timestamp - (timestamp % (60 * 1000)),
@@ -6,16 +6,12 @@
  * Side Public License, v 1.
  */
 
-import { set } from 'lodash';
-import { getObserverDefaults } from '../..';
-import { Fields } from '../entity';
+import { getObserverDefaults } from '../defaults/get_observer_defaults';
+import { ApmFields } from '../apm_fields';
+import { dedot } from '../../utils/dedot';
+import { ElasticsearchOutput } from '../../utils/to_elasticsearch_output';
 
-export interface ElasticsearchOutput {
-  _index: string;
-  _source: unknown;
-}
-
-export interface ElasticsearchOutputWriteTargets {
+export interface ApmElasticsearchOutputWriteTargets {
   transaction: string;
   span: string;
   error: string;
@@ -30,16 +26,14 @@ const esDocumentDefaults = {
   },
 };
 
-// eslint-disable-next-line guard-for-in
-for (const key in observerDefaults) {
-  set(esDocumentDefaults, key, observerDefaults[key as keyof typeof observerDefaults]);
-}
+dedot(observerDefaults, esDocumentDefaults);
 
-export function toElasticsearchOutput({
+export function apmEventsToElasticsearchOutput({
   events,
   writeTargets,
 }: {
-  events: Fields[];
-  writeTargets: ElasticsearchOutputWriteTargets;
+  events: ApmFields[];
+  writeTargets: ApmElasticsearchOutputWriteTargets;
 }): ElasticsearchOutput[] {
   return events.map((event) => {
     const values = {};
@@ -55,15 +49,12 @@ export function toElasticsearchOutput({
 
     Object.assign(document, esDocumentDefaults);
 
-    // eslint-disable-next-line guard-for-in
-    for (const key in values) {
-      const val = values[key as keyof typeof values];
-      set(document, key, val);
-    }
+    dedot(values, document);
 
     return {
-      _index: writeTargets[event['processor.event'] as keyof ElasticsearchOutputWriteTargets],
+      _index: writeTargets[event['processor.event'] as keyof ApmElasticsearchOutputWriteTargets],
       _source: document,
       timestamp: event['@timestamp']!,
     };
   });
 }
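Note: a sketch (not part of the diff) of what the renamed apmEventsToElasticsearchOutput produces. The write-targets interface is truncated in the hunk above, so a metric target is assumed here, and the index names are illustrative; in practice they come from getApmWriteTargets().

const docs = apmEventsToElasticsearchOutput({
  events: [{ '@timestamp': Date.now(), 'processor.event': 'transaction' }],
  writeTargets: {
    transaction: 'apm-transaction',
    span: 'apm-span',
    error: 'apm-error',
    metric: 'apm-metric',
  },
});
// Each entry has the shape { _index, _source, timestamp }: the index is chosen
// by 'processor.event', and dotted field names are expanded via dedot().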
@@ -7,13 +7,13 @@
  */
 
 import { Client } from '@elastic/elasticsearch';
-import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
+import { ApmElasticsearchOutputWriteTargets } from './apm_events_to_elasticsearch_output';
 
-export async function getWriteTargets({
+export async function getApmWriteTargets({
   client,
 }: {
   client: Client;
-}): Promise<ElasticsearchOutputWriteTargets> {
+}): Promise<ApmElasticsearchOutputWriteTargets> {
   const [indicesResponse, datastreamsResponse] = await Promise.all([
     client.indices.getAlias({
       index: 'apm-*',
@@ -7,7 +7,7 @@
  */
 import objectHash from 'object-hash';
 import { groupBy, pickBy } from 'lodash';
-import { Fields } from '../entity';
+import { ApmFields } from '../apm_fields';
 import { createPicker } from './create_picker';
 
 const instanceFields = [
@@ -29,7 +29,7 @@ const metricsetPicker = createPicker([
   'span.subtype',
 ]);
 
-export function getBreakdownMetrics(events: Fields[]) {
+export function getBreakdownMetrics(events: ApmFields[]) {
   const txWithSpans = groupBy(
     events.filter(
       (event) => event['processor.event'] === 'span' || event['processor.event'] === 'transaction'
@@ -37,13 +37,13 @@ export function getBreakdownMetrics(events: Fields[]) {
     (event) => event['transaction.id']
   );
 
-  const metricsets: Map<string, Fields> = new Map();
+  const metricsets: Map<string, ApmFields> = new Map();
 
   Object.keys(txWithSpans).forEach((transactionId) => {
     const txEvents = txWithSpans[transactionId];
     const transaction = txEvents.find((event) => event['processor.event'] === 'transaction')!;
 
-    const eventsById: Record<string, Fields> = {};
+    const eventsById: Record<string, ApmFields> = {};
     const activityByParentId: Record<string, Array<{ from: number; to: number }>> = {};
     for (const event of txEvents) {
       const id =
@@ -6,10 +6,10 @@
  * Side Public License, v 1.
  */
 
-import { Fields } from '../entity';
+import { ApmFields } from '../apm_fields';
 import { aggregate } from './aggregate';
 
-export function getSpanDestinationMetrics(events: Fields[]) {
+export function getSpanDestinationMetrics(events: ApmFields[]) {
   const exitSpans = events.filter((event) => !!event['span.destination.service.resource']);
 
   const metricsets = aggregate(exitSpans, [
@@ -7,7 +7,7 @@
  */
 
 import { sortBy } from 'lodash';
-import { Fields } from '../entity';
+import { ApmFields } from '../apm_fields';
 import { aggregate } from './aggregate';
 
 function sortAndCompressHistogram(histogram?: { values: number[]; counts: number[] }) {
@@ -28,7 +28,7 @@ function sortAndCompressHistogram(histogram?: { values: number[]; counts: number
   );
 }
 
-export function getTransactionMetrics(events: Fields[]) {
+export function getTransactionMetrics(events: ApmFields[]) {
   const transactions = events
     .filter((event) => event['processor.event'] === 'transaction')
     .map((transaction) => {
@@ -6,83 +6,18 @@
  * Side Public License, v 1.
  */
 
-export type ApplicationMetricFields = Partial<{
-  'system.process.memory.size': number;
-  'system.memory.actual.free': number;
-  'system.memory.total': number;
-  'system.cpu.total.norm.pct': number;
-  'system.process.memory.rss.bytes': number;
-  'system.process.cpu.total.norm.pct': number;
-}>;
-
-export type UserAgentFields = Partial<{
-  'user_agent.original': string;
-  'user_agent.os.name': string;
-  'user_agent.name': string;
-  'user_agent.device.name': string;
-  'user_agent.version': number;
-}>;
-
-export interface Exception {
-  message: string;
+export interface Fields {
+  '@timestamp'?: number;
 }
 
-export type Fields = Partial<{
-  '@timestamp': number;
-  'agent.name': string;
-  'agent.version': string;
-  'container.id': string;
-  'ecs.version': string;
-  'event.outcome': string;
-  'event.ingested': number;
-  'error.id': string;
-  'error.exception': Exception[];
-  'error.grouping_name': string;
-  'error.grouping_key': string;
-  'host.name': string;
-  'kubernetes.pod.uid': string;
-  'metricset.name': string;
-  'observer.version': string;
-  'observer.version_major': number;
-  'parent.id': string;
-  'processor.event': string;
-  'processor.name': string;
-  'trace.id': string;
-  'transaction.name': string;
-  'transaction.type': string;
-  'transaction.id': string;
-  'transaction.duration.us': number;
-  'transaction.duration.histogram': {
-    values: number[];
-    counts: number[];
-  };
-  'transaction.sampled': true;
-  'service.name': string;
-  'service.environment': string;
-  'service.node.name': string;
-  'span.id': string;
-  'span.name': string;
-  'span.type': string;
-  'span.subtype': string;
-  'span.duration.us': number;
-  'span.destination.service.name': string;
-  'span.destination.service.resource': string;
-  'span.destination.service.type': string;
-  'span.destination.service.response_time.sum.us': number;
-  'span.destination.service.response_time.count': number;
-  'span.self_time.count': number;
-  'span.self_time.sum.us': number;
-}> &
-  ApplicationMetricFields;
-
-export class Entity {
-  constructor(public readonly fields: Fields) {
+export class Entity<TFields extends Fields> {
+  constructor(public readonly fields: TFields) {
     this.fields = fields;
   }
 
-  defaults(defaults: Fields) {
+  defaults(defaults: TFields) {
     Object.keys(defaults).forEach((key) => {
-      const fieldName: keyof Fields = key as any;
+      const fieldName: keyof TFields = key as any;
 
       if (!(fieldName in this.fields)) {
         this.fields[fieldName] = defaults[fieldName] as any;
@@ -8,8 +8,8 @@
 
 import { Entity, Fields } from './entity';
 
-export class Serializable extends Entity {
-  constructor(fields: Fields) {
+export class Serializable<TFields extends Fields> extends Entity<TFields> {
+  constructor(fields: TFields) {
     super({
       ...fields,
     });
@@ -19,7 +19,7 @@ export class Serializable extends Entity {
     this.fields['@timestamp'] = time;
     return this;
   }
-  serialize(): Fields[] {
+  serialize(): TFields[] {
     return [this.fields];
   }
 }
@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Entity } from '../entity';
+import { generateShortId } from '../utils/generate_id';
+import { Kibana } from './kibana';
+import { StackMonitoringFields } from './stack_monitoring_fields';
+import { ClusterStats } from './cluster_stats';
+
+export class Cluster extends Entity<StackMonitoringFields> {
+  kibana(name: string, index: string = '.kibana') {
+    return new Kibana({
+      cluster_uuid: this.fields.cluster_uuid,
+      'kibana_stats.kibana.name': name,
+      'kibana_stats.kibana.uuid': generateShortId(),
+      'kibana_stats.kibana.index': index,
+      type: 'kibana_stats',
+    });
+  }
+
+  stats() {
+    return new ClusterStats({
+      ...this.fields,
+    });
+  }
+}
+
+export function cluster(name: string) {
+  return new Cluster({
+    cluster_name: name,
+    cluster_uuid: generateShortId(),
+  });
+}
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Serializable } from '../serializable';
+import { StackMonitoringFields } from './stack_monitoring_fields';
+
+export class ClusterStats extends Serializable<StackMonitoringFields> {
+  constructor(fields: StackMonitoringFields) {
+    super(fields);
+
+    this.fields.type = 'cluster_stats';
+    this.fields['license.status'] = 'active';
+  }
+
+  timestamp(timestamp: number) {
+    super.timestamp(timestamp);
+    this.fields['cluster_stats.timestamp'] = new Date(timestamp).toISOString();
+    return this;
+  }
+
+  indices(count: number) {
+    this.fields['cluster_stats.indices.count'] = count;
+    return this;
+  }
+}
@@ -0,0 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import { cluster } from './cluster';
+
+export const stackMonitoring = {
+  cluster,
+};
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Serializable } from '../serializable';
+import { StackMonitoringFields } from './stack_monitoring_fields';
+import { KibanaStats } from './kibana_stats';
+
+export class Kibana extends Serializable<StackMonitoringFields> {
+  stats() {
+    return new KibanaStats({
+      ...this.fields,
+    });
+  }
+}
@@ -0,0 +1,26 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Serializable } from '../serializable';
+import { StackMonitoringFields } from './stack_monitoring_fields';
+
+export class KibanaStats extends Serializable<StackMonitoringFields> {
+  timestamp(timestamp: number) {
+    super.timestamp(timestamp);
+    this.fields['kibana_stats.timestamp'] = new Date(timestamp).toISOString();
+    this.fields['kibana_stats.response_times.max'] = 250;
+    this.fields['kibana_stats.kibana.status'] = 'green';
+    return this;
+  }
+
+  requests(disconnects: number, total: number) {
+    this.fields['kibana_stats.requests.disconnects'] = disconnects;
+    this.fields['kibana_stats.requests.total'] = total;
+    return this;
+  }
+}
@@ -0,0 +1,29 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Fields } from '../entity';
+
+export type StackMonitoringFields = Fields &
+  Partial<{
+    cluster_name: string;
+    cluster_uuid: string;
+    type: string;
+
+    'cluster_stats.timestamp': string;
+    'cluster_stats.indices.count': number;
+    'license.status': string;
+
+    'kibana_stats.kibana.name': string;
+    'kibana_stats.kibana.uuid': string;
+    'kibana_stats.kibana.status': string;
+    'kibana_stats.kibana.index': string;
+    'kibana_stats.requests.disconnects': number;
+    'kibana_stats.requests.total': number;
+    'kibana_stats.timestamp': string;
+    'kibana_stats.response_times.max': number;
+  }>;
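Note: a short sketch (not part of the diff) of the new stack monitoring entity model in use, following the example scenarios further down; the cluster and Kibana names and the counts are illustrative.

const cluster = stackMonitoring.cluster('test-cluster');

// ClusterStats and KibanaStats extend Serializable<StackMonitoringFields>,
// so serialize() returns flat documents built from the accumulated fields.
const clusterDocs = cluster.stats().timestamp(Date.now()).indices(115).serialize();
const kibanaDocs = cluster.kibana('kibana-01').stats().timestamp(Date.now()).requests(10, 20).serialize();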
@@ -7,20 +7,17 @@
  */
 
 import { Client } from '@elastic/elasticsearch';
-import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
-import { Logger } from './logger';
+import { Logger } from './create_logger';
 
 export async function cleanWriteTargets({
-  writeTargets,
+  targets,
   client,
   logger,
 }: {
-  writeTargets: ElasticsearchOutputWriteTargets;
+  targets: string[];
   client: Client;
   logger: Logger;
 }) {
-  const targets = Object.values(writeTargets);
-
   logger.info(`Cleaning indices: ${targets.join(', ')}`);
 
   const response = await client.deleteByQuery({
packages/elastic-apm-synthtrace/src/lib/utils/dedot.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+import { set } from 'lodash';
+
+export function dedot(source: Record<string, any>, target: Record<string, any>) {
+  // eslint-disable-next-line guard-for-in
+  for (const key in source) {
+    const val = source[key as keyof typeof source];
+    set(target, key, val);
+  }
+}
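Note: dedot() is a thin wrapper over lodash set(), which treats dotted keys as paths; a quick sketch (not part of the diff) of its effect:

const target = {};
dedot({ 'kibana_stats.kibana.name': 'kibana-01', type: 'kibana_stats' }, target);
// target is now { kibana_stats: { kibana: { name: 'kibana-01' } }, type: 'kibana_stats' }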
@@ -1,67 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import { isPromise } from 'util/types';
-
-export enum LogLevel {
-  trace = 0,
-  debug = 1,
-  info = 2,
-  error = 3,
-}
-
-function getTimeString() {
-  return `[${new Date().toLocaleTimeString()}]`;
-}
-
-export function createLogger(logLevel: LogLevel) {
-  function logPerf(name: string, start: bigint) {
-    // eslint-disable-next-line no-console
-    console.debug(
-      getTimeString(),
-      `${name}: ${Number(process.hrtime.bigint() - start) / 1000000}ms`
-    );
-  }
-  return {
-    perf: <T extends any>(name: string, cb: () => T): T => {
-      if (logLevel <= LogLevel.trace) {
-        const start = process.hrtime.bigint();
-        const val = cb();
-        if (isPromise(val)) {
-          val.then(() => {
-            logPerf(name, start);
-          });
-        } else {
-          logPerf(name, start);
-        }
-        return val;
-      }
-      return cb();
-    },
-    debug: (...args: any[]) => {
-      if (logLevel <= LogLevel.debug) {
-        // eslint-disable-next-line no-console
-        console.debug(getTimeString(), ...args);
-      }
-    },
-    info: (...args: any[]) => {
-      if (logLevel <= LogLevel.info) {
-        // eslint-disable-next-line no-console
-        console.log(getTimeString(), ...args);
-      }
-    },
-    error: (...args: any[]) => {
-      if (logLevel <= LogLevel.error) {
-        // eslint-disable-next-line no-console
-        console.log(getTimeString(), ...args);
-      }
-    },
-  };
-}
-
-export type Logger = ReturnType<typeof createLogger>;
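Note: the logger is not removed outright; per the imports elsewhere in this commit it now lives in lib/utils/create_logger.ts with, presumably, the same API as the deleted file above. An illustrative use of the perf() helper (the label and generateEvents callback are made up):

const logger = createLogger(LogLevel.trace);
// perf() runs the callback and, at trace level, logs how long it took;
// promise results are timed when they resolve.
const events = logger.perf('generate_events', () => generateEvents());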
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Fields } from '../entity';
+import { dedot } from './dedot';
+
+export interface ElasticsearchOutput {
+  _index: string;
+  _source: unknown;
+  timestamp: number;
+}
+
+export function eventsToElasticsearchOutput({
+  events,
+  writeTarget,
+}: {
+  events: Fields[];
+  writeTarget: string;
+}): ElasticsearchOutput[] {
+  return events.map((event) => {
+    const values = {};
+
+    const timestamp = event['@timestamp']!;
+
+    Object.assign(values, event, {
+      '@timestamp': new Date(timestamp).toISOString(),
+    });
+
+    const document = {};
+
+    dedot(values, document);
+
+    return {
+      _index: writeTarget,
+      _source: document,
+      timestamp,
+    };
+  });
+}
@@ -6,75 +6,103 @@
  * Side Public License, v 1.
  */
 
-import { service, timerange, getTransactionMetrics, getSpanDestinationMetrics } from '../..';
-import { getBreakdownMetrics } from '../../lib/utils/get_breakdown_metrics';
+import { apm, timerange } from '../../index';
+import { apmEventsToElasticsearchOutput } from '../../lib/apm/utils/apm_events_to_elasticsearch_output';
+import { getApmWriteTargets } from '../../lib/apm/utils/get_apm_write_targets';
+import { Scenario } from '../scenario';
+import { getCommonServices } from '../utils/get_common_services';
 
-export default function ({ from, to }: { from: number; to: number }) {
-  const numServices = 3;
+const scenario: Scenario = async ({ target, logLevel }) => {
+  const { client, logger } = getCommonServices({ target, logLevel });
+  const writeTargets = await getApmWriteTargets({ client });
 
-  const range = timerange(from, to);
+  return {
+    generate: ({ from, to }) => {
+      const numServices = 3;
 
-  const transactionName = '240rpm/75% 1000ms';
+      const range = timerange(from, to);
 
-  const successfulTimestamps = range.interval('1s').rate(3);
+      const transactionName = '240rpm/75% 1000ms';
 
-  const failedTimestamps = range.interval('1s').rate(1);
+      const successfulTimestamps = range.interval('1s').rate(3);
 
-  return new Array(numServices).fill(undefined).flatMap((_, index) => {
-    const instance = service(`opbeans-go-${index}`, 'production', 'go').instance('instance');
+      const failedTimestamps = range.interval('1s').rate(1);
 
-    const successfulTraceEvents = successfulTimestamps.flatMap((timestamp) =>
-      instance
-        .transaction(transactionName)
-        .timestamp(timestamp)
-        .duration(1000)
-        .success()
-        .children(
-          instance
-            .span('GET apm-*/_search', 'db', 'elasticsearch')
-            .duration(1000)
-            .success()
-            .destination('elasticsearch')
-            .timestamp(timestamp),
-          instance.span('custom_operation', 'custom').duration(100).success().timestamp(timestamp)
-        )
-        .serialize()
-    );
+      return new Array(numServices).fill(undefined).flatMap((_, index) => {
+        const events = logger.perf('generating_apm_events', () => {
+          const instance = apm
+            .service(`opbeans-go-${index}`, 'production', 'go')
+            .instance('instance');
 
-    const failedTraceEvents = failedTimestamps.flatMap((timestamp) =>
-      instance
-        .transaction(transactionName)
-        .timestamp(timestamp)
-        .duration(1000)
-        .failure()
-        .errors(
-          instance.error('[ResponseError] index_not_found_exception').timestamp(timestamp + 50)
-        )
-        .serialize()
-    );
+          const successfulTraceEvents = successfulTimestamps.flatMap((timestamp) =>
+            instance
+              .transaction(transactionName)
+              .timestamp(timestamp)
+              .duration(1000)
+              .success()
+              .children(
+                instance
+                  .span('GET apm-*/_search', 'db', 'elasticsearch')
+                  .duration(1000)
+                  .success()
+                  .destination('elasticsearch')
+                  .timestamp(timestamp),
+                instance
+                  .span('custom_operation', 'custom')
+                  .duration(100)
+                  .success()
+                  .timestamp(timestamp)
+              )
+              .serialize()
+          );
 
-    const metricsets = range
-      .interval('30s')
-      .rate(1)
-      .flatMap((timestamp) =>
-        instance
-          .appMetrics({
-            'system.memory.actual.free': 800,
-            'system.memory.total': 1000,
-            'system.cpu.total.norm.pct': 0.6,
-            'system.process.cpu.total.norm.pct': 0.7,
-          })
-          .timestamp(timestamp)
-          .serialize()
-      );
+          const failedTraceEvents = failedTimestamps.flatMap((timestamp) =>
+            instance
+              .transaction(transactionName)
+              .timestamp(timestamp)
+              .duration(1000)
+              .failure()
+              .errors(
+                instance
+                  .error('[ResponseError] index_not_found_exception')
+                  .timestamp(timestamp + 50)
+              )
+              .serialize()
+          );
 
-    const events = successfulTraceEvents.concat(failedTraceEvents);
+          const metricsets = range
+            .interval('30s')
+            .rate(1)
+            .flatMap((timestamp) =>
+              instance
+                .appMetrics({
+                  'system.memory.actual.free': 800,
+                  'system.memory.total': 1000,
+                  'system.cpu.total.norm.pct': 0.6,
+                  'system.process.cpu.total.norm.pct': 0.7,
+                })
+                .timestamp(timestamp)
+                .serialize()
+            );
 
-    return [
-      ...events,
-      ...metricsets,
-      ...getTransactionMetrics(events),
-      ...getSpanDestinationMetrics(events),
-      ...getBreakdownMetrics(events),
-    ];
-  });
-}
+          return [...successfulTraceEvents, ...failedTraceEvents, ...metricsets];
+        });
+
+        return logger.perf('apm_events_to_es_output', () =>
+          apmEventsToElasticsearchOutput({
+            events: [
+              ...events,
+              ...logger.perf('get_transaction_metrics', () => apm.getTransactionMetrics(events)),
+              ...logger.perf('get_span_destination_metrics', () =>
+                apm.getSpanDestinationMetrics(events)
+              ),
+              ...logger.perf('get_breakdown_metrics', () => apm.getBreakdownMetrics(events)),
+            ],
+            writeTargets,
+          })
+        );
+      });
+    },
+  };
+};
+
+export default scenario;
@@ -0,0 +1,48 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { stackMonitoring, timerange } from '../../index';
+import { eventsToElasticsearchOutput } from '../../lib/utils/to_elasticsearch_output';
+import { Scenario } from '../scenario';
+import { getCommonServices } from '../utils/get_common_services';
+
+const scenario: Scenario = async ({ target, writeTarget, logLevel }) => {
+  const { logger } = getCommonServices({ target, logLevel });
+
+  if (!writeTarget) {
+    throw new Error('Write target is not defined');
+  }
+
+  return {
+    generate: ({ from, to }) => {
+      const kibanaStats = stackMonitoring.cluster('cluster-01').kibana('kibana-01').stats();
+
+      const range = timerange(from, to);
+      return range
+        .interval('30s')
+        .rate(1)
+        .flatMap((timestamp) => {
+          const events = logger.perf('generating_sm_events', () => {
+            return kibanaStats.timestamp(timestamp).requests(10, 20).serialize();
+          });
+
+          return logger.perf('sm_events_to_es_output', () => {
+            const smEvents = eventsToElasticsearchOutput({ events, writeTarget });
+            smEvents.forEach((event: any) => {
+              const ts = event._source['@timestamp'];
+              delete event._source['@timestamp'];
+              event._source.timestamp = ts;
+            });
+            return smEvents;
+          });
+        });
+    },
+  };
+};
+
+export default scenario;
@@ -0,0 +1,70 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+// Run with: node ./src/scripts/run ./src/scripts/examples/03_monitoring.ts --target=http://elastic:changeme@localhost:9200
+
+import { stackMonitoring, timerange } from '../../index';
+import {
+  ElasticsearchOutput,
+  eventsToElasticsearchOutput,
+} from '../../lib/utils/to_elasticsearch_output';
+import { Scenario } from '../scenario';
+import { getCommonServices } from '../utils/get_common_services';
+import { StackMonitoringFields } from '../../lib/stack_monitoring/stack_monitoring_fields';
+
+// TODO (mat): move this into a function like utils/apm_events_to_elasticsearch_output.ts
+function smEventsToElasticsearchOutput(
+  events: StackMonitoringFields[],
+  writeTarget: string
+): ElasticsearchOutput[] {
+  const smEvents = eventsToElasticsearchOutput({ events, writeTarget });
+  smEvents.forEach((event: any) => {
+    const ts = event._source['@timestamp'];
+    delete event._source['@timestamp'];
+    event._source.timestamp = ts;
+  });
+  return smEvents;
+}
+
+const scenario: Scenario = async ({ target, logLevel }) => {
+  const { logger } = getCommonServices({ target, logLevel });
+
+  return {
+    generate: ({ from, to }) => {
+      const cluster = stackMonitoring.cluster('test-cluster');
+      const clusterStats = cluster.stats();
+      const kibanaStats = cluster.kibana('kibana-01').stats();
+
+      const range = timerange(from, to);
+      return range
+        .interval('10s')
+        .rate(1)
+        .flatMap((timestamp) => {
+          const clusterEvents = logger.perf('generating_es_events', () => {
+            return clusterStats.timestamp(timestamp).indices(115).serialize();
+          });
+          const clusterOutputs = smEventsToElasticsearchOutput(
+            clusterEvents,
+            '.monitoring-es-7-synthtrace'
+          );
+
+          const kibanaEvents = logger.perf('generating_kb_events', () => {
+            return kibanaStats.timestamp(timestamp).requests(10, 20).serialize();
+          });
+          const kibanaOutputs = smEventsToElasticsearchOutput(
+            kibanaEvents,
+            '.monitoring-kibana-7-synthtrace'
+          );
+
+          return [...clusterOutputs, ...kibanaOutputs];
+        });
+    },
+  };
+};
+
+export default scenario;
@@ -7,136 +7,109 @@
  */
 import datemath from '@elastic/datemath';
 import yargs from 'yargs/yargs';
-import { cleanWriteTargets } from '../lib/utils/clean_write_targets';
+import { Argv } from 'yargs';
 import { intervalToMs } from './utils/interval_to_ms';
-import { getCommonResources } from './utils/get_common_resources';
 import { startHistoricalDataUpload } from './utils/start_historical_data_upload';
 import { startLiveDataUpload } from './utils/start_live_data_upload';
+import { parseRunCliFlags } from './utils/parse_run_cli_flags';
+import { getCommonServices } from './utils/get_common_services';
 
+function options(y: Argv) {
+  return y
+    .positional('file', {
+      describe: 'File that contains the trace scenario',
+      demandOption: true,
+      string: true,
+    })
+    .option('target', {
+      describe: 'Elasticsearch target, including username/password',
+      demandOption: true,
+      string: true,
+    })
+    .option('from', {
+      description: 'The start of the time window',
+    })
+    .option('to', {
+      description: 'The end of the time window',
+    })
+    .option('live', {
+      description: 'Generate and index data continuously',
+      boolean: true,
+    })
+    .option('clean', {
+      describe: 'Clean APM indices before indexing new data',
+      default: false,
+      boolean: true,
+    })
+    .option('workers', {
+      describe: 'Amount of Node.js worker threads',
+      default: 5,
+    })
+    .option('bucketSize', {
+      describe: 'Size of bucket for which to generate data',
+      default: '15m',
+    })
+    .option('interval', {
+      describe: 'The interval at which to index data',
+      default: '10s',
+    })
+    .option('clientWorkers', {
+      describe: 'Number of concurrently connected ES clients',
+      default: 5,
+    })
+    .option('batchSize', {
+      describe: 'Number of documents per bulk index request',
+      default: 1000,
+    })
+    .option('logLevel', {
+      describe: 'Log level',
+      default: 'info',
+    })
+    .option('writeTarget', {
+      describe: 'Target to index',
+      string: true,
+    })
+    .conflicts('to', 'live');
+}
+
+export type RunCliFlags = ReturnType<typeof options>['argv'];
+
 yargs(process.argv.slice(2))
-  .command(
-    '*',
-    'Generate data and index into Elasticsearch',
-    (y) => {
-      return y
-        .positional('file', {
-          describe: 'File that contains the trace scenario',
-          demandOption: true,
-          string: true,
-        })
-        .option('target', {
-          describe: 'Elasticsearch target, including username/password',
-          demandOption: true,
-          string: true,
-        })
-        .option('from', {
-          description: 'The start of the time window',
-        })
-        .option('to', {
-          description: 'The end of the time window',
-        })
-        .option('live', {
-          description: 'Generate and index data continuously',
-          boolean: true,
-        })
-        .option('clean', {
-          describe: 'Clean APM indices before indexing new data',
-          default: false,
-          boolean: true,
-        })
-        .option('workers', {
-          describe: 'Amount of Node.js worker threads',
-          default: 5,
-        })
-        .option('bucketSize', {
-          describe: 'Size of bucket for which to generate data',
-          default: '15m',
-        })
-        .option('interval', {
-          describe: 'The interval at which to index data',
-          default: '10s',
-        })
-        .option('clientWorkers', {
-          describe: 'Number of concurrently connected ES clients',
-          default: 5,
-        })
-        .option('batchSize', {
-          describe: 'Number of documents per bulk index request',
-          default: 1000,
-        })
-        .option('logLevel', {
-          describe: 'Log level',
-          default: 'info',
-        })
-        .conflicts('to', 'live');
-    },
-    async (argv) => {
-      const file = String(argv.file || argv._[0]);
+  .command('*', 'Generate data and index into Elasticsearch', options, async (argv) => {
+    const runOptions = parseRunCliFlags(argv);
 
-      const { target, workers, clean, clientWorkers, batchSize } = argv;
+    const { logger } = getCommonServices(runOptions);
 
-      const { scenario, intervalInMs, bucketSizeInMs, logger, writeTargets, client, logLevel } =
-        await getCommonResources({
-          ...argv,
-          file,
-        });
+    const to = datemath.parse(String(argv.to ?? 'now'))!.valueOf();
+    const from = argv.from
+      ? datemath.parse(String(argv.from))!.valueOf()
+      : to - intervalToMs('15m');
 
-      if (clean) {
-        await cleanWriteTargets({ writeTargets, client, logger });
-      }
+    const live = argv.live;
 
-      const to = datemath.parse(String(argv.to ?? 'now'))!.valueOf();
-      const from = argv.from
-        ? datemath.parse(String(argv.from))!.valueOf()
-        : to - intervalToMs('15m');
+    logger.info(
+      `Starting data generation\n: ${JSON.stringify(
+        {
+          ...runOptions,
+          from: new Date(from).toISOString(),
+          to: new Date(to).toISOString(),
+        },
+        null,
+        2
+      )}`
+    );
 
-      const live = argv.live;
+    startHistoricalDataUpload({
+      ...runOptions,
+      from,
+      to,
+    });
 
-      logger.info(
-        `Starting data generation\n: ${JSON.stringify(
-          {
-            intervalInMs,
-            bucketSizeInMs,
-            workers,
-            target,
-            writeTargets,
-            from: new Date(from).toISOString(),
-            to: new Date(to).toISOString(),
-            live,
-          },
-          null,
-          2
-        )}`
-      );
-
-      startHistoricalDataUpload({
-        from,
-        to,
-        file,
-        bucketSizeInMs,
-        client,
-        workers,
-        clientWorkers,
-        batchSize,
-        writeTargets,
-        logger,
-        logLevel,
-        target,
-      });
-
-      if (live) {
-        startLiveDataUpload({
-          bucketSizeInMs,
-          client,
-          intervalInMs,
-          logger,
-          scenario,
-          start: to,
-          clientWorkers,
-          batchSize,
-          writeTargets,
-        });
-      }
-    }
-  )
+    if (live) {
+      startLiveDataUpload({
+        ...runOptions,
+        start: to,
+      });
+    }
+  })
  .parse();
packages/elastic-apm-synthtrace/src/scripts/scenario.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { ElasticsearchOutput } from '../lib/utils/to_elasticsearch_output';
+import { RunOptions } from './utils/parse_run_cli_flags';
+
+type Generate = (range: { from: number; to: number }) => ElasticsearchOutput[];
+export type Scenario = (options: RunOptions) => Promise<{ generate: Generate }>;
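Note: a minimal scenario module (not part of the diff) conforming to the new Scenario contract above; the write target and document body are illustrative, and the relative import paths assume the module sits next to scenario.ts.

import { timerange } from '../index';
import { eventsToElasticsearchOutput } from '../lib/utils/to_elasticsearch_output';
import { Scenario } from './scenario';

const scenario: Scenario = async () => ({
  generate: ({ from, to }) =>
    eventsToElasticsearchOutput({
      // One otherwise-empty document per minute across the requested range.
      events: timerange(from, to)
        .interval('1m')
        .rate(1)
        .flatMap((timestamp) => [{ '@timestamp': timestamp }]),
      writeTarget: 'synthtrace-demo',
    }),
});

export default scenario;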
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Client } from '@elastic/elasticsearch';
+import { createLogger, LogLevel } from '../../lib/utils/create_logger';
+
+export function getCommonServices({ target, logLevel }: { target: string; logLevel: LogLevel }) {
+  const client = new Client({
+    node: target,
+  });
+
+  const logger = createLogger(logLevel);
+
+  return {
+    logger,
+    client,
+  };
+}
+
+export type RunServices = ReturnType<typeof getCommonServices>;
@@ -6,10 +6,8 @@
  * Side Public License, v 1.
  */
 import Path from 'path';
-import { Fields } from '../../lib/entity';
 import { Logger } from '../../lib/utils/create_logger';
-
-export type Scenario = (options: { from: number; to: number }) => Fields[];
+import { Scenario } from '../scenario';
 
 export function getScenario({ file, logger }: { file: unknown; logger: Logger }) {
   const location = Path.join(process.cwd(), String(file));
@ -6,25 +6,16 @@
 * Side Public License, v 1.
 */

import { Client } from '@elastic/elasticsearch';
import { getScenario } from './get_scenario';
import { getWriteTargets } from '../../lib/utils/get_write_targets';
import { pick } from 'lodash';
import { LogLevel } from '../../lib/utils/create_logger';
import { RunCliFlags } from '../run';
import { intervalToMs } from './interval_to_ms';
import { createLogger, LogLevel } from '../../lib/utils/create_logger';

export async function getCommonResources({
  file,
  interval,
  bucketSize,
  target,
  logLevel,
}: {
  file: string;
  interval: string;
  bucketSize: string;
  target: string;
  logLevel: string;
}) {
export function parseRunCliFlags(flags: RunCliFlags) {
  const { file, _, logLevel, interval, bucketSize } = flags;

  const parsedFile = String(file || _[0]);

  let parsedLogLevel = LogLevel.info;
  switch (logLevel) {
    case 'trace':

@ -44,8 +35,6 @@ export async function getCommonResources({
      break;
  }

  const logger = createLogger(parsedLogLevel);

  const intervalInMs = intervalToMs(interval);
  if (!intervalInMs) {
    throw new Error('Invalid interval');

@ -57,22 +46,13 @@
    throw new Error('Invalid bucket size');
  }

  const client = new Client({
    node: target,
  });

  const [scenario, writeTargets] = await Promise.all([
    getScenario({ file, logger }),
    getWriteTargets({ client }),
  ]);

  return {
    scenario,
    writeTargets,
    logger,
    client,
    ...pick(flags, 'target', 'workers', 'clientWorkers', 'batchSize', 'writeTarget'),
    intervalInMs,
    bucketSizeInMs,
    logLevel: parsedLogLevel,
    file: parsedFile,
  };
}

export type RunOptions = ReturnType<typeof parseRunCliFlags>;
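Combined with the ...runOptions call sites at the top of this section, the flags are now parsed once and the resulting bag is passed around wholesale. A hedged sketch of the resulting run flow (the live/from/to plumbing is simplified, and resolveTimes is a hypothetical helper):

const runOptions = parseRunCliFlags(flags);
const { from, to, live } = resolveTimes(flags); // hypothetical helper

startHistoricalDataUpload({ ...runOptions, from, to });

if (live) {
  startLiveDataUpload({ ...runOptions, start: to });
}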
@ -5,41 +5,30 @@
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */
import { Client } from '@elastic/elasticsearch';
import pLimit from 'p-limit';
import Path from 'path';
import { Worker } from 'worker_threads';
import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
import { Logger, LogLevel } from '../../lib/utils/create_logger';
import { getCommonServices } from './get_common_services';
import { RunOptions } from './parse_run_cli_flags';
import { WorkerData } from './upload_next_batch';

export async function startHistoricalDataUpload({
  from,
  to,
  intervalInMs,
  bucketSizeInMs,
  workers,
  clientWorkers,
  batchSize,
  writeTargets,
  logLevel,
  logger,
  target,
  file,
}: {
  from: number;
  to: number;
  bucketSizeInMs: number;
  client: Client;
  workers: number;
  clientWorkers: number;
  batchSize: number;
  writeTargets: ElasticsearchOutputWriteTargets;
  logger: Logger;
  logLevel: LogLevel;
  target: string;
  file: string;
}) {
  writeTarget,
}: RunOptions & { from: number; to: number }) {
  let requestedUntil: number = from;

  const { logger } = getCommonServices({ target, logLevel });

  function processNextBatch() {
    const bucketFrom = requestedUntil;
    const bucketTo = Math.min(to, bucketFrom + bucketSizeInMs);

@ -56,17 +45,22 @@ export async function startHistoricalDataUpload({
      ).toISOString()}`
    );

    const workerData: WorkerData = {
      bucketFrom,
      bucketTo,
      file,
      logLevel,
      batchSize,
      bucketSizeInMs,
      clientWorkers,
      intervalInMs,
      target,
      workers,
      writeTarget,
    };

    const worker = new Worker(Path.join(__dirname, './upload_next_batch.js'), {
      workerData: {
        bucketFrom,
        bucketTo,
        logLevel,
        writeTargets,
        target,
        file,
        clientWorkers,
        batchSize,
      },
      workerData,
    });

    logger.perf('created_worker', () => {
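The worker now receives only the plain WorkerData fields: worker_threads can pass structured-clone-safe values across the thread boundary, so clients, loggers and the scenario itself are rebuilt inside the worker rather than shipped to it. A minimal self-contained sketch of that round trip (file layout and bucket values are illustrative):

import { Worker, isMainThread, workerData } from 'worker_threads';

if (isMainThread) {
  // Main thread: hand over plain numbers, not live objects.
  const worker = new Worker(__filename, {
    workerData: { bucketFrom: 0, bucketTo: 60000 },
  });
  worker.on('exit', (code) => console.log(`worker exited with code ${code}`));
} else {
  // Worker: everything else (client, logger, scenario) is rebuilt here.
  const { bucketFrom, bucketTo } = workerData as { bucketFrom: number; bucketTo: number };
  console.log(`processing bucket ${bucketFrom}..${bucketTo}`);
}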
@ -6,44 +6,49 @@
 * Side Public License, v 1.
 */

import { Client } from '@elastic/elasticsearch';
import { partition } from 'lodash';
import { Fields } from '../../lib/entity';
import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
import { Scenario } from './get_scenario';
import { Logger } from '../../lib/utils/create_logger';
import { getScenario } from './get_scenario';
import { uploadEvents } from './upload_events';
import { RunOptions } from './parse_run_cli_flags';
import { getCommonServices } from './get_common_services';
import { ElasticsearchOutput } from '../../lib/utils/to_elasticsearch_output';

export function startLiveDataUpload({
export async function startLiveDataUpload({
  file,
  start,
  bucketSizeInMs,
  intervalInMs,
  clientWorkers,
  batchSize,
  writeTargets,
  scenario,
  client,
  logger,
}: {
  start: number;
  bucketSizeInMs: number;
  intervalInMs: number;
  clientWorkers: number;
  batchSize: number;
  writeTargets: ElasticsearchOutputWriteTargets;
  scenario: Scenario;
  client: Client;
  logger: Logger;
}) {
  let queuedEvents: Fields[] = [];
  target,
  logLevel,
  workers,
  writeTarget,
}: RunOptions & { start: number }) {
  let queuedEvents: ElasticsearchOutput[] = [];
  let requestedUntil: number = start;

  const { logger, client } = getCommonServices({ target, logLevel });

  const scenario = await getScenario({ file, logger });
  const { generate } = await scenario({
    batchSize,
    bucketSizeInMs,
    clientWorkers,
    file,
    intervalInMs,
    logLevel,
    target,
    workers,
    writeTarget,
  });

  function uploadNextBatch() {
    const end = new Date().getTime();
    if (end > requestedUntil) {
      const bucketFrom = requestedUntil;
      const bucketTo = requestedUntil + bucketSizeInMs;
      const nextEvents = scenario({ from: bucketFrom, to: bucketTo });
      const nextEvents = generate({ from: bucketFrom, to: bucketTo });
      logger.debug(
        `Requesting ${new Date(bucketFrom).toISOString()} to ${new Date(
          bucketTo

@ -55,7 +60,7 @@ export function startLiveDataUpload({

      const [eventsToUpload, eventsToRemainInQueue] = partition(
        queuedEvents,
        (event) => event['@timestamp']! <= end
        (event) => event.timestamp <= end
      );

      logger.info(`Uploading until ${new Date(end).toISOString()}, events: ${eventsToUpload.length}`);

@ -64,11 +69,10 @@ export function startLiveDataUpload({

      uploadEvents({
        events: eventsToUpload,
        client,
        clientWorkers,
        batchSize,
        writeTargets,
        logger,
        client,
      });
    }
@ -9,11 +9,7 @@ import { Client } from '@elastic/elasticsearch';
import { chunk } from 'lodash';
import pLimit from 'p-limit';
import { inspect } from 'util';
import { Fields } from '../../lib/entity';
import {
  ElasticsearchOutputWriteTargets,
  toElasticsearchOutput,
} from '../../lib/output/to_elasticsearch_output';
import { ElasticsearchOutput } from '../../lib/utils/to_elasticsearch_output';
import { Logger } from '../../lib/utils/create_logger';

export function uploadEvents({

@ -21,24 +17,23 @@ export function uploadEvents({
  client,
  clientWorkers,
  batchSize,
  writeTargets,
  logger,
}: {
  events: Fields[];
  events: ElasticsearchOutput[];
  client: Client;
  clientWorkers: number;
  batchSize: number;
  writeTargets: ElasticsearchOutputWriteTargets;
  logger: Logger;
}) {
  const esDocuments = logger.perf('to_elasticsearch_output', () => {
    return toElasticsearchOutput({ events, writeTargets });
  });
  const fn = pLimit(clientWorkers);

  const batches = chunk(esDocuments, batchSize);
  const batches = chunk(events, batchSize);

  logger.debug(`Uploading ${esDocuments.length} in ${batches.length} batches`);
  if (!batches.length) {
    return;
  }

  logger.debug(`Uploading ${events.length} in ${batches.length} batches`);

  const time = new Date().getTime();

@ -47,7 +42,6 @@ export function uploadEvents({
    fn(() => {
      return logger.perf('bulk_upload', () =>
        client.bulk({
          require_alias: true,
          refresh: false,
          body: batch.flatMap((doc) => {
            return [{ index: { _index: doc._index } }, doc._source];
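The body assembled above alternates an action line with its document, which is the shape the Elasticsearch bulk API expects. A hedged illustration of what two ElasticsearchOutput entries expand to (index names and field values are illustrative):

const body = [
  { index: { _index: 'apm-8.0.0-transaction' } },
  { '@timestamp': '2021-01-01T00:00:00.000Z', processor: { event: 'transaction' } },
  { index: { _index: 'apm-8.0.0-metric' } },
  { '@timestamp': '2021-01-01T00:00:00.000Z', processor: { event: 'metric' } },
];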
@ -9,22 +9,37 @@
// add this to workerExample.js file.
import { Client } from '@elastic/elasticsearch';
import { workerData } from 'worker_threads';
import { ElasticsearchOutputWriteTargets } from '../../lib/output/to_elasticsearch_output';
import { getScenario } from './get_scenario';
import { createLogger, LogLevel } from '../../lib/utils/create_logger';
import { uploadEvents } from './upload_events';

const { bucketFrom, bucketTo, file, logLevel, target, writeTargets, clientWorkers, batchSize } =
  workerData as {
    bucketFrom: number;
    bucketTo: number;
    file: string;
    logLevel: LogLevel;
    target: string;
    writeTargets: ElasticsearchOutputWriteTargets;
    clientWorkers: number;
    batchSize: number;
  };
export interface WorkerData {
  bucketFrom: number;
  bucketTo: number;
  file: string;
  logLevel: LogLevel;
  clientWorkers: number;
  batchSize: number;
  intervalInMs: number;
  bucketSizeInMs: number;
  target: string;
  workers: number;
  writeTarget?: string;
}

const {
  bucketFrom,
  bucketTo,
  file,
  logLevel,
  clientWorkers,
  batchSize,
  intervalInMs,
  bucketSizeInMs,
  workers,
  target,
  writeTarget,
} = workerData as WorkerData;

async function uploadNextBatch() {
  if (bucketFrom === bucketTo) {

@ -38,8 +53,20 @@ async function uploadNextBatch() {

  const scenario = await logger.perf('get_scenario', () => getScenario({ file, logger }));

  const { generate } = await scenario({
    intervalInMs,
    bucketSizeInMs,
    logLevel,
    file,
    clientWorkers,
    batchSize,
    target,
    workers,
    writeTarget,
  });

  const events = logger.perf('execute_scenario', () =>
    scenario({ from: bucketFrom, to: bucketTo })
    generate({ from: bucketFrom, to: bucketTo })
  );

  return uploadEvents({

@ -47,7 +74,6 @@ async function uploadNextBatch() {
    client,
    clientWorkers,
    batchSize,
    writeTargets,
    logger,
  });
}

@ -56,6 +82,11 @@ uploadNextBatch()
  .then(() => {
    process.exit(0);
  })
  .catch(() => {
    process.exit(1);
  .catch((error) => {
    // eslint-disable-next-line
    console.log(error);
    // make sure error shows up in console before process is killed
    setTimeout(() => {
      process.exit(1);
    }, 100);
  });
@ -6,8 +6,8 @@
 * Side Public License, v 1.
 */

import { Fields } from '../lib/entity';
import { toElasticsearchOutput } from '../lib/output/to_elasticsearch_output';
import { apmEventsToElasticsearchOutput } from '../lib/apm/utils/apm_events_to_elasticsearch_output';
import { ApmFields } from '../lib/apm/apm_fields';

const writeTargets = {
  transaction: 'apm-8.0.0-transaction',

@ -16,8 +16,8 @@ const writeTargets = {
  error: 'apm-8.0.0-error',
};

describe('output to elasticsearch', () => {
  let event: Fields;
describe('output apm events to elasticsearch', () => {
  let event: ApmFields;

  beforeEach(() => {
    event = {

@ -29,13 +29,13 @@ describe('output to elasticsearch', () => {
  });

  it('properly formats @timestamp', () => {
    const doc = toElasticsearchOutput({ events: [event], writeTargets })[0] as any;
    const doc = apmEventsToElasticsearchOutput({ events: [event], writeTargets })[0] as any;

    expect(doc._source['@timestamp']).toEqual('2020-12-31T23:00:00.000Z');
  });

  it('formats a nested object', () => {
    const doc = toElasticsearchOutput({ events: [event], writeTargets })[0] as any;
    const doc = apmEventsToElasticsearchOutput({ events: [event], writeTargets })[0] as any;

    expect(doc._source.processor).toEqual({
      event: 'transaction',

@ -44,7 +44,7 @@ describe('output to elasticsearch', () => {
  });

  it('formats all fields consistently', () => {
    const doc = toElasticsearchOutput({ events: [event], writeTargets })[0] as any;
    const doc = apmEventsToElasticsearchOutput({ events: [event], writeTargets })[0] as any;

    expect(doc._source).toMatchInlineSnapshot(`
      Object {
@ -6,14 +6,14 @@
 * Side Public License, v 1.
 */

import { service } from '../../lib/service';
import { apm } from '../../lib/apm';
import { timerange } from '../../lib/timerange';

describe('simple trace', () => {
  let events: Array<Record<string, any>>;

  beforeEach(() => {
    const javaService = service('opbeans-java', 'production', 'java');
    const javaService = apm.service('opbeans-java', 'production', 'java');
    const javaInstance = javaService.instance('instance-1');

    const range = timerange(
@ -6,15 +6,15 @@
 * Side Public License, v 1.
 */

import { service } from '../../lib/service';
import { apm } from '../../lib/apm';
import { timerange } from '../../lib/timerange';
import { getTransactionMetrics } from '../../lib/utils/get_transaction_metrics';
import { getTransactionMetrics } from '../../lib/apm/utils/get_transaction_metrics';

describe('transaction metrics', () => {
  let events: Array<Record<string, any>>;

  beforeEach(() => {
    const javaService = service('opbeans-java', 'production', 'java');
    const javaService = apm.service('opbeans-java', 'production', 'java');
    const javaInstance = javaService.instance('instance-1');

    const range = timerange(
@ -6,15 +6,15 @@
 * Side Public License, v 1.
 */

import { service } from '../../lib/service';
import { apm } from '../../lib/apm';
import { timerange } from '../../lib/timerange';
import { getSpanDestinationMetrics } from '../../lib/utils/get_span_destination_metrics';
import { getSpanDestinationMetrics } from '../../lib/apm/utils/get_span_destination_metrics';

describe('span destination metrics', () => {
  let events: Array<Record<string, any>>;

  beforeEach(() => {
    const javaService = service('opbeans-java', 'production', 'java');
    const javaService = apm.service('opbeans-java', 'production', 'java');
    const javaInstance = javaService.instance('instance-1');

    const range = timerange(
@ -6,13 +6,13 @@
 * Side Public License, v 1.
 */
import { sumBy } from 'lodash';
import { Fields } from '../../lib/entity';
import { service } from '../../lib/service';
import { apm } from '../../lib/apm';
import { timerange } from '../../lib/timerange';
import { getBreakdownMetrics } from '../../lib/utils/get_breakdown_metrics';
import { getBreakdownMetrics } from '../../lib/apm/utils/get_breakdown_metrics';
import { ApmFields } from '../../lib/apm/apm_fields';

describe('breakdown metrics', () => {
  let events: Fields[];
  let events: ApmFields[];

  const LIST_RATE = 2;
  const LIST_SPANS = 2;

@ -21,7 +21,7 @@ describe('breakdown metrics', () => {
  const INTERVALS = 6;

  beforeEach(() => {
    const javaService = service('opbeans-java', 'production', 'java');
    const javaService = apm.service('opbeans-java', 'production', 'java');
    const javaInstance = javaService.instance('instance-1');

    const start = new Date('2021-01-01T00:00:00.000Z').getTime();
@ -6,15 +6,15 @@
 * Side Public License, v 1.
 */
import { pick } from 'lodash';
import { service } from '../../index';
import { Instance } from '../../lib/instance';
import { apm } from '../../lib/apm';
import { Instance } from '../../lib/apm/instance';

describe('transactions with errors', () => {
  let instance: Instance;
  const timestamp = new Date('2021-01-01T00:00:00.000Z').getTime();

  beforeEach(() => {
    instance = service('opbeans-java', 'production', 'java').instance('instance');
    instance = apm.service('opbeans-java', 'production', 'java').instance('instance');
  });
  it('generates error events', () => {
    const events = instance
@ -6,15 +6,15 @@
 * Side Public License, v 1.
 */
import { pick } from 'lodash';
import { service } from '../../index';
import { Instance } from '../../lib/instance';
import { apm } from '../../lib/apm';
import { Instance } from '../../lib/apm/instance';

describe('application metrics', () => {
  let instance: Instance;
  const timestamp = new Date('2021-01-01T00:00:00.000Z').getTime();

  beforeEach(() => {
    instance = service('opbeans-java', 'production', 'java').instance('instance');
    instance = apm.service('opbeans-java', 'production', 'java').instance('instance');
  });
  it('generates application metricsets', () => {
    const events = instance
@ -4,28 +4,24 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import {
  service,
  browser,
  timerange,
  getChromeUserAgentDefaults,
} from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';

export function opbeans({ from, to }: { from: number; to: number }) {
  const range = timerange(from, to);

  const opbeansJava = service('opbeans-java', 'production', 'java')
  const opbeansJava = apm
    .service('opbeans-java', 'production', 'java')
    .instance('opbeans-java-prod-1')
    .podId('opbeans-java-prod-1-pod');

  const opbeansNode = service('opbeans-node', 'production', 'nodejs').instance(
    'opbeans-node-prod-1'
  );
  const opbeansNode = apm
    .service('opbeans-node', 'production', 'nodejs')
    .instance('opbeans-node-prod-1');

  const opbeansRum = browser(
  const opbeansRum = apm.browser(
    'opbeans-rum',
    'production',
    getChromeUserAgentDefaults()
    apm.getChromeUserAgentDefaults()
  );

  return [
@ -4,18 +4,19 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';

export function generateData({ from, to }: { from: number; to: number }) {
  const range = timerange(from, to);

  const opbeansJava = service('opbeans-java', 'production', 'java')
  const opbeansJava = apm
    .service('opbeans-java', 'production', 'java')
    .instance('opbeans-java-prod-1')
    .podId('opbeans-java-prod-1-pod');

  const opbeansNode = service('opbeans-node', 'production', 'nodejs').instance(
    'opbeans-node-prod-1'
  );
  const opbeansNode = apm
    .service('opbeans-node', 'production', 'nodejs')
    .instance('opbeans-node-prod-1');

  return [
    ...range
@ -4,7 +4,7 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';

export function generateData({
  from,

@ -17,13 +17,14 @@ export function generateData({
}) {
  const range = timerange(from, to);

  const service1 = service(specialServiceName, 'production', 'java')
  const service1 = apm
    .service(specialServiceName, 'production', 'java')
    .instance('service-1-prod-1')
    .podId('service-1-prod-1-pod');

  const opbeansNode = service('opbeans-node', 'production', 'nodejs').instance(
    'opbeans-node-prod-1'
  );
  const opbeansNode = apm
    .service('opbeans-node', 'production', 'nodejs')
    .instance('opbeans-node-prod-1');

  return [
    ...range
@ -6,8 +6,7 @@
 */
import Fs from 'fs';
import { Client, HttpConnection } from '@elastic/elasticsearch';
import { SynthtraceEsClient } from '@elastic/apm-synthtrace';
import { createLogger, LogLevel } from '@elastic/apm-synthtrace';
import { apm, createLogger, LogLevel } from '@elastic/apm-synthtrace';
import { CA_CERT_PATH } from '@kbn/dev-utils';

// ***********************************************************

@ -41,7 +40,7 @@ const plugin: Cypress.PluginConfig = (on, config) => {
    ...(isCloud ? { tls: { ca: Fs.readFileSync(CA_CERT_PATH, 'utf-8') } } : {}),
  });

  const synthtraceEsClient = new SynthtraceEsClient(
  const synthtraceEsClient = new apm.ApmSynthtraceEsClient(
    client,
    createLogger(LogLevel.info)
  );
@ -5,11 +5,11 @@
 * 2.0.
 */

import { SynthtraceEsClient, createLogger, LogLevel } from '@elastic/apm-synthtrace';
import { apm, createLogger, LogLevel } from '@elastic/apm-synthtrace';
import { InheritedFtrProviderContext } from './ftr_provider_context';

export async function synthtraceEsClientService(context: InheritedFtrProviderContext) {
  const es = context.getService('es');

  return new SynthtraceEsClient(es, createLogger(LogLevel.info));
  return new apm.ApmSynthtraceEsClient(es, createLogger(LogLevel.info));
}
@ -4,8 +4,8 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import type { SynthtraceEsClient } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import type { ApmSynthtraceEsClient } from '@elastic/apm-synthtrace';

export const dataConfig = {
  rate: 20,

@ -26,11 +26,11 @@ export async function generateData({
  start,
  end,
}: {
  synthtraceEsClient: SynthtraceEsClient;
  synthtraceEsClient: ApmSynthtraceEsClient;
  start: number;
  end: number;
}) {
  const instance = service('synth-go', 'production', 'go').instance('instance-a');
  const instance = apm.service('synth-go', 'production', 'go').instance('instance-a');
  const { rate, transaction, span } = dataConfig;

  await synthtraceEsClient.index(
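For orientation, a hedged sketch of how such an index call typically completes, based on the builder chains visible in the other tests in this diff; the transaction fields on dataConfig are assumptions and the values are illustrative:

await synthtraceEsClient.index([
  ...timerange(start, end)
    .interval('1m')
    .rate(rate) // 20, from dataConfig
    .flatMap((timestamp) =>
      instance
        .transaction(transaction.name) // assumed field on dataConfig
        .timestamp(timestamp)
        .duration(transaction.duration) // assumed field on dataConfig
        .success()
        .serialize()
    ),
]);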
@ -4,7 +4,7 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import expect from '@kbn/expect';
import { mean, meanBy, sumBy } from 'lodash';
import { LatencyAggregationType } from '../../../../plugins/apm/common/latency_aggregation_types';

@ -121,9 +121,9 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const GO_PROD_ID_RATE = 50;
    const GO_PROD_ID_ERROR_RATE = 50;
    before(async () => {
      const serviceGoProdInstance = service(serviceName, 'production', 'go').instance(
        'instance-a'
      );
      const serviceGoProdInstance = apm
        .service(serviceName, 'production', 'go')
        .instance('instance-a');

      const transactionNameProductList = 'GET /api/product/list';
      const transactionNameProductId = 'GET /api/product/:id';
@ -5,7 +5,7 @@
 * 2.0.
 */
import expect from '@kbn/expect';
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import {
  APIClientRequestParamsOf,
  APIReturnType,

@ -72,7 +72,9 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    };

    before(async () => {
      const serviceInstance = service(serviceName, 'production', 'go').instance('instance-a');
      const serviceInstance = apm
        .service(serviceName, 'production', 'go')
        .instance('instance-a');

      await synthtraceEsClient.index([
        ...timerange(start, end)
@ -4,7 +4,8 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, SynthtraceEsClient, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import type { ApmSynthtraceEsClient } from '@elastic/apm-synthtrace';

export const config = {
  appleTransaction: {

@ -25,12 +26,12 @@ export async function generateData({
  start,
  end,
}: {
  synthtraceEsClient: SynthtraceEsClient;
  synthtraceEsClient: ApmSynthtraceEsClient;
  serviceName: string;
  start: number;
  end: number;
}) {
  const serviceGoProdInstance = service(serviceName, 'production', 'go').instance('instance-a');
  const serviceGoProdInstance = apm.service(serviceName, 'production', 'go').instance('instance-a');

  const interval = '1m';

@ -4,7 +4,7 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import expect from '@kbn/expect';
import { meanBy, sumBy } from 'lodash';
import { LatencyAggregationType } from '../../../../plugins/apm/common/latency_aggregation_types';

@ -123,12 +123,13 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const GO_PROD_DURATION = 1000;
    const GO_DEV_DURATION = 500;
    before(async () => {
      const serviceGoProdInstance = service(serviceName, 'production', 'go').instance(
        'instance-a'
      );
      const serviceGoDevInstance = service(serviceName, 'development', 'go').instance(
        'instance-b'
      );
      const serviceGoProdInstance = apm
        .service(serviceName, 'production', 'go')
        .instance('instance-a');
      const serviceGoDevInstance = apm
        .service(serviceName, 'development', 'go')
        .instance('instance-b');

      await synthtraceEsClient.index([
        ...timerange(start, end)
          .interval('1m')
@ -4,7 +4,7 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import expect from '@kbn/expect';
import { meanBy, sumBy } from 'lodash';
import { FtrProviderContext } from '../../common/ftr_provider_context';

@ -92,15 +92,16 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const GO_DEV_RATE = 5;
    const JAVA_PROD_RATE = 45;
    before(async () => {
      const serviceGoProdInstance = service('synth-go', 'production', 'go').instance(
        'instance-a'
      );
      const serviceGoDevInstance = service('synth-go', 'development', 'go').instance(
        'instance-b'
      );
      const serviceJavaInstance = service('synth-java', 'production', 'java').instance(
        'instance-c'
      );
      const serviceGoProdInstance = apm
        .service('synth-go', 'production', 'go')
        .instance('instance-a');
      const serviceGoDevInstance = apm
        .service('synth-go', 'development', 'go')
        .instance('instance-b');

      const serviceJavaInstance = apm
        .service('synth-java', 'production', 'java')
        .instance('instance-c');

      await synthtraceEsClient.index([
        ...timerange(start, end)
@ -8,7 +8,7 @@
import expect from '@kbn/expect';
import { pick, sortBy } from 'lodash';
import moment from 'moment';
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import { APIReturnType } from '../../../../plugins/apm/public/services/rest/createCallApmApi';
import { isFiniteNumber } from '../../../../plugins/apm/common/utils/is_finite_number';
import { FtrProviderContext } from '../../common/ftr_provider_context';

@ -298,8 +298,8 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const rangeEnd = new Date('2021-01-01T12:15:00.000Z').getTime() - 1;

    before(async () => {
      const goService = service('opbeans-go', 'production', 'go');
      const javaService = service('opbeans-java', 'production', 'java');
      const goService = apm.service('opbeans-go', 'production', 'go');
      const javaService = apm.service('opbeans-java', 'production', 'java');

      const goInstanceA = goService.instance('go-instance-a');
      const goInstanceB = goService.instance('go-instance-b');
@ -4,8 +4,8 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import type { SynthtraceEsClient } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import type { ApmSynthtraceEsClient } from '@elastic/apm-synthtrace';

export const config = {
  PROD_LIST_RATE: 75,

@ -22,12 +22,12 @@ export async function generateData({
  start,
  end,
}: {
  synthtraceEsClient: SynthtraceEsClient;
  synthtraceEsClient: ApmSynthtraceEsClient;
  serviceName: string;
  start: number;
  end: number;
}) {
  const serviceGoProdInstance = service(serviceName, 'production', 'go').instance('instance-a');
  const serviceGoProdInstance = apm.service(serviceName, 'production', 'go').instance('instance-a');

  const transactionNameProductList = 'GET /api/product/list';
  const transactionNameProductId = 'GET /api/product/:id';
@ -5,7 +5,7 @@
 * 2.0.
 */

import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import expect from '@kbn/expect';
import { first, last, meanBy } from 'lodash';
import moment from 'moment';

@ -73,16 +73,16 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const JAVA_PROD_RATE = 45;

    before(async () => {
      const serviceGoProdInstance = service(serviceName, 'production', 'go').instance(
        'instance-a'
      );
      const serviceGoDevInstance = service(serviceName, 'development', 'go').instance(
        'instance-b'
      );
      const serviceGoProdInstance = apm
        .service(serviceName, 'production', 'go')
        .instance('instance-a');
      const serviceGoDevInstance = apm
        .service(serviceName, 'development', 'go')
        .instance('instance-b');

      const serviceJavaInstance = service('synth-java', 'development', 'java').instance(
        'instance-c'
      );
      const serviceJavaInstance = apm
        .service('synth-java', 'development', 'java')
        .instance('instance-c');

      await synthtraceEsClient.index([
        ...timerange(start, end)
@ -7,7 +7,7 @@

import expect from '@kbn/expect';
import { sortBy } from 'lodash';
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import { APIReturnType } from '../../../../plugins/apm/public/services/rest/createCallApmApi';
import { PromiseReturnType } from '../../../../plugins/observability/typings/common';
import { FtrProviderContext } from '../../common/ftr_provider_context';

@ -65,21 +65,17 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const transactionInterval = range.interval('1s');
    const metricInterval = range.interval('30s');

    const multipleEnvServiceProdInstance = service(
      'multiple-env-service',
      'production',
      'go'
    ).instance('multiple-env-service-production');
    const multipleEnvServiceProdInstance = apm
      .service('multiple-env-service', 'production', 'go')
      .instance('multiple-env-service-production');

    const multipleEnvServiceDevInstance = service(
      'multiple-env-service',
      'development',
      'go'
    ).instance('multiple-env-service-development');
    const multipleEnvServiceDevInstance = apm
      .service('multiple-env-service', 'development', 'go')
      .instance('multiple-env-service-development');

    const metricOnlyInstance = service('metric-only-service', 'production', 'java').instance(
      'metric-only-production'
    );
    const metricOnlyInstance = apm
      .service('metric-only-service', 'production', 'java')
      .instance('metric-only-production');

    const config = {
      multiple: {
@ -4,7 +4,7 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import expect from '@kbn/expect';
import { meanBy, sumBy } from 'lodash';
import { BackendNode, ServiceNode } from '../../../../plugins/apm/common/connections';

@ -94,12 +94,12 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const GO_PROD_RATE = 75;
    const JAVA_PROD_RATE = 25;
    before(async () => {
      const serviceGoProdInstance = service('synth-go', 'production', 'go').instance(
        'instance-a'
      );
      const serviceJavaInstance = service('synth-java', 'development', 'java').instance(
        'instance-c'
      );
      const serviceGoProdInstance = apm
        .service('synth-go', 'production', 'go')
        .instance('instance-a');
      const serviceJavaInstance = apm
        .service('synth-java', 'development', 'java')
        .instance('instance-c');

      await synthtraceEsClient.index([
        ...timerange(start, end)
@ -4,7 +4,7 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import expect from '@kbn/expect';
import { meanBy, sumBy } from 'lodash';
import { LatencyAggregationType } from '../../../../plugins/apm/common/latency_aggregation_types';

@ -109,12 +109,13 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const GO_PROD_RATE = 80;
    const GO_DEV_RATE = 20;
    before(async () => {
      const serviceGoProdInstance = service(serviceName, 'production', 'go').instance(
        'instance-a'
      );
      const serviceGoDevInstance = service(serviceName, 'development', 'go').instance(
        'instance-b'
      );
      const serviceGoProdInstance = apm
        .service(serviceName, 'production', 'go')
        .instance('instance-a');
      const serviceGoDevInstance = apm
        .service(serviceName, 'development', 'go')
        .instance('instance-b');

      await synthtraceEsClient.index([
        ...timerange(start, end)
          .interval('1m')
@ -4,7 +4,7 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { service, timerange } from '@elastic/apm-synthtrace';
import { apm, timerange } from '@elastic/apm-synthtrace';
import expect from '@kbn/expect';
import { first, isEmpty, last, meanBy } from 'lodash';
import moment from 'moment';

@ -84,9 +84,9 @@ export default function ApiTest({ getService }: FtrProviderContext) {
    const GO_PROD_RATE = 75;
    const GO_PROD_ERROR_RATE = 25;
    before(async () => {
      const serviceGoProdInstance = service(serviceName, 'production', 'go').instance(
        'instance-a'
      );
      const serviceGoProdInstance = apm
        .service(serviceName, 'production', 'go')
        .instance('instance-a');

      const transactionName = 'GET /api/product/list';