Mirror of https://github.com/elastic/kibana.git
[scalability testing] filtering out setup/teardown related APM transactions (#134697)
* [packages/kbn-performance-testing-dataset-extractor] filter server transactions by the journey time range
* check that only one FTR transaction is found per run
* fixes
parent f59fd4a343
commit e57f207aad

3 changed files with 100 additions and 53 deletions
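The change touches the package's Bazel runtime deps (moment is added), the Elasticsearch client module, and the extractor entry point. The idea: the single 'functional test runner' transaction recorded for a journey marks when the browser session started and how long it ran, so its timestamp plus its duration bound the window in which the journey's real Kibana server traffic happened, and server transactions outside that window are setup/teardown noise. Below is a rough standalone sketch of that window logic; the helper names and the client-side filter are illustrative only, while the actual commit computes the window in a new calculateTransactionTimeRage helper and pushes the range check into the Elasticsearch query, as the diff shows.

import moment from 'moment';

// Shape shared with the diff: APM documents carry a timestamp and a duration in microseconds.
interface ApmDoc {
  '@timestamp': string;
  transaction: { duration: { us: number } };
}

// The journey window: start = the FTR transaction's timestamp, end = start + duration.
const journeyWindow = (ftrDoc: ApmDoc) => {
  const startTime = ftrDoc['@timestamp'];
  const endTime = moment(startTime)
    .add(ftrDoc.transaction.duration.us / 1000, 'milliseconds') // microseconds -> milliseconds
    .toISOString();
  return { startTime, endTime };
};

// Client-side equivalent of the new range filter, for illustration only: keep documents
// whose timestamp falls inside the window (ISO strings of equal precision compare lexicographically).
const withinWindow = (docs: ApmDoc[], range: { startTime: string; endTime: string }) =>
  docs.filter((d) => d['@timestamp'] >= range.startTime && d['@timestamp'] <= range.endTime);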
@@ -41,6 +41,7 @@ RUNTIME_DEPS = [
   "//packages/kbn-test",
   "//packages/kbn-tooling-log",
   "@npm//@elastic/elasticsearch",
+  "@npm//moment",
 ]
 
 # In this array place dependencies necessary to build the types, which will include the
@@ -7,6 +7,7 @@
  */
 
 import { Client } from '@elastic/elasticsearch';
+import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/types';
 
 interface ClientOptions {
   node: string;

@@ -33,6 +34,7 @@ interface Transaction {
   id: string;
   name: string;
   type: string;
+  duration: { us: number };
 }
 
 export interface Document {

@@ -52,6 +54,33 @@ export interface Document {
   transaction: Transaction;
 }
 
+const addBooleanFilter = (filter: { field: string; value: string }): QueryDslQueryContainer => {
+  return {
+    bool: {
+      should: [
+        {
+          match_phrase: {
+            [filter.field]: filter.value,
+          },
+        },
+      ],
+      minimum_should_match: 1,
+    },
+  };
+};
+
+const addRangeFilter = (range: { startTime: string; endTime: string }): QueryDslQueryContainer => {
+  return {
+    range: {
+      '@timestamp': {
+        format: 'strict_date_optional_time',
+        gte: range.startTime,
+        lte: range.endTime,
+      },
+    },
+  };
+};
+
 export function initClient(options: ClientOptions) {
   const client = new Client({
     node: options.node,

@@ -62,7 +91,34 @@ export function initClient(options: ClientOptions) {
   });
 
   return {
-    async getTransactions(buildId: string, journeyName: string) {
+    async getKibanaServerTransactions(
+      buildId: string,
+      journeyName: string,
+      range?: { startTime: string; endTime: string }
+    ) {
+      const filters = [
+        { field: 'transaction.type', value: 'request' },
+        { field: 'processor.event', value: 'transaction' },
+        { field: 'labels.testBuildId', value: buildId },
+        { field: 'labels.journeyName', value: journeyName },
+      ];
+      const queryFilters = filters.map((filter) => addBooleanFilter(filter));
+      if (range) {
+        queryFilters.push(addRangeFilter(range));
+      }
+      return await this.getTransactions(queryFilters);
+    },
+    async getFtrTransactions(buildId: string, journeyName: string) {
+      const filters = [
+        { field: 'service.name', value: 'functional test runner' },
+        { field: 'processor.event', value: 'transaction' },
+        { field: 'labels.testBuildId', value: buildId },
+        { field: 'labels.journeyName', value: journeyName },
+      ];
+      const queryFilters = filters.map((filter) => addBooleanFilter(filter));
+      return await this.getTransactions(queryFilters);
+    },
+    async getTransactions(queryFilters: QueryDslQueryContainer[]) {
       const result = await client.search<Document>({
         body: {
           track_total_hits: true,

@@ -83,56 +139,7 @@
              filter: [
                {
                  bool: {
-                    filter: [
-                      {
-                        bool: {
-                          should: [
-                            {
-                              match_phrase: {
-                                'transaction.type': 'request',
-                              },
-                            },
-                          ],
-                          minimum_should_match: 1,
-                        },
-                      },
-                      {
-                        bool: {
-                          should: [
-                            {
-                              match_phrase: {
-                                'processor.event': 'transaction',
-                              },
-                            },
-                          ],
-                          minimum_should_match: 1,
-                        },
-                      },
-                      {
-                        bool: {
-                          should: [
-                            {
-                              match_phrase: {
-                                'labels.testBuildId': buildId,
-                              },
-                            },
-                          ],
-                          minimum_should_match: 1,
-                        },
-                      },
-                      {
-                        bool: {
-                          should: [
-                            {
-                              match_phrase: {
-                                'labels.journeyName': journeyName,
-                              },
-                            },
-                          ],
-                          minimum_should_match: 1,
-                        },
-                      },
-                    ],
+                    filter: queryFilters,
                  },
                },
              ],
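The two helpers added above replace the four hand-written match_phrase blocks that the old query built inline (removed in the last hunk of this file). For orientation, this is roughly what one boolean clause and the new time-range clause expand to once they land in the query's filter array; the journey name and timestamps below are made-up examples:

import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/types';

// What addBooleanFilter({ field: 'labels.journeyName', value: 'my_journey' }) returns
// ('my_journey' is a placeholder journey name).
const journeyNameClause: QueryDslQueryContainer = {
  bool: {
    should: [{ match_phrase: { 'labels.journeyName': 'my_journey' } }],
    minimum_should_match: 1,
  },
};

// What addRangeFilter({ startTime, endTime }) returns: the clause that drops
// transactions recorded outside the journey window (example timestamps).
const journeyWindowClause: QueryDslQueryContainer = {
  range: {
    '@timestamp': {
      format: 'strict_date_optional_time',
      gte: '2022-06-20T10:15:00.000Z',
      lte: '2022-06-20T10:16:32.500Z',
    },
  },
};

// Both end up in the array the refactored query consumes as `filter: queryFilters`.
const queryFilters: QueryDslQueryContainer[] = [journeyNameClause, journeyWindowClause];

The same four boolean filters are still applied; they are just built once by addBooleanFilter instead of being repeated inline, and getKibanaServerTransactions appends the range clause only when a time range is passed.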
@@ -7,11 +7,15 @@
  */
 
 import fs from 'fs/promises';
+import moment from 'moment';
 import { existsSync } from 'fs';
 import path from 'path';
 import { ToolingLog } from '@kbn/tooling-log';
+import { SearchHit } from '@elastic/elasticsearch/lib/api/types';
 import { initClient, Document } from './es_client';
 
+const DATE_FORMAT = `YYYY-MM-DD'T'HH:mm:ss.SSS'Z'`;
+
 interface CLIParams {
   param: {
     journeyName: string;

@@ -39,6 +43,24 @@ export interface ScalabilitySetup {
   maxDuration: string;
 }
 
+const parsePayload = (payload: string, traceId: string, log: ToolingLog): string | undefined => {
+  let body;
+  try {
+    body = JSON.parse(payload);
+  } catch (error) {
+    log.error(`Failed to parse payload - trace_id: '${traceId}'`);
+  }
+  return body;
+};
+
+const calculateTransactionTimeRage = (hit: SearchHit<Document>) => {
+  const trSource = hit._source as Document;
+  const startTime = trSource['@timestamp'];
+  const duration = trSource.transaction.duration.us / 1000; // convert microseconds to milliseconds
+  const endTime = moment(startTime, DATE_FORMAT).add(duration, 'milliseconds').toISOString();
+  return { startTime, endTime };
+};
+
 export const extractor = async ({ param, client, log }: CLIParams) => {
   const authOptions = {
     node: client.baseURL,

@@ -50,7 +72,23 @@
     `Searching transactions with 'labels.testBuildId=${buildId}' and 'labels.journeyName=${journeyName}'`
   );
   const esClient = initClient(authOptions);
-  const hits = await esClient.getTransactions(buildId, journeyName);
+  const ftrTransactionHits = await esClient.getFtrTransactions(buildId, journeyName);
+  if (!ftrTransactionHits || ftrTransactionHits.length === 0) {
+    log.warning(
+      `No transactions found. Can't calculate journey time range, output file won't be generated.`
+    );
+    return;
+  }
+
+  // There should be a single top-level transaction, representing journey browser starting time and session duration.
+  if (ftrTransactionHits.length > 1) {
+    log.warning(`Filtering doesn't work, more than 1 'functional test runner' transaction found`);
+    return;
+  }
+
+  const timeRange = calculateTransactionTimeRage(ftrTransactionHits[0]);
+  // Filtering out setup/teardown related transactions by time range from 'functional test runner' transaction
+  const hits = await esClient.getKibanaServerTransactions(buildId, journeyName, timeRange);
   if (!hits || hits.length === 0) {
     log.warning(`No transactions found. Output file won't be generated.`);
     return;

@@ -62,6 +100,7 @@
   const data = hits
     .map((hit) => hit!._source as Document)
     .map((hit) => {
+      const payload = hit.http.request?.body?.original;
       return {
         processor: hit.processor,
         traceId: hit.trace.id,

@@ -71,7 +110,7 @@
           url: { path: hit.url.path },
           headers: hit.http.request.headers,
           method: hit.http.request.method,
-          body: hit.http.request.body ? JSON.parse(hit.http.request.body.original) : '',
+          body: payload ? parsePayload(payload, hit.trace.id, log) : undefined,
         },
         response: { statusCode: hit.http.response.status_code },
         transaction: {
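As a quick sanity check of the time-range arithmetic in calculateTransactionTimeRage (the numbers are made up, and this sketch uses moment's default ISO parsing rather than the DATE_FORMAT constant):

import moment from 'moment';

// Say the FTR transaction started at 10:15:00.000 UTC and ran for 92.5 seconds;
// APM stores the duration in microseconds.
const startTime = '2022-06-20T10:15:00.000Z';
const durationUs = 92_500_000;

// Same conversion as in the diff: microseconds -> milliseconds, then add to the start time.
const endTime = moment(startTime).add(durationUs / 1000, 'milliseconds').toISOString();

console.log({ startTime, endTime }); // endTime === '2022-06-20T10:16:32.500Z'

Server transactions stamped before 10:15:00.000Z or after 10:16:32.500Z, i.e. the setup and teardown API calls, fall outside the gte/lte bounds of the range clause and no longer end up in the generated dataset.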