Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)
# Backport

This will backport the following commits from `main` to `8.17`:

- [[APM] Migrate traces tests to deployment agnostic (#200561)](https://github.com/elastic/kibana/pull/200561)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sqren/backport).

## Summary

Closes [#198995](https://github.com/elastic/kibana/issues/198995)
Closes https://github.com/elastic/kibana/issues/198996
Part of https://github.com/elastic/kibana/issues/193245

This PR migrates the `traces` test folder to the deployment-agnostic testing strategy. It also adds support for deployment-agnostic snapshot validation.

### How to test

- Serverless

```
node scripts/functional_tests_server --config x-pack/test/api_integration/deployment_agnostic/configs/serverless/oblt.serverless.config.ts
node scripts/functional_test_runner --config x-pack/test/api_integration/deployment_agnostic/configs/serverless/oblt.serverless.config.ts --grep="APM"
```

It is recommended to run the serverless tests against [MKI](https://github.com/crespocarlos/kibana/blob/main/x-pack/test_serverless/README.md#run-tests-on-mki).

- Stateful

```
node scripts/functional_tests_server --config x-pack/test/api_integration/deployment_agnostic/configs/stateful/oblt.stateful.config.ts
node scripts/functional_test_runner --config x-pack/test/api_integration/deployment_agnostic/configs/stateful/oblt.stateful.config.ts --grep="APM"
```

- [ ] ~(OPTIONAL, only if a test has been unskipped) Run flaky test suite~
- [x] Local run for serverless
- [x] Local run for stateful
- [x] MKI run for serverless

---------

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Co-authored-by: Carlos Crespo <crespocarlos@users.noreply.github.com>
This commit is contained in:
parent 02b4376048
commit 4688d25dfa
22 changed files with 1572 additions and 1885 deletions
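The diff below is easier to follow with the target shape in mind. What follows is a minimal sketch of the structure the migrated specs converge on, assembled from patterns visible in the changed files (`DeploymentAgnosticFtrProviderContext`, the `apmApi` and `synthtrace` services, and the lazily created synthtrace ES client); the suite name, test name, and the relative import path are illustrative only.

```ts
// Illustrative sketch only — mirrors the structure of the migrated specs in this commit.
import expect from '@kbn/expect';
import type { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';

export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
  // Deployment-agnostic services replace the old `apmApiClient`/`apmSynthtraceEsClient` FTR services.
  const apmApiClient = getService('apmApi');
  const synthtrace = getService('synthtrace');

  describe('Example suite', () => {
    let apmSynthtraceEsClient: ApmSynthtraceEsClient;

    before(async () => {
      // The synthtrace ES client is now created per suite instead of being injected globally.
      apmSynthtraceEsClient = await synthtrace.createApmSynthtraceEsClient();
    });

    after(() => apmSynthtraceEsClient.clean());

    it('handles the empty state', async () => {
      const response = await apmApiClient.readUser({
        endpoint: 'GET /internal/apm/traces/{traceId}',
        params: {
          path: { traceId: 'foo' },
          query: {
            start: new Date('2022-01-01T00:00:00.000Z').toISOString(),
            end: new Date('2022-01-01T00:15:00.000Z').toISOString(),
            entryTransactionId: 'foo',
          },
        },
      });

      expect(response.status).to.be(200);
    });
  });
}
```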
@@ -57,7 +57,11 @@ export const loadTests = ({
     updateBaselines,
   };

-  decorateSnapshotUi({ lifecycle, updateSnapshots, isCi: !!process.env.CI });
+  decorateSnapshotUi({
+    lifecycle,
+    updateSnapshots,
+    isCi: !!process.env.CI,
+  });

   function loadTestFile(path: string) {
     if (typeof path !== 'string' || !isAbsolute(path)) {
@@ -33,11 +33,13 @@ const globalState: {
   registered: boolean;
   currentTest: Test | null;
   snapshotStates: Record<string, ISnapshotState>;
+  deploymentAgnostic: boolean;
 } = {
   updateSnapshot: 'none',
   registered: false,
   currentTest: null,
   snapshotStates: {},
+  deploymentAgnostic: false,
 };

 const modifyStackTracePrepareOnce = once(() => {
@@ -125,7 +127,7 @@ export function decorateSnapshotUi({
       const snapshotState = globalState.snapshotStates[file];

       if (snapshotState && !test.isPassed()) {
-        snapshotState.markSnapshotsAsCheckedForTest(test.fullTitle());
+        snapshotState.markSnapshotsAsCheckedForTest(getTestTitle(test));
       }
     });
@@ -194,7 +196,7 @@ export function expectSnapshot(received: any) {

   const context: SnapshotContext = {
     snapshotState,
-    currentTestName: test.fullTitle(),
+    currentTestName: getTestTitle(test),
   };

   return {
@@ -204,6 +206,18 @@ export function expectSnapshot(received: any) {
   };
 }

+function getTestTitle(test: Test) {
+  return (
+    test
+      .fullTitle()
+      // remove deployment type from test title so that a single snapshot can be used for all deployment types
+      .replace(
+        /^(Serverless|Stateful)\s+([^\-]+)\s*-?\s*Deployment-agnostic/g,
+        'Deployment-agnostic'
+      )
+  );
+}
+
 function expectToMatchSnapshot(snapshotContext: SnapshotContext, received: any) {
   const matcher = toMatchSnapshot.bind(snapshotContext as any);
   const result = matcher(received) as SyncExpectationResult;
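As a reading aid, the title rewrite added in the hunk above behaves roughly as follows; the sample titles are hypothetical, only the regular expression itself comes from the change.

```ts
// Hypothetical titles — shown only to illustrate the regex in the hunk above.
const normalize = (title: string) =>
  title.replace(
    /^(Serverless|Stateful)\s+([^\-]+)\s*-?\s*Deployment-agnostic/g,
    'Deployment-agnostic'
  );

// Both deployment flavours of the same test collapse onto one snapshot key:
normalize('Serverless observability - Deployment-agnostic APM API tests Top traces');
// => 'Deployment-agnostic APM API tests Top traces'
normalize('Stateful observability - Deployment-agnostic APM API tests Top traces');
// => 'Deployment-agnostic APM API tests Top traces'
```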
@@ -40,6 +40,7 @@ export default function apmApiIntegrationTests({
     loadTestFile(require.resolve('./suggestions'));
     loadTestFile(require.resolve('./throughput'));
     loadTestFile(require.resolve('./time_range_metadata'));
+    loadTestFile(require.resolve('./traces'));
     loadTestFile(require.resolve('./transactions'));
   });
 }
File diff suppressed because it is too large.
@@ -10,13 +10,13 @@ import expect from '@kbn/expect';
 import { Assign } from '@kbn/utility-types';
 import { compact, invert, sortBy, uniq } from 'lodash';
 import { Readable } from 'stream';
-import { SupertestReturnType } from '../../common/apm_api_supertest';
-import { FtrProviderContext } from '../../common/ftr_provider_context';
+import type { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace';
+import type { SupertestReturnType } from '../../../../services/apm_api';
+import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';

-export default function ApiTest({ getService }: FtrProviderContext) {
-  const registry = getService('registry');
-  const apmApiClient = getService('apmApiClient');
-  const apmSynthtraceEsClient = getService('apmSynthtraceEsClient');
+export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
+  const apmApiClient = getService('apmApi');
+  const synthtrace = getService('synthtrace');

   const start = new Date('2022-01-01T00:00:00.000Z').getTime();
   const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;
@ -33,79 +33,86 @@ export default function ApiTest({ getService }: FtrProviderContext) {
|
|||
children: FormattedNode[];
|
||||
}
|
||||
|
||||
// format tree in somewhat concise format for easier testing
|
||||
function formatTree(nodes: HydratedNode[]): FormattedNode[] {
|
||||
return sortBy(
|
||||
nodes.map((node) => {
|
||||
const name =
|
||||
node.metadata?.['processor.event'] === 'transaction'
|
||||
? node.metadata['transaction.name']
|
||||
: node.metadata?.['span.name'] || 'root';
|
||||
return { name, value: node.countExclusive, children: formatTree(node.children) };
|
||||
}),
|
||||
(node) => node.name
|
||||
);
|
||||
}
|
||||
describe('Aggregated critical path', () => {
|
||||
let apmSynthtraceEsClient: ApmSynthtraceEsClient;
|
||||
|
||||
async function fetchAndBuildCriticalPathTree(
|
||||
options: { fn: () => SynthtraceGenerator<ApmFields> } & (
|
||||
| { serviceName: string; transactionName: string }
|
||||
| {}
|
||||
)
|
||||
) {
|
||||
const { fn } = options;
|
||||
// format tree in somewhat concise format for easier testing
|
||||
function formatTree(nodes: HydratedNode[]): FormattedNode[] {
|
||||
return sortBy(
|
||||
nodes.map((node) => {
|
||||
const name =
|
||||
node.metadata?.['processor.event'] === 'transaction'
|
||||
? node.metadata['transaction.name']
|
||||
: node.metadata?.['span.name'] || 'root';
|
||||
return { name, value: node.countExclusive, children: formatTree(node.children) };
|
||||
}),
|
||||
(node) => node.name
|
||||
);
|
||||
}
|
||||
|
||||
const generator = fn();
|
||||
async function fetchAndBuildCriticalPathTree(
|
||||
options: { fn: () => SynthtraceGenerator<ApmFields> } & (
|
||||
| { serviceName: string; transactionName: string }
|
||||
| {}
|
||||
)
|
||||
) {
|
||||
const { fn } = options;
|
||||
|
||||
const unserialized = Array.from(generator);
|
||||
const generator = fn();
|
||||
|
||||
const serialized = unserialized.flatMap((event) => event.serialize());
|
||||
const unserialized = Array.from(generator);
|
||||
|
||||
const traceIds = compact(uniq(serialized.map((event) => event['trace.id'])));
|
||||
const serialized = unserialized.flatMap((event) => event.serialize());
|
||||
|
||||
await apmSynthtraceEsClient.index(Readable.from(unserialized));
|
||||
const traceIds = compact(uniq(serialized.map((event) => event['trace.id'])));
|
||||
|
||||
return apmApiClient
|
||||
.readUser({
|
||||
endpoint: 'POST /internal/apm/traces/aggregated_critical_path',
|
||||
params: {
|
||||
body: {
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(end).toISOString(),
|
||||
traceIds,
|
||||
serviceName: 'serviceName' in options ? options.serviceName : null,
|
||||
transactionName: 'transactionName' in options ? options.transactionName : null,
|
||||
await apmSynthtraceEsClient.index(Readable.from(unserialized));
|
||||
|
||||
return apmApiClient
|
||||
.readUser({
|
||||
endpoint: 'POST /internal/apm/traces/aggregated_critical_path',
|
||||
params: {
|
||||
body: {
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(end).toISOString(),
|
||||
traceIds,
|
||||
serviceName: 'serviceName' in options ? options.serviceName : null,
|
||||
transactionName: 'transactionName' in options ? options.transactionName : null,
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
.then((response) => {
|
||||
const criticalPath = response.body.criticalPath!;
|
||||
})
|
||||
.then((response) => {
|
||||
const criticalPath = response.body.criticalPath!;
|
||||
|
||||
const nodeIdByOperationId = invert(criticalPath.operationIdByNodeId);
|
||||
const nodeIdByOperationId = invert(criticalPath.operationIdByNodeId);
|
||||
|
||||
const { rootNodes, maxDepth } = getAggregatedCriticalPathRootNodes({
|
||||
criticalPath,
|
||||
});
|
||||
const { rootNodes, maxDepth } = getAggregatedCriticalPathRootNodes({
|
||||
criticalPath,
|
||||
});
|
||||
|
||||
function hydrateNode(node: Node): HydratedNode {
|
||||
return {
|
||||
...node,
|
||||
metadata: criticalPath.metadata[criticalPath.operationIdByNodeId[node.nodeId]],
|
||||
children: node.children.map(hydrateNode),
|
||||
};
|
||||
}
|
||||
|
||||
function hydrateNode(node: Node): HydratedNode {
|
||||
return {
|
||||
...node,
|
||||
metadata: criticalPath.metadata[criticalPath.operationIdByNodeId[node.nodeId]],
|
||||
children: node.children.map(hydrateNode),
|
||||
rootNodes: rootNodes.map(hydrateNode),
|
||||
maxDepth,
|
||||
criticalPath,
|
||||
nodeIdByOperationId,
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
rootNodes: rootNodes.map(hydrateNode),
|
||||
maxDepth,
|
||||
criticalPath,
|
||||
nodeIdByOperationId,
|
||||
};
|
||||
});
|
||||
}
|
||||
before(async () => {
|
||||
apmSynthtraceEsClient = await synthtrace.createApmSynthtraceEsClient();
|
||||
});
|
||||
|
||||
after(() => apmSynthtraceEsClient.clean());
|
||||
|
||||
// FLAKY: https://github.com/elastic/kibana/issues/177542
|
||||
registry.when('Aggregated critical path', { config: 'basic', archives: [] }, () => {
|
||||
it('builds up the correct tree for a single transaction', async () => {
|
||||
const java = apm
|
||||
.service({ name: 'java', environment: 'production', agentName: 'java' })
|
||||
|
@@ -427,7 +434,5 @@ export default function ApiTest({ getService }: FtrProviderContext) {
         },
       ]);
     });
-
-    after(() => apmSynthtraceEsClient.clean());
   });
 }
@ -0,0 +1,232 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { apm, timerange } from '@kbn/apm-synthtrace-client';
|
||||
import expect from '@kbn/expect';
|
||||
import { TraceSearchType } from '@kbn/apm-plugin/common/trace_explorer';
|
||||
import { Environment } from '@kbn/apm-plugin/common/environment_rt';
|
||||
import { ENVIRONMENT_ALL } from '@kbn/apm-plugin/common/environment_filter_values';
|
||||
import { sortBy } from 'lodash';
|
||||
import type { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace';
|
||||
import type { ApmApiError } from '../../../../services/apm_api';
|
||||
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
|
||||
import { generateTrace } from './generate_trace';
|
||||
|
||||
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
|
||||
const apmApiClient = getService('apmApi');
|
||||
const synthtrace = getService('synthtrace');
|
||||
|
||||
const start = new Date('2022-01-01T00:00:00.000Z').getTime();
|
||||
const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;
|
||||
|
||||
// for EQL sequences to work, events need a slight time offset,
|
||||
// as ES will sort based on @timestamp. to acommodate this offset
|
||||
// we also add a little bit of a buffer to the requested time range
|
||||
const endWithOffset = end + 100000;
|
||||
|
||||
describe('Find traces', () => {
|
||||
let apmSynthtraceEsClient: ApmSynthtraceEsClient;
|
||||
|
||||
async function fetchTraceSamples({
|
||||
query,
|
||||
type,
|
||||
environment,
|
||||
}: {
|
||||
query: string;
|
||||
type: TraceSearchType;
|
||||
environment: Environment;
|
||||
}) {
|
||||
return apmApiClient.readUser({
|
||||
endpoint: `GET /internal/apm/traces/find`,
|
||||
params: {
|
||||
query: {
|
||||
query,
|
||||
type,
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(endWithOffset).toISOString(),
|
||||
environment,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function fetchTraces(traceSamples: Array<{ traceId: string; transactionId: string }>) {
|
||||
if (!traceSamples.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return Promise.all(
|
||||
traceSamples.map(async ({ traceId, transactionId }) => {
|
||||
const response = await apmApiClient.readUser({
|
||||
endpoint: `GET /internal/apm/traces/{traceId}`,
|
||||
params: {
|
||||
path: { traceId },
|
||||
query: {
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(endWithOffset).toISOString(),
|
||||
entryTransactionId: transactionId,
|
||||
},
|
||||
},
|
||||
});
|
||||
return response.body.traceItems.traceDocs;
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
before(async () => {
|
||||
apmSynthtraceEsClient = await synthtrace.createApmSynthtraceEsClient();
|
||||
});
|
||||
|
||||
after(() => apmSynthtraceEsClient.clean());
|
||||
|
||||
describe('when traces do not exist', () => {
|
||||
it('handles empty state', async () => {
|
||||
const response = await fetchTraceSamples({
|
||||
query: '',
|
||||
type: TraceSearchType.kql,
|
||||
environment: ENVIRONMENT_ALL.value,
|
||||
});
|
||||
|
||||
expect(response.status).to.be(200);
|
||||
expect(response.body).to.eql({
|
||||
traceSamples: [],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when traces exist', () => {
|
||||
before(() => {
|
||||
const java = apm
|
||||
.service({ name: 'java', environment: 'production', agentName: 'java' })
|
||||
.instance('java');
|
||||
|
||||
const node = apm
|
||||
.service({ name: 'node', environment: 'development', agentName: 'nodejs' })
|
||||
.instance('node');
|
||||
|
||||
const python = apm
|
||||
.service({ name: 'python', environment: 'production', agentName: 'python' })
|
||||
.instance('python');
|
||||
|
||||
return apmSynthtraceEsClient.index(
|
||||
timerange(start, end)
|
||||
.interval('15m')
|
||||
.rate(1)
|
||||
.generator((timestamp) => {
|
||||
return [
|
||||
generateTrace(timestamp, [java, node]),
|
||||
generateTrace(timestamp, [node, java], 'redis'),
|
||||
generateTrace(timestamp, [python], 'redis'),
|
||||
generateTrace(timestamp, [python, node, java], 'elasticsearch'),
|
||||
generateTrace(timestamp, [java, python, node]),
|
||||
];
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
describe('when using KQL', () => {
|
||||
describe('and the query is empty', () => {
|
||||
it('returns all trace samples', async () => {
|
||||
const {
|
||||
body: { traceSamples },
|
||||
} = await fetchTraceSamples({
|
||||
query: '',
|
||||
type: TraceSearchType.kql,
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
});
|
||||
|
||||
expect(traceSamples.length).to.eql(5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('and query is set', () => {
|
||||
it('returns the relevant traces', async () => {
|
||||
const {
|
||||
body: { traceSamples },
|
||||
} = await fetchTraceSamples({
|
||||
query: 'span.destination.service.resource:elasticsearch',
|
||||
type: TraceSearchType.kql,
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
});
|
||||
|
||||
expect(traceSamples.length).to.eql(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when using EQL', () => {
|
||||
describe('and the query is invalid', () => {
|
||||
it.skip('returns a 400', async function () {
|
||||
try {
|
||||
await fetchTraceSamples({
|
||||
query: '',
|
||||
type: TraceSearchType.eql,
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
});
|
||||
this.fail();
|
||||
} catch (error: unknown) {
|
||||
const apiError = error as ApmApiError;
|
||||
expect(apiError.res.status).to.eql(400);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('and the query is set', () => {
|
||||
it('returns the correct trace samples for transaction sequences', async () => {
|
||||
const {
|
||||
body: { traceSamples },
|
||||
} = await fetchTraceSamples({
|
||||
query: `sequence by trace.id
|
||||
[ transaction where service.name == "java" ]
|
||||
[ transaction where service.name == "node" ]`,
|
||||
type: TraceSearchType.eql,
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
});
|
||||
|
||||
const traces = await fetchTraces(traceSamples);
|
||||
|
||||
expect(traces.length).to.eql(2);
|
||||
|
||||
const mapped = traces.map((traceDocs) => {
|
||||
return sortBy(traceDocs, '@timestamp')
|
||||
.filter((doc) => doc.processor.event === 'transaction')
|
||||
.map((doc) => doc.service.name);
|
||||
});
|
||||
|
||||
expect(mapped).to.eql([
|
||||
['java', 'node'],
|
||||
['java', 'python', 'node'],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
it('returns the correct trace samples for join sequences', async () => {
|
||||
const {
|
||||
body: { traceSamples },
|
||||
} = await fetchTraceSamples({
|
||||
query: `sequence by trace.id
|
||||
[ span where service.name == "java" ] by span.id
|
||||
[ transaction where service.name == "python" ] by parent.id`,
|
||||
type: TraceSearchType.eql,
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
});
|
||||
|
||||
const traces = await fetchTraces(traceSamples);
|
||||
|
||||
expect(traces.length).to.eql(1);
|
||||
|
||||
const mapped = traces.map((traceDocs) => {
|
||||
return sortBy(traceDocs, '@timestamp')
|
||||
.filter((doc) => doc.processor.event === 'transaction')
|
||||
.map((doc) => doc.service.name);
|
||||
});
|
||||
|
||||
expect(mapped).to.eql([['java', 'python', 'node']]);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
|
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
+
+export default function ({ loadTestFile }: DeploymentAgnosticFtrProviderContext) {
+  describe('Traces', () => {
+    loadTestFile(require.resolve('./large_trace/large_trace.spec.ts'));
+    loadTestFile(require.resolve('./critical_path.spec.ts'));
+    loadTestFile(require.resolve('./find_traces.spec.ts'));
+    loadTestFile(require.resolve('./span_details.spec.ts'));
+    loadTestFile(require.resolve('./top_traces.spec.ts'));
+    loadTestFile(require.resolve('./trace_by_id.spec.ts'));
+    loadTestFile(require.resolve('./transaction_details.spec.ts'));
+  });
+}
@@ -14,8 +14,9 @@ import {
 import type { Client } from '@elastic/elasticsearch';
 import type { APIReturnType } from '@kbn/apm-plugin/public/services/rest/create_call_apm_api';
 import expect from '@kbn/expect';
-import { ApmApiClient } from '../../../common/config';
-import { FtrProviderContext } from '../../../common/ftr_provider_context';
+import type { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace';
+import type { ApmApiClient } from '../../../../../services/apm_api';
+import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context';
 import { generateLargeTrace } from './generate_large_trace';

 const start = new Date('2023-01-01T00:00:00.000Z').getTime();
@@ -23,16 +24,17 @@ const end = new Date('2023-01-01T00:01:00.000Z').getTime() - 1;
 const rootTransactionName = 'Long trace';
 const environment = 'long_trace_scenario';

-export default function ApiTest({ getService }: FtrProviderContext) {
-  const registry = getService('registry');
-  const apmApiClient = getService('apmApiClient');
-  const apmSynthtraceEsClient = getService('apmSynthtraceEsClient');
+export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
+  const apmApiClient = getService('apmApi');
+  const synthtrace = getService('synthtrace');
   const es = getService('es');

-  // FLAKY: https://github.com/elastic/kibana/issues/177660
-  registry.when('Large trace', { config: 'basic', archives: [] }, () => {
+  describe('Large trace', () => {
+    let apmSynthtraceEsClient: ApmSynthtraceEsClient;
+
     describe('when the trace is large (>15.000 items)', () => {
-      before(() => {
+      before(async () => {
+        apmSynthtraceEsClient = await synthtrace.createApmSynthtraceEsClient();
         return generateLargeTrace({
           start,
           end,
@ -0,0 +1,134 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { apm, timerange } from '@kbn/apm-synthtrace-client';
|
||||
import expect from '@kbn/expect';
|
||||
import { Readable } from 'stream';
|
||||
import type { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace';
|
||||
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
|
||||
|
||||
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
|
||||
const apmApiClient = getService('apmApi');
|
||||
const synthtrace = getService('synthtrace');
|
||||
|
||||
const start = new Date('2022-01-01T00:00:00.000Z').getTime();
|
||||
const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;
|
||||
|
||||
describe('Span details', () => {
|
||||
let apmSynthtraceEsClient: ApmSynthtraceEsClient;
|
||||
|
||||
async function fetchSpanDetails({
|
||||
traceId,
|
||||
spanId,
|
||||
parentTransactionId,
|
||||
}: {
|
||||
traceId: string;
|
||||
spanId: string;
|
||||
parentTransactionId?: string;
|
||||
}) {
|
||||
return await apmApiClient.readUser({
|
||||
endpoint: `GET /internal/apm/traces/{traceId}/spans/{spanId}`,
|
||||
params: {
|
||||
path: { traceId, spanId },
|
||||
query: {
|
||||
parentTransactionId,
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(end).toISOString(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
before(async () => {
|
||||
apmSynthtraceEsClient = await synthtrace.createApmSynthtraceEsClient();
|
||||
});
|
||||
|
||||
after(() => apmSynthtraceEsClient.clean());
|
||||
|
||||
describe('when data is not loaded', () => {
|
||||
it('handles empty state', async () => {
|
||||
const response = await fetchSpanDetails({
|
||||
traceId: 'foo',
|
||||
spanId: 'bar',
|
||||
});
|
||||
|
||||
expect(response.status).to.be(200);
|
||||
expect(response.body).to.eql({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when data is loaded', () => {
|
||||
let traceId: string;
|
||||
let spanId: string;
|
||||
let parentTransactionId: string;
|
||||
before(async () => {
|
||||
const instanceJava = apm
|
||||
.service({ name: 'synth-apple', environment: 'production', agentName: 'java' })
|
||||
.instance('instance-b');
|
||||
const events = timerange(start, end)
|
||||
.interval('1m')
|
||||
.rate(1)
|
||||
.generator((timestamp) => {
|
||||
return [
|
||||
instanceJava
|
||||
.transaction({ transactionName: 'GET /apple 🍏' })
|
||||
.timestamp(timestamp)
|
||||
.duration(1000)
|
||||
.failure()
|
||||
.errors(
|
||||
instanceJava
|
||||
.error({ message: '[ResponseError] index_not_found_exception' })
|
||||
.timestamp(timestamp + 50)
|
||||
)
|
||||
.children(
|
||||
instanceJava
|
||||
.span({
|
||||
spanName: 'get_green_apple_🍏',
|
||||
spanType: 'db',
|
||||
spanSubtype: 'elasticsearch',
|
||||
})
|
||||
.timestamp(timestamp + 50)
|
||||
.duration(900)
|
||||
.success()
|
||||
),
|
||||
];
|
||||
});
|
||||
|
||||
const unserialized = Array.from(events);
|
||||
|
||||
const entities = unserialized.flatMap((event) => event.serialize());
|
||||
|
||||
const span = entities.find((entity) => {
|
||||
return entity['processor.event'] === 'span';
|
||||
});
|
||||
spanId = span?.['span.id']!;
|
||||
parentTransactionId = span?.['parent.id']!;
|
||||
traceId = span?.['trace.id']!;
|
||||
|
||||
await apmSynthtraceEsClient.index(Readable.from(unserialized));
|
||||
});
|
||||
|
||||
after(() => apmSynthtraceEsClient.clean());
|
||||
|
||||
describe('span details', () => {
|
||||
let spanDetails: Awaited<ReturnType<typeof fetchSpanDetails>>['body'];
|
||||
before(async () => {
|
||||
const response = await fetchSpanDetails({
|
||||
traceId,
|
||||
spanId,
|
||||
parentTransactionId,
|
||||
});
|
||||
expect(response.status).to.eql(200);
|
||||
spanDetails = response.body;
|
||||
});
|
||||
it('returns span details', () => {
|
||||
expect(spanDetails.span?.span.name).to.eql('get_green_apple_🍏');
|
||||
expect(spanDetails.parentTransaction?.transaction.name).to.eql('GET /apple 🍏');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
|
@ -0,0 +1,144 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import { sortBy } from 'lodash';
|
||||
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
|
||||
import archives_metadata from '../constants/archives_metadata';
|
||||
import { ARCHIVER_ROUTES } from '../constants/archiver';
|
||||
|
||||
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
|
||||
const apmApiClient = getService('apmApi');
|
||||
const esArchiver = getService('esArchiver');
|
||||
|
||||
const archiveName = '8.0.0';
|
||||
const metadata = archives_metadata[archiveName];
|
||||
|
||||
// url parameters
|
||||
const { start, end } = metadata;
|
||||
|
||||
describe('Top traces', () => {
|
||||
describe('when data is not loaded', () => {
|
||||
it('handles empty state', async () => {
|
||||
const response = await apmApiClient.readUser({
|
||||
endpoint: `GET /internal/apm/traces`,
|
||||
params: {
|
||||
query: {
|
||||
start,
|
||||
end,
|
||||
kuery: '',
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
probability: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.status).to.be(200);
|
||||
expect(response.body.items.length).to.be(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when data is loaded', () => {
|
||||
let response: any;
|
||||
before(async () => {
|
||||
await esArchiver.load(ARCHIVER_ROUTES[archiveName]);
|
||||
response = await apmApiClient.readUser({
|
||||
endpoint: 'GET /internal/apm/traces',
|
||||
params: {
|
||||
query: {
|
||||
start,
|
||||
end,
|
||||
kuery: '',
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
probability: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await esArchiver.unload(ARCHIVER_ROUTES[archiveName]);
|
||||
});
|
||||
|
||||
it('returns the correct status code', () => {
|
||||
expect(response.status).to.be(200);
|
||||
});
|
||||
|
||||
it('returns the correct number of buckets', () => {
|
||||
expectSnapshot(response.body.items.length).toMatchInline(`59`);
|
||||
});
|
||||
|
||||
it('returns the correct buckets', () => {
|
||||
const sortedItems = sortBy(response.body.items, 'impact');
|
||||
|
||||
const firstItem = sortedItems[0];
|
||||
const lastItem = sortedItems[sortedItems.length - 1];
|
||||
|
||||
const groups = sortedItems.map((item) => item.key).slice(0, 5);
|
||||
|
||||
expectSnapshot(sortedItems).toMatch();
|
||||
|
||||
expectSnapshot(firstItem).toMatchInline(`
|
||||
Object {
|
||||
"agentName": "ruby",
|
||||
"averageResponseTime": 5664,
|
||||
"impact": 0,
|
||||
"key": Object {
|
||||
"service.name": "opbeans-ruby",
|
||||
"transaction.name": "Api::OrdersController#create",
|
||||
},
|
||||
"serviceName": "opbeans-ruby",
|
||||
"transactionName": "Api::OrdersController#create",
|
||||
"transactionType": "request",
|
||||
"transactionsPerMinute": 0.0166666666666667,
|
||||
}
|
||||
`);
|
||||
|
||||
expectSnapshot(lastItem).toMatchInline(`
|
||||
Object {
|
||||
"agentName": "nodejs",
|
||||
"averageResponseTime": 1077989.66666667,
|
||||
"impact": 100,
|
||||
"key": Object {
|
||||
"service.name": "opbeans-node",
|
||||
"transaction.name": "Process payment",
|
||||
},
|
||||
"serviceName": "opbeans-node",
|
||||
"transactionName": "Process payment",
|
||||
"transactionType": "Worker",
|
||||
"transactionsPerMinute": 0.7,
|
||||
}
|
||||
`);
|
||||
|
||||
expectSnapshot(groups).toMatchInline(`
|
||||
Array [
|
||||
Object {
|
||||
"service.name": "opbeans-ruby",
|
||||
"transaction.name": "Api::OrdersController#create",
|
||||
},
|
||||
Object {
|
||||
"service.name": "opbeans-java",
|
||||
"transaction.name": "APIRestController#products",
|
||||
},
|
||||
Object {
|
||||
"service.name": "opbeans-java",
|
||||
"transaction.name": "APIRestController#orders",
|
||||
},
|
||||
Object {
|
||||
"service.name": "opbeans-java",
|
||||
"transaction.name": "APIRestController#product",
|
||||
},
|
||||
Object {
|
||||
"service.name": "opbeans-node",
|
||||
"transaction.name": "POST /api",
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
|
@ -0,0 +1,148 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import type { APIReturnType } from '@kbn/apm-plugin/public/services/rest/create_call_apm_api';
|
||||
import { apm, timerange } from '@kbn/apm-synthtrace-client';
|
||||
import expect from '@kbn/expect';
|
||||
import { Readable } from 'stream';
|
||||
import type { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace';
|
||||
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
|
||||
|
||||
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
|
||||
const apmApiClient = getService('apmApi');
|
||||
const synthtrace = getService('synthtrace');
|
||||
|
||||
const start = new Date('2022-01-01T00:00:00.000Z').getTime();
|
||||
const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;
|
||||
|
||||
describe('Trace by ID', () => {
|
||||
describe('Trace does not exist', () => {
|
||||
it('handles empty state', async () => {
|
||||
const response = await apmApiClient.readUser({
|
||||
endpoint: `GET /internal/apm/traces/{traceId}`,
|
||||
params: {
|
||||
path: { traceId: 'foo' },
|
||||
query: {
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(end).toISOString(),
|
||||
entryTransactionId: 'foo',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.status).to.be(200);
|
||||
expect(response.body).to.eql({
|
||||
traceItems: {
|
||||
exceedsMax: false,
|
||||
traceDocs: [],
|
||||
errorDocs: [],
|
||||
spanLinksCountById: {},
|
||||
traceDocsTotal: 0,
|
||||
maxTraceItems: 5000,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Trace exists', () => {
|
||||
let entryTransactionId: string;
|
||||
let serviceATraceId: string;
|
||||
let apmSynthtraceEsClient: ApmSynthtraceEsClient;
|
||||
|
||||
before(async () => {
|
||||
apmSynthtraceEsClient = await synthtrace.createApmSynthtraceEsClient();
|
||||
const instanceJava = apm
|
||||
.service({ name: 'synth-apple', environment: 'production', agentName: 'java' })
|
||||
.instance('instance-b');
|
||||
const events = timerange(start, end)
|
||||
.interval('1m')
|
||||
.rate(1)
|
||||
.generator((timestamp) => {
|
||||
return [
|
||||
instanceJava
|
||||
.transaction({ transactionName: 'GET /apple 🍏' })
|
||||
.timestamp(timestamp)
|
||||
.duration(1000)
|
||||
.failure()
|
||||
.errors(
|
||||
instanceJava
|
||||
.error({ message: '[ResponseError] index_not_found_exception' })
|
||||
.timestamp(timestamp + 50)
|
||||
)
|
||||
.children(
|
||||
instanceJava
|
||||
.span({
|
||||
spanName: 'get_green_apple_🍏',
|
||||
spanType: 'db',
|
||||
spanSubtype: 'elasticsearch',
|
||||
})
|
||||
.timestamp(timestamp + 50)
|
||||
.duration(900)
|
||||
.success()
|
||||
),
|
||||
];
|
||||
});
|
||||
const unserialized = Array.from(events);
|
||||
|
||||
const serialized = unserialized.flatMap((event) => event.serialize());
|
||||
|
||||
entryTransactionId = serialized[0]['transaction.id']!;
|
||||
serviceATraceId = serialized[0]['trace.id']!;
|
||||
|
||||
await apmSynthtraceEsClient.index(Readable.from(unserialized));
|
||||
});
|
||||
|
||||
after(() => apmSynthtraceEsClient.clean());
|
||||
|
||||
describe('return trace', () => {
|
||||
let traces: APIReturnType<'GET /internal/apm/traces/{traceId}'>;
|
||||
before(async () => {
|
||||
const response = await apmApiClient.readUser({
|
||||
endpoint: `GET /internal/apm/traces/{traceId}`,
|
||||
params: {
|
||||
path: { traceId: serviceATraceId },
|
||||
query: {
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(end).toISOString(),
|
||||
entryTransactionId,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(response.status).to.eql(200);
|
||||
traces = response.body;
|
||||
});
|
||||
|
||||
it('returns some errors', () => {
|
||||
expect(traces.traceItems.errorDocs.length).to.be.greaterThan(0);
|
||||
expect(traces.traceItems.errorDocs[0].error.exception?.[0].message).to.eql(
|
||||
'[ResponseError] index_not_found_exception'
|
||||
);
|
||||
});
|
||||
|
||||
it('returns some trace docs', () => {
|
||||
expect(traces.traceItems.traceDocs.length).to.be.greaterThan(0);
|
||||
expect(
|
||||
traces.traceItems.traceDocs.map((item) => {
|
||||
if (item.span && 'name' in item.span) {
|
||||
return item.span.name;
|
||||
}
|
||||
if (item.transaction && 'name' in item.transaction) {
|
||||
return item.transaction.name;
|
||||
}
|
||||
})
|
||||
).to.eql(['GET /apple 🍏', 'get_green_apple_🍏']);
|
||||
});
|
||||
|
||||
it('returns entry transaction details', () => {
|
||||
expect(traces.entryTransaction).to.not.be(undefined);
|
||||
expect(traces.entryTransaction?.transaction.id).to.equal(entryTransactionId);
|
||||
expect(traces.entryTransaction?.transaction.name).to.equal('GET /apple 🍏');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
|
@ -0,0 +1,124 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { apm, timerange } from '@kbn/apm-synthtrace-client';
|
||||
import expect from '@kbn/expect';
|
||||
import { Readable } from 'stream';
|
||||
import type { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace';
|
||||
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
|
||||
|
||||
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
|
||||
const apmApiClient = getService('apmApi');
|
||||
const synthtrace = getService('synthtrace');
|
||||
|
||||
const start = new Date('2022-01-01T00:00:00.000Z').getTime();
|
||||
const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;
|
||||
|
||||
async function fetchTransactionDetails({
|
||||
traceId,
|
||||
transactionId,
|
||||
}: {
|
||||
traceId: string;
|
||||
transactionId: string;
|
||||
}) {
|
||||
return await apmApiClient.readUser({
|
||||
endpoint: `GET /internal/apm/traces/{traceId}/transactions/{transactionId}`,
|
||||
params: {
|
||||
path: {
|
||||
traceId,
|
||||
transactionId,
|
||||
},
|
||||
query: {
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(end).toISOString(),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
describe('Transaction details', () => {
|
||||
describe('when data is not loaded', () => {
|
||||
it('handles empty state', async () => {
|
||||
const response = await fetchTransactionDetails({
|
||||
traceId: 'foo',
|
||||
transactionId: 'bar',
|
||||
});
|
||||
|
||||
expect(response.status).to.be(200);
|
||||
expect(response.body).to.eql({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when data is loaded', () => {
|
||||
let traceId: string;
|
||||
let transactionId: string;
|
||||
let apmSynthtraceEsClient: ApmSynthtraceEsClient;
|
||||
|
||||
before(async () => {
|
||||
apmSynthtraceEsClient = await synthtrace.createApmSynthtraceEsClient();
|
||||
|
||||
const instanceJava = apm
|
||||
.service({ name: 'synth-apple', environment: 'production', agentName: 'java' })
|
||||
.instance('instance-b');
|
||||
const events = timerange(start, end)
|
||||
.interval('1m')
|
||||
.rate(1)
|
||||
.generator((timestamp) => {
|
||||
return [
|
||||
instanceJava
|
||||
.transaction({ transactionName: 'GET /apple 🍏' })
|
||||
.timestamp(timestamp)
|
||||
.duration(1000)
|
||||
.failure()
|
||||
.errors(
|
||||
instanceJava
|
||||
.error({ message: '[ResponseError] index_not_found_exception' })
|
||||
.timestamp(timestamp + 50)
|
||||
)
|
||||
.children(
|
||||
instanceJava
|
||||
.span({
|
||||
spanName: 'get_green_apple_🍏',
|
||||
spanType: 'db',
|
||||
spanSubtype: 'elasticsearch',
|
||||
})
|
||||
.timestamp(timestamp + 50)
|
||||
.duration(900)
|
||||
.success()
|
||||
),
|
||||
];
|
||||
});
|
||||
|
||||
const unserialized = Array.from(events);
|
||||
|
||||
const entities = unserialized.flatMap((event) => event.serialize());
|
||||
|
||||
const transaction = entities[0];
|
||||
transactionId = transaction?.['transaction.id']!;
|
||||
traceId = transaction?.['trace.id']!;
|
||||
|
||||
await apmSynthtraceEsClient.index(Readable.from(unserialized));
|
||||
});
|
||||
|
||||
after(() => apmSynthtraceEsClient.clean());
|
||||
|
||||
describe('transaction details', () => {
|
||||
let transactionDetails: Awaited<ReturnType<typeof fetchTransactionDetails>>['body'];
|
||||
before(async () => {
|
||||
const response = await fetchTransactionDetails({
|
||||
traceId,
|
||||
transactionId,
|
||||
});
|
||||
expect(response.status).to.eql(200);
|
||||
transactionDetails = response.body;
|
||||
});
|
||||
it('returns transaction details', () => {
|
||||
expect(transactionDetails.transaction.name).to.eql('GET /apple 🍏');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
|
@@ -124,7 +124,6 @@ export function createStatefulTestConfig<T extends DeploymentAgnosticCommonServi
        path.resolve(REPO_ROOT, STATEFUL_ROLES_ROOT_PATH, 'roles.yml'),
      ],
    },

    kbnTestServer: {
      ...xPackAPITestsConfig.get('kbnTestServer'),
      serverArgs: [
@ -1,225 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { apm, timerange } from '@kbn/apm-synthtrace-client';
|
||||
import expect from '@kbn/expect';
|
||||
import { TraceSearchType } from '@kbn/apm-plugin/common/trace_explorer';
|
||||
import { Environment } from '@kbn/apm-plugin/common/environment_rt';
|
||||
import { ENVIRONMENT_ALL } from '@kbn/apm-plugin/common/environment_filter_values';
|
||||
import { sortBy } from 'lodash';
|
||||
import { FtrProviderContext } from '../../common/ftr_provider_context';
|
||||
import { ApmApiError } from '../../common/apm_api_supertest';
|
||||
import { generateTrace } from './generate_trace';
|
||||
|
||||
export default function ApiTest({ getService }: FtrProviderContext) {
|
||||
const registry = getService('registry');
|
||||
const apmApiClient = getService('apmApiClient');
|
||||
const apmSynthtraceEsClient = getService('apmSynthtraceEsClient');
|
||||
|
||||
const start = new Date('2022-01-01T00:00:00.000Z').getTime();
|
||||
const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;
|
||||
|
||||
// for EQL sequences to work, events need a slight time offset,
|
||||
// as ES will sort based on @timestamp. to acommodate this offset
|
||||
// we also add a little bit of a buffer to the requested time range
|
||||
const endWithOffset = end + 100000;
|
||||
|
||||
async function fetchTraceSamples({
|
||||
query,
|
||||
type,
|
||||
environment,
|
||||
}: {
|
||||
query: string;
|
||||
type: TraceSearchType;
|
||||
environment: Environment;
|
||||
}) {
|
||||
return apmApiClient.readUser({
|
||||
endpoint: `GET /internal/apm/traces/find`,
|
||||
params: {
|
||||
query: {
|
||||
query,
|
||||
type,
|
||||
start: new Date(start).toISOString(),
|
||||
end: new Date(endWithOffset).toISOString(),
|
||||
environment,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function fetchTraces(traceSamples: Array<{ traceId: string; transactionId: string }>) {
|
||||
if (!traceSamples.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return Promise.all(
|
||||
      traceSamples.map(async ({ traceId, transactionId }) => {
        const response = await apmApiClient.readUser({
          endpoint: `GET /internal/apm/traces/{traceId}`,
          params: {
            path: { traceId },
            query: {
              start: new Date(start).toISOString(),
              end: new Date(endWithOffset).toISOString(),
              entryTransactionId: transactionId,
            },
          },
        });
        return response.body.traceItems.traceDocs;
      })
    );
  }

  registry.when('Find traces when traces do not exist', { config: 'basic', archives: [] }, () => {
    it('handles empty state', async () => {
      const response = await fetchTraceSamples({
        query: '',
        type: TraceSearchType.kql,
        environment: ENVIRONMENT_ALL.value,
      });

      expect(response.status).to.be(200);
      expect(response.body).to.eql({
        traceSamples: [],
      });
    });
  });

  // FLAKY: https://github.com/elastic/kibana/issues/177543
  registry.when('Find traces when traces exist', { config: 'basic', archives: [] }, () => {
    before(() => {
      const java = apm
        .service({ name: 'java', environment: 'production', agentName: 'java' })
        .instance('java');

      const node = apm
        .service({ name: 'node', environment: 'development', agentName: 'nodejs' })
        .instance('node');

      const python = apm
        .service({ name: 'python', environment: 'production', agentName: 'python' })
        .instance('python');

      return apmSynthtraceEsClient.index(
        timerange(start, end)
          .interval('15m')
          .rate(1)
          .generator((timestamp) => {
            return [
              generateTrace(timestamp, [java, node]),
              generateTrace(timestamp, [node, java], 'redis'),
              generateTrace(timestamp, [python], 'redis'),
              generateTrace(timestamp, [python, node, java], 'elasticsearch'),
              generateTrace(timestamp, [java, python, node]),
            ];
          })
      );
    });

    describe('when using KQL', () => {
      describe('and the query is empty', () => {
        it('returns all trace samples', async () => {
          const {
            body: { traceSamples },
          } = await fetchTraceSamples({
            query: '',
            type: TraceSearchType.kql,
            environment: 'ENVIRONMENT_ALL',
          });

          expect(traceSamples.length).to.eql(5);
        });
      });

      describe('and query is set', () => {
        it('returns the relevant traces', async () => {
          const {
            body: { traceSamples },
          } = await fetchTraceSamples({
            query: 'span.destination.service.resource:elasticsearch',
            type: TraceSearchType.kql,
            environment: 'ENVIRONMENT_ALL',
          });

          expect(traceSamples.length).to.eql(1);
        });
      });
    });

    describe('when using EQL', () => {
      describe('and the query is invalid', () => {
        it.skip('returns a 400', async function () {
          try {
            await fetchTraceSamples({
              query: '',
              type: TraceSearchType.eql,
              environment: 'ENVIRONMENT_ALL',
            });
            this.fail();
          } catch (error: unknown) {
            const apiError = error as ApmApiError;
            expect(apiError.res.status).to.eql(400);
          }
        });
      });

      describe('and the query is set', () => {
        it('returns the correct trace samples for transaction sequences', async () => {
          const {
            body: { traceSamples },
          } = await fetchTraceSamples({
            query: `sequence by trace.id
              [ transaction where service.name == "java" ]
              [ transaction where service.name == "node" ]`,
            type: TraceSearchType.eql,
            environment: 'ENVIRONMENT_ALL',
          });

          const traces = await fetchTraces(traceSamples);

          expect(traces.length).to.eql(2);

          const mapped = traces.map((traceDocs) => {
            return sortBy(traceDocs, '@timestamp')
              .filter((doc) => doc.processor.event === 'transaction')
              .map((doc) => doc.service.name);
          });

          expect(mapped).to.eql([
            ['java', 'node'],
            ['java', 'python', 'node'],
          ]);
        });
      });

      it('returns the correct trace samples for join sequences', async () => {
        const {
          body: { traceSamples },
        } = await fetchTraceSamples({
          query: `sequence by trace.id
            [ span where service.name == "java" ] by span.id
            [ transaction where service.name == "python" ] by parent.id`,
          type: TraceSearchType.eql,
          environment: 'ENVIRONMENT_ALL',
        });

        const traces = await fetchTraces(traceSamples);

        expect(traces.length).to.eql(1);

        const mapped = traces.map((traceDocs) => {
          return sortBy(traceDocs, '@timestamp')
            .filter((doc) => doc.processor.event === 'transaction')
            .map((doc) => doc.service.name);
        });

        expect(mapped).to.eql([['java', 'python', 'node']]);
      });
    });

    after(() => apmSynthtraceEsClient.clean());
  });
}
@@ -1,125 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { apm, timerange } from '@kbn/apm-synthtrace-client';
import expect from '@kbn/expect';
import { Readable } from 'stream';
import { FtrProviderContext } from '../../common/ftr_provider_context';

export default function ApiTest({ getService }: FtrProviderContext) {
  const registry = getService('registry');
  const apmApiClient = getService('apmApiClient');
  const apmSynthtraceEsClient = getService('apmSynthtraceEsClient');

  const start = new Date('2022-01-01T00:00:00.000Z').getTime();
  const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;

  async function fetchSpanDetails({
    traceId,
    spanId,
    parentTransactionId,
  }: {
    traceId: string;
    spanId: string;
    parentTransactionId?: string;
  }) {
    return await apmApiClient.readUser({
      endpoint: `GET /internal/apm/traces/{traceId}/spans/{spanId}`,
      params: {
        path: { traceId, spanId },
        query: {
          parentTransactionId,
          start: new Date(start).toISOString(),
          end: new Date(end).toISOString(),
        },
      },
    });
  }

  registry.when('Span details dont exist', { config: 'basic', archives: [] }, () => {
    it('handles empty state', async () => {
      const response = await fetchSpanDetails({
        traceId: 'foo',
        spanId: 'bar',
      });

      expect(response.status).to.be(200);
      expect(response.body).to.eql({});
    });
  });

  // FLAKY: https://github.com/elastic/kibana/issues/177544
  registry.when('Span details', { config: 'basic', archives: [] }, () => {
    let traceId: string;
    let spanId: string;
    let parentTransactionId: string;
    before(async () => {
      const instanceJava = apm
        .service({ name: 'synth-apple', environment: 'production', agentName: 'java' })
        .instance('instance-b');
      const events = timerange(start, end)
        .interval('1m')
        .rate(1)
        .generator((timestamp) => {
          return [
            instanceJava
              .transaction({ transactionName: 'GET /apple 🍏' })
              .timestamp(timestamp)
              .duration(1000)
              .failure()
              .errors(
                instanceJava
                  .error({ message: '[ResponseError] index_not_found_exception' })
                  .timestamp(timestamp + 50)
              )
              .children(
                instanceJava
                  .span({
                    spanName: 'get_green_apple_🍏',
                    spanType: 'db',
                    spanSubtype: 'elasticsearch',
                  })
                  .timestamp(timestamp + 50)
                  .duration(900)
                  .success()
              ),
          ];
        });

      const unserialized = Array.from(events);

      const entities = unserialized.flatMap((event) => event.serialize());

      const span = entities.find((entity) => {
        return entity['processor.event'] === 'span';
      });
      spanId = span?.['span.id']!;
      parentTransactionId = span?.['parent.id']!;
      traceId = span?.['trace.id']!;

      await apmSynthtraceEsClient.index(Readable.from(unserialized));
    });

    after(() => apmSynthtraceEsClient.clean());

    describe('span details', () => {
      let spanDetails: Awaited<ReturnType<typeof fetchSpanDetails>>['body'];
      before(async () => {
        const response = await fetchSpanDetails({
          traceId,
          spanId,
          parentTransactionId,
        });
        expect(response.status).to.eql(200);
        spanDetails = response.body;
      });
      it('returns span details', () => {
        expect(spanDetails.span?.span.name).to.eql('get_green_apple_🍏');
        expect(spanDetails.parentTransaction?.transaction.name).to.eql('GET /apple 🍏');
      });
    });
  });
}
@@ -1,140 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import { sortBy } from 'lodash';
import archives_metadata from '../../common/fixtures/es_archiver/archives_metadata';
import { FtrProviderContext } from '../../common/ftr_provider_context';

export default function ApiTest({ getService }: FtrProviderContext) {
  const registry = getService('registry');
  const apmApiClient = getService('apmApiClient');

  const archiveName = 'apm_8.0.0';
  const metadata = archives_metadata[archiveName];

  // url parameters
  const { start, end } = metadata;

  registry.when('Top traces when data is not loaded', { config: 'basic', archives: [] }, () => {
    it('handles empty state', async () => {
      const response = await apmApiClient.readUser({
        endpoint: `GET /internal/apm/traces`,
        params: {
          query: {
            start,
            end,
            kuery: '',
            environment: 'ENVIRONMENT_ALL',
            probability: 1,
          },
        },
      });

      expect(response.status).to.be(200);
      expect(response.body.items.length).to.be(0);
    });
  });

  registry.when(
    'Top traces when data is loaded',
    { config: 'basic', archives: [archiveName] },
    () => {
      let response: any;
      before(async () => {
        response = await apmApiClient.readUser({
          endpoint: 'GET /internal/apm/traces',
          params: {
            query: {
              start,
              end,
              kuery: '',
              environment: 'ENVIRONMENT_ALL',
              probability: 1,
            },
          },
        });
      });

      it('returns the correct status code', async () => {
        expect(response.status).to.be(200);
      });

      it('returns the correct number of buckets', async () => {
        expectSnapshot(response.body.items.length).toMatchInline(`81`);
      });

      it('returns the correct buckets', async () => {
        const sortedItems = sortBy(response.body.items, 'impact');

        const firstItem = sortedItems[0];
        const lastItem = sortedItems[sortedItems.length - 1];

        const groups = sortedItems.map((item) => item.key).slice(0, 5);

        expectSnapshot(sortedItems).toMatch();

        expectSnapshot(firstItem).toMatchInline(`
          Object {
            "agentName": "java",
            "averageResponseTime": 1639,
            "impact": 0,
            "key": Object {
              "service.name": "opbeans-java",
              "transaction.name": "DispatcherServlet#doPost",
            },
            "serviceName": "opbeans-java",
            "transactionName": "DispatcherServlet#doPost",
            "transactionType": "request",
            "transactionsPerMinute": 0.0333333333333333,
          }
        `);

        expectSnapshot(lastItem).toMatchInline(`
          Object {
            "agentName": "dotnet",
            "averageResponseTime": 5963775,
            "impact": 100,
            "key": Object {
              "service.name": "opbeans-dotnet",
              "transaction.name": "GET Orders/Get",
            },
            "serviceName": "opbeans-dotnet",
            "transactionName": "GET Orders/Get",
            "transactionType": "request",
            "transactionsPerMinute": 0.633333333333333,
          }
        `);

        expectSnapshot(groups).toMatchInline(`
          Array [
            Object {
              "service.name": "opbeans-java",
              "transaction.name": "DispatcherServlet#doPost",
            },
            Object {
              "service.name": "opbeans-node",
              "transaction.name": "POST /api/orders",
            },
            Object {
              "service.name": "opbeans-node",
              "transaction.name": "GET /api/products/:id",
            },
            Object {
              "service.name": "opbeans-dotnet",
              "transaction.name": "POST Orders/Post",
            },
            Object {
              "service.name": "opbeans-python",
              "transaction.name": "GET opbeans.views.product",
            },
          ]
        `);
      });
    }
  );
}
@@ -1,145 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import type { APIReturnType } from '@kbn/apm-plugin/public/services/rest/create_call_apm_api';
import { apm, timerange } from '@kbn/apm-synthtrace-client';
import expect from '@kbn/expect';
import { Readable } from 'stream';
import { FtrProviderContext } from '../../common/ftr_provider_context';

export default function ApiTest({ getService }: FtrProviderContext) {
  const registry = getService('registry');
  const apmApiClient = getService('apmApiClient');
  const apmSynthtraceEsClient = getService('apmSynthtraceEsClient');

  const start = new Date('2022-01-01T00:00:00.000Z').getTime();
  const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;

  registry.when('Trace does not exist', { config: 'basic', archives: [] }, () => {
    it('handles empty state', async () => {
      const response = await apmApiClient.readUser({
        endpoint: `GET /internal/apm/traces/{traceId}`,
        params: {
          path: { traceId: 'foo' },
          query: {
            start: new Date(start).toISOString(),
            end: new Date(end).toISOString(),
            entryTransactionId: 'foo',
          },
        },
      });

      expect(response.status).to.be(200);
      expect(response.body).to.eql({
        traceItems: {
          exceedsMax: false,
          traceDocs: [],
          errorDocs: [],
          spanLinksCountById: {},
          traceDocsTotal: 0,
          maxTraceItems: 5000,
        },
      });
    });
  });

  // FLAKY: https://github.com/elastic/kibana/issues/177545
  registry.when('Trace exists', { config: 'basic', archives: [] }, () => {
    let entryTransactionId: string;
    let serviceATraceId: string;

    before(async () => {
      const instanceJava = apm
        .service({ name: 'synth-apple', environment: 'production', agentName: 'java' })
        .instance('instance-b');
      const events = timerange(start, end)
        .interval('1m')
        .rate(1)
        .generator((timestamp) => {
          return [
            instanceJava
              .transaction({ transactionName: 'GET /apple 🍏' })
              .timestamp(timestamp)
              .duration(1000)
              .failure()
              .errors(
                instanceJava
                  .error({ message: '[ResponseError] index_not_found_exception' })
                  .timestamp(timestamp + 50)
              )
              .children(
                instanceJava
                  .span({
                    spanName: 'get_green_apple_🍏',
                    spanType: 'db',
                    spanSubtype: 'elasticsearch',
                  })
                  .timestamp(timestamp + 50)
                  .duration(900)
                  .success()
              ),
          ];
        });
      const unserialized = Array.from(events);

      const serialized = unserialized.flatMap((event) => event.serialize());

      entryTransactionId = serialized[0]['transaction.id']!;
      serviceATraceId = serialized[0]['trace.id']!;

      await apmSynthtraceEsClient.index(Readable.from(unserialized));
    });

    after(() => apmSynthtraceEsClient.clean());

    describe('return trace', () => {
      let traces: APIReturnType<'GET /internal/apm/traces/{traceId}'>;
      before(async () => {
        const response = await apmApiClient.readUser({
          endpoint: `GET /internal/apm/traces/{traceId}`,
          params: {
            path: { traceId: serviceATraceId },
            query: {
              start: new Date(start).toISOString(),
              end: new Date(end).toISOString(),
              entryTransactionId,
            },
          },
        });

        expect(response.status).to.eql(200);
        traces = response.body;
      });

      it('returns some errors', () => {
        expect(traces.traceItems.errorDocs.length).to.be.greaterThan(0);
        expect(traces.traceItems.errorDocs[0].error.exception?.[0].message).to.eql(
          '[ResponseError] index_not_found_exception'
        );
      });

      it('returns some trace docs', () => {
        expect(traces.traceItems.traceDocs.length).to.be.greaterThan(0);
        expect(
          traces.traceItems.traceDocs.map((item) => {
            if (item.span && 'name' in item.span) {
              return item.span.name;
            }
            if (item.transaction && 'name' in item.transaction) {
              return item.transaction.name;
            }
          })
        ).to.eql(['GET /apple 🍏', 'get_green_apple_🍏']);
      });

      it('returns entry transaction details', () => {
        expect(traces.entryTransaction).to.not.be(undefined);
        expect(traces.entryTransaction?.transaction.id).to.equal(entryTransactionId);
        expect(traces.entryTransaction?.transaction.name).to.equal('GET /apple 🍏');
      });
    });
  });
}
@@ -1,119 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { apm, timerange } from '@kbn/apm-synthtrace-client';
import expect from '@kbn/expect';
import { Readable } from 'stream';
import { FtrProviderContext } from '../../common/ftr_provider_context';

export default function ApiTest({ getService }: FtrProviderContext) {
  const registry = getService('registry');
  const apmApiClient = getService('apmApiClient');
  const apmSynthtraceEsClient = getService('apmSynthtraceEsClient');

  const start = new Date('2022-01-01T00:00:00.000Z').getTime();
  const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;

  async function fetchTransactionDetails({
    traceId,
    transactionId,
  }: {
    traceId: string;
    transactionId: string;
  }) {
    return await apmApiClient.readUser({
      endpoint: `GET /internal/apm/traces/{traceId}/transactions/{transactionId}`,
      params: {
        path: {
          traceId,
          transactionId,
        },
        query: {
          start: new Date(start).toISOString(),
          end: new Date(end).toISOString(),
        },
      },
    });
  }

  registry.when('Transaction details dont exist', { config: 'basic', archives: [] }, () => {
    it('handles empty state', async () => {
      const response = await fetchTransactionDetails({
        traceId: 'foo',
        transactionId: 'bar',
      });

      expect(response.status).to.be(200);
      expect(response.body).to.eql({});
    });
  });

  // FLAKY: https://github.com/elastic/kibana/issues/177546
  registry.when('Transaction details', { config: 'basic', archives: [] }, () => {
    let traceId: string;
    let transactionId: string;
    before(async () => {
      const instanceJava = apm
        .service({ name: 'synth-apple', environment: 'production', agentName: 'java' })
        .instance('instance-b');
      const events = timerange(start, end)
        .interval('1m')
        .rate(1)
        .generator((timestamp) => {
          return [
            instanceJava
              .transaction({ transactionName: 'GET /apple 🍏' })
              .timestamp(timestamp)
              .duration(1000)
              .failure()
              .errors(
                instanceJava
                  .error({ message: '[ResponseError] index_not_found_exception' })
                  .timestamp(timestamp + 50)
              )
              .children(
                instanceJava
                  .span({
                    spanName: 'get_green_apple_🍏',
                    spanType: 'db',
                    spanSubtype: 'elasticsearch',
                  })
                  .timestamp(timestamp + 50)
                  .duration(900)
                  .success()
              ),
          ];
        });

      const unserialized = Array.from(events);

      const entities = unserialized.flatMap((event) => event.serialize());

      const transaction = entities[0];
      transactionId = transaction?.['transaction.id']!;
      traceId = transaction?.['trace.id']!;

      await apmSynthtraceEsClient.index(Readable.from(unserialized));
    });

    after(() => apmSynthtraceEsClient.clean());

    describe('transaction details', () => {
      let transactionDetails: Awaited<ReturnType<typeof fetchTransactionDetails>>['body'];
      before(async () => {
        const response = await fetchTransactionDetails({
          traceId,
          transactionId,
        });
        expect(response.status).to.eql(200);
        transactionDetails = response.body;
      });
      it('returns transaction details', () => {
        expect(transactionDetails.transaction.name).to.eql('GET /apple 🍏');
      });
    });
  });
}
@@ -1,99 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from 'expect';
import { apm, ApmFields, SynthtraceGenerator, timerange } from '@kbn/apm-synthtrace-client';
import { compact, uniq } from 'lodash';
import { Readable } from 'stream';
import { ApmSynthtraceEsClient } from '@kbn/apm-synthtrace';
import type { InternalRequestHeader, RoleCredentials } from '../../../../../shared/services';
import { APMFtrContextProvider } from '../common/services';

export default function ({ getService }: APMFtrContextProvider) {
  const apmApiClient = getService('apmApiClient');
  const svlUserManager = getService('svlUserManager');
  const svlCommonApi = getService('svlCommonApi');
  const synthtrace = getService('synthtrace');

  const start = new Date('2022-01-01T00:00:00.000Z').getTime();
  const end = new Date('2022-01-01T00:15:00.000Z').getTime() - 1;

  async function fetchAndBuildCriticalPathTree(
    synthtraceEsClient: ApmSynthtraceEsClient,
    options: {
      fn: () => SynthtraceGenerator<ApmFields>;
      roleAuthc: RoleCredentials;
      internalReqHeader: InternalRequestHeader;
    } & ({ serviceName: string; transactionName: string } | {})
  ) {
    const { fn, roleAuthc, internalReqHeader } = options;

    const generator = fn();

    const unserialized = Array.from(generator);
    const serialized = unserialized.flatMap((event) => event.serialize());
    const traceIds = compact(uniq(serialized.map((event) => event['trace.id'])));

    await synthtraceEsClient.index(Readable.from(unserialized));

    return apmApiClient.slsUser({
      endpoint: 'POST /internal/apm/traces/aggregated_critical_path',
      params: {
        body: {
          start: new Date(start).toISOString(),
          end: new Date(end).toISOString(),
          traceIds,
          serviceName: 'serviceName' in options ? options.serviceName : null,
          transactionName: 'transactionName' in options ? options.transactionName : null,
        },
      },
      roleAuthc,
      internalReqHeader,
    });
  }

  describe('APM Aggregated critical path', () => {
    let roleAuthc: RoleCredentials;
    let internalReqHeader: InternalRequestHeader;
    let synthtraceEsClient: ApmSynthtraceEsClient;

    before(async () => {
      synthtraceEsClient = await synthtrace.createSynthtraceEsClient();
      internalReqHeader = svlCommonApi.getInternalRequestHeader();
      roleAuthc = await svlUserManager.createM2mApiKeyWithRoleScope('admin');
    });

    after(async () => {
      await svlUserManager.invalidateM2mApiKeyWithRoleScope(roleAuthc);
      return synthtraceEsClient.clean();
    });

    it('returns service map elements', async () => {
      const java = apm
        .service({ name: 'java', environment: 'production', agentName: 'java' })
        .instance('java');

      const duration = 1000;
      const rate = 10;

      const response = await fetchAndBuildCriticalPathTree(synthtraceEsClient, {
        fn: () =>
          timerange(start, end)
            .interval('15m')
            .rate(rate)
            .generator((timestamp) => {
              return java.transaction('GET /api').timestamp(timestamp).duration(duration);
            }),
        roleAuthc,
        internalReqHeader,
      });

      expect(response.status).toBe(200);
      expect(response.body.criticalPath).not.toBeUndefined();
    });
  });
}
@@ -12,7 +12,6 @@ export default function ({ loadTestFile }: FtrProviderContext) {
    this.tags(['esGate']);

    loadTestFile(require.resolve('./apm_api_integration/feature_flags.ts'));
    loadTestFile(require.resolve('./apm_api_integration/traces/critical_path'));
    loadTestFile(require.resolve('./cases'));
    loadTestFile(require.resolve('./synthetics'));
    loadTestFile(require.resolve('./dataset_quality_api_integration'));