[Profiling] Improve coverage for stacktrace-related tests (#163108)
## Summary

This PR improves coverage for our stacktrace-related tests and raises confidence in future modifications. It is a precursor to an upcoming fix for https://github.com/elastic/prodfiler/issues/3115. The PR updates include:

- add production-derived fixtures for stacktrace responses (replacing the prior hand-built fixtures)
- refactor, move, and rename tests as needed
- add tests to verify properties of stacktrace-related data structures along the pipeline from Elasticsearch to the Kibana UI

There should be no changes to the Kibana server endpoints or the Kibana UI.

### Checklist

- [x] [Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html) was added for features that require explanation or tutorials
- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios

### For maintainers

- [x] This was checked for breaking API changes and was [labeled appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
This commit is contained in: parent bd3b54c51d, commit 11958042bf
18 changed files with 507 additions and 398 deletions
x-pack/plugins/profiling/common/__fixtures__/README.md (new file, +17 lines)
@@ -0,0 +1,17 @@
The stacktrace fixtures in this directory are originally from Elasticsearch's
`POST /_profiling/stacktraces` endpoint. They were subsequently filtered
through the `shrink_stacktrace_response.js` command in `x-pack/plugins/profiling/scripts/`
to reduce the size without losing sampling fidelity (see the script for further
details).

The naming convention for each stacktrace fixture follows this pattern:

```
stacktraces_{seconds}s_{upsampling rate}x.json
```

where `seconds` is the time span of the original query and `upsampling rate` is
the reciprocal of the sampling rate returned from the original query.

To add a new stacktrace fixture to the test suite, update `stacktraces.ts`
appropriately.
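As a minimal sketch of that registration step (not part of the commit), a hypothetical `stacktraces_600s_10x.json` fixture, with made-up file name and numbers, would be wired into `stacktraces.ts` roughly like this:

```ts
// Hypothetical fixture: a 600-second query upsampled by 10 (sampling rate 1/10).
import stackTraces10x from './stacktraces_600s_10x.json';

export const stackTraceFixtures = [
  // ...the existing fixtures...
  { response: stackTraces10x, seconds: 600, upsampledBy: 10 },
];
```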
@@ -5,192 +5,20 @@
 * 2.0.
 */

import { createStackFrameID } from '../profiling';
import { StackTraceResponse } from '../stack_traces';

enum stackTraceID {
  A = 'yU2Oct2ct0HkxJ7-pRcPkg==',
  B = 'Xt8aKN70PDXpMDLCOmojzQ==',
  C = '8OauxYq2WK4_tBqM4xkIwA==',
  D = 'nQWGdRxvqVjwlLmQWH1Phw==',
  E = '2KciEEWALlol3b6x95PHcw==',
  F = 'BxRgiXa4h9Id6BjdPPHK8Q==',
}
import stackTraces1x from './stacktraces_60s_1x.json';
import stackTraces5x from './stacktraces_3600s_5x.json';
import stackTraces125x from './stacktraces_86400s_125x.json';
import stackTraces625x from './stacktraces_604800s_625x.json';

enum fileID {
  A = 'Ncujji3wC1nL73TTEyFBhA==',
  B = 'T2vdys5d7j85az1aP86zCg==',
  C = 'jMaTVVjYv7cecd0C4HguGw==',
  D = 'RLkjnlfcvSJN2Wph9WUuOQ==',
  E = 'gnEsgxvvEODj6iFYMQWYlA==',
  F = 'Gf4xoLc8QuAHU49Ch_CFOA==',
  G = 'ZCOCZlls7r2cbG1HchkbVg==',
  H = 'Og7kGWGe9qiCunkaXDffHQ==',
  I = 'WAE6T1TeDsjDMOuwX4Ynxg==',
  J = 'ZNiZco1zgh0nJI6hPllMaQ==',
  K = 'abl5r8Vvvb2Y7NaDZW1QLQ==',
}

enum addressOrLine {
  A = 26278522,
  B = 6712518,
  C = 105806025,
  D = 105806854,
  E = 107025202,
  F = 107044169,
  G = 18353156,
  H = 3027,
  I = 5201,
  J = 67384048,
  K = 8888,
}

const frameID: Record<string, string> = {
  A: createStackFrameID(fileID.A, addressOrLine.A),
  B: createStackFrameID(fileID.B, addressOrLine.B),
  C: createStackFrameID(fileID.C, addressOrLine.C),
  D: createStackFrameID(fileID.D, addressOrLine.D),
  E: createStackFrameID(fileID.E, addressOrLine.C),
  F: createStackFrameID(fileID.E, addressOrLine.D),
  G: createStackFrameID(fileID.E, addressOrLine.E),
  H: createStackFrameID(fileID.E, addressOrLine.F),
  I: createStackFrameID(fileID.E, addressOrLine.G),
  J: createStackFrameID(fileID.F, addressOrLine.H),
  K: createStackFrameID(fileID.F, addressOrLine.I),
  L: createStackFrameID(fileID.F, addressOrLine.J),
  M: createStackFrameID(fileID.F, addressOrLine.K),
  N: createStackFrameID(fileID.G, addressOrLine.G),
  O: createStackFrameID(fileID.H, addressOrLine.H),
  P: createStackFrameID(fileID.I, addressOrLine.I),
  Q: createStackFrameID(fileID.F, addressOrLine.A),
  R: createStackFrameID(fileID.E, addressOrLine.B),
  S: createStackFrameID(fileID.E, addressOrLine.C),
};

export const events = new Map([
  [stackTraceID.A, 16],
  [stackTraceID.B, 9],
  [stackTraceID.C, 7],
  [stackTraceID.D, 5],
  [stackTraceID.E, 2],
  [stackTraceID.F, 1],
]);

export const stackTraces = new Map([
  [
    stackTraceID.A,
    {
      FileIDs: [fileID.D, fileID.C, fileID.B, fileID.A],
      AddressOrLines: [addressOrLine.D, addressOrLine.C, addressOrLine.B, addressOrLine.A],
      FrameIDs: [frameID.D, frameID.C, frameID.B, frameID.A],
      Types: [3, 3, 3, 3],
    },
  ],
  [
    stackTraceID.B,
    {
      FileIDs: [fileID.E, fileID.E, fileID.E, fileID.E, fileID.E],
      AddressOrLines: [
        addressOrLine.G,
        addressOrLine.F,
        addressOrLine.E,
        addressOrLine.D,
        addressOrLine.C,
      ],
      FrameIDs: [frameID.I, frameID.H, frameID.G, frameID.F, frameID.E],
      Types: [3, 3, 3, 3, 3],
    },
  ],
  [
    stackTraceID.C,
    {
      FileIDs: [fileID.F, fileID.F, fileID.F, fileID.F],
      AddressOrLines: [addressOrLine.K, addressOrLine.J, addressOrLine.I, addressOrLine.H],
      FrameIDs: [frameID.M, frameID.L, frameID.K, frameID.J],
      Types: [3, 3, 3, 3],
    },
  ],
  [
    stackTraceID.D,
    {
      FileIDs: [fileID.I, fileID.H, fileID.G],
      AddressOrLines: [addressOrLine.I, addressOrLine.H, addressOrLine.G],
      FrameIDs: [frameID.P, frameID.O, frameID.N],
      Types: [3, 8, 8],
    },
  ],
  [
    stackTraceID.E,
    {
      FileIDs: [fileID.F, fileID.F, fileID.F],
      AddressOrLines: [addressOrLine.K, addressOrLine.J, addressOrLine.I],
      FrameIDs: [frameID.M, frameID.L, frameID.K],
      Types: [3, 3, 3],
    },
  ],
  [
    stackTraceID.F,
    {
      FileIDs: [fileID.E, fileID.E],
      AddressOrLines: [addressOrLine.F, addressOrLine.E],
      FrameIDs: [frameID.H, frameID.G],
      Types: [3, 3],
    },
  ],
]);

const defaultStackFrame = {
  FileName: '',
  FunctionName: '',
  FunctionOffset: 0,
  LineNumber: 0,
  Inline: false,
};

export const stackFrames = new Map([
  [
    frameID.A,
    {
      FileName: 'ThreadPoolExecutor.java',
      FunctionName: 'java.lang.Runnable java.util.concurrent.ThreadPoolExecutor.getTask()',
      FunctionOffset: 26,
      LineNumber: 1061,
      Inline: false,
    },
  ],
  [
    frameID.B,
    { FileName: '', FunctionName: 'sock_sendmsg', FunctionOffset: 0, LineNumber: 0, Inline: false },
  ],
  [frameID.C, defaultStackFrame],
  [frameID.D, defaultStackFrame],
  [frameID.E, defaultStackFrame],
  [frameID.F, defaultStackFrame],
  [frameID.G, defaultStackFrame],
  [frameID.H, defaultStackFrame],
  [frameID.I, defaultStackFrame],
  [frameID.J, defaultStackFrame],
  [frameID.K, defaultStackFrame],
  [
    frameID.L,
    { FileName: '', FunctionName: 'udp_sendmsg', FunctionOffset: 0, LineNumber: 0, Inline: false },
  ],
  [frameID.M, defaultStackFrame],
  [frameID.N, defaultStackFrame],
  [frameID.O, defaultStackFrame],
  [frameID.P, defaultStackFrame],
  [frameID.Q, defaultStackFrame],
  [frameID.R, defaultStackFrame],
  [frameID.S, defaultStackFrame],
]);

export const executables = new Map([
  [fileID.A, { FileName: '' }],
  [fileID.B, { FileName: '' }],
  [fileID.C, { FileName: '' }],
  [fileID.D, { FileName: 'libglapi.so.0.0.0' }],
  [fileID.E, { FileName: '' }],
  [fileID.F, { FileName: '' }],
  [fileID.G, { FileName: '' }],
  [fileID.H, { FileName: '' }],
  [fileID.I, { FileName: '' }],
]);
export const stackTraceFixtures: Array<{
  response: StackTraceResponse;
  seconds: number;
  upsampledBy: number;
}> = [
  { response: stackTraces1x, seconds: 60, upsampledBy: 1 },
  { response: stackTraces5x, seconds: 3600, upsampledBy: 5 },
  { response: stackTraces125x, seconds: 86400, upsampledBy: 125 },
  { response: stackTraces625x, seconds: 604800, upsampledBy: 625 },
];
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -6,32 +6,46 @@
 */

import { sum } from 'lodash';

import { createCalleeTree } from './callee';
import { decodeStackTraceResponse } from './stack_traces';

import { events, stackTraces, stackFrames, executables } from './__fixtures__/stacktraces';

const totalSamples = sum([...events.values()]);
const totalFrames = sum([...stackTraces.values()].map((trace) => trace.FrameIDs.length));
const tree = createCalleeTree(events, stackTraces, stackFrames, executables, totalFrames, 1.0);
import { stackTraceFixtures } from './__fixtures__/stacktraces';

describe('Callee operations', () => {
  test('inclusive count of root equals total sampled stacktraces', () => {
    expect(tree.CountInclusive[0]).toEqual(totalSamples);
  });

  test('inclusive count for each node should be greater than or equal to its children', () => {
    const allGreaterThanOrEqual = tree.Edges.map(
      (children, i) =>
        tree.CountInclusive[i] >= sum([...children.values()].map((j) => tree.CountInclusive[j]))
  stackTraceFixtures.forEach(({ response, seconds, upsampledBy }) => {
    const { events, stackTraces, stackFrames, executables, totalFrames, samplingRate } =
      decodeStackTraceResponse(response);
    const tree = createCalleeTree(
      events,
      stackTraces,
      stackFrames,
      executables,
      totalFrames,
      samplingRate
    );
    expect(allGreaterThanOrEqual).toBeTruthy();
  });

  test('exclusive count of root is zero', () => {
    expect(tree.CountExclusive[0]).toEqual(0);
  });
    describe(`stacktraces from ${seconds} seconds and upsampled by ${upsampledBy}`, () => {
      test('inclusive count of root to be less than or equal total sampled stacktraces', () => {
        const totalAdjustedSamples = Math.ceil(sum([...events.values()]) / samplingRate);
        expect(tree.CountInclusive[0]).toBeLessThanOrEqual(totalAdjustedSamples);
      });

  test('tree de-duplicates sibling nodes', () => {
    expect(tree.Size).toEqual(totalFrames - 2);
      test('inclusive count for each node should be greater than or equal to its children', () => {
        const allGreaterThanOrEqual = tree.Edges.map(
          (children, i) =>
            tree.CountInclusive[i] >= sum([...children.values()].map((j) => tree.CountInclusive[j]))
        );
        expect(allGreaterThanOrEqual).toBeTruthy();
      });

      test('exclusive count of root is zero', () => {
        expect(tree.CountExclusive[0]).toEqual(0);
      });

      test('tree de-duplicates sibling nodes', () => {
        expect(tree.Size).toBeLessThan(totalFrames);
      });
    });
  });
});
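The upsampling arithmetic used by these assertions can be illustrated with made-up numbers (not taken from the fixtures): the recorded sample count is scaled back up by the reciprocal of the sampling rate before being compared with the tree's root count.

```ts
// Illustrative values only: an upsampling factor of 5 corresponds to a sampling rate of 1/5.
const samplingRate = 1 / 5;
const recordedSamples = 100; // sum of the per-stacktrace counts in a fixture
const estimatedOriginalSamples = Math.ceil(recordedSamples / samplingRate); // 500
```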
@@ -6,27 +6,108 @@
 */

import { sum } from 'lodash';

import { createCalleeTree } from './callee';
import { createColumnarViewModel } from './columnar_view_model';
import { createBaseFlameGraph, createFlameGraph } from './flamegraph';
import { decodeStackTraceResponse } from './stack_traces';

import { events, stackTraces, stackFrames, executables } from './__fixtures__/stacktraces';

const totalFrames = sum([...stackTraces.values()].map((trace) => trace.FrameIDs.length));

const tree = createCalleeTree(events, stackTraces, stackFrames, executables, totalFrames, 1.0);
const graph = createFlameGraph(createBaseFlameGraph(tree, 1.0, 60));
import { stackTraceFixtures } from './__fixtures__/stacktraces';

describe('Columnar view model operations', () => {
  test('color values are generated by default', () => {
    const viewModel = createColumnarViewModel(graph);
  stackTraceFixtures.forEach(({ response, seconds, upsampledBy }) => {
    const { events, stackTraces, stackFrames, executables, totalFrames, samplingRate } =
      decodeStackTraceResponse(response);
    const tree = createCalleeTree(
      events,
      stackTraces,
      stackFrames,
      executables,
      totalFrames,
      samplingRate
    );
    const graph = createFlameGraph(createBaseFlameGraph(tree, samplingRate, seconds));

    expect(sum(viewModel.color)).toBeGreaterThan(0);
  });
    describe(`stacktraces from ${seconds} seconds and upsampled by ${upsampledBy}`, () => {
      describe('color values are generated by default', () => {
        const viewModel = createColumnarViewModel(graph);

  test('color values are not generated when disabled', () => {
    const viewModel = createColumnarViewModel(graph, false);
        test('length of colors is equal to length of labels multipled by 4', () => {
          expect(viewModel.color.length).toEqual(viewModel.label.length * 4);
        });

    expect(sum(viewModel.color)).toEqual(0);
        test('length of position0 is equal to length of labels multipled by 2', () => {
          expect(viewModel.position0.length).toEqual(viewModel.label.length * 2);
        });

        test('length of position1 is equal to length of labels multipled by 2', () => {
          expect(viewModel.position1.length).toEqual(viewModel.label.length * 2);
        });

        test('length of size0 is equal to length of labels', () => {
          expect(viewModel.size0.length).toEqual(viewModel.label.length);
        });

        test('length of size1 is equal to length of labels', () => {
          expect(viewModel.size1.length).toEqual(viewModel.label.length);
        });

        test('length of values is equal to length of labels', () => {
          expect(viewModel.value.length).toEqual(viewModel.label.length);
        });

        test('both position arrays are equal', () => {
          expect(viewModel.position0).toEqual(viewModel.position1);
        });

        test('both size arrays are equal', () => {
          expect(viewModel.size0).toEqual(viewModel.size1);
        });

        test('sum of colors is greater than zero', () => {
          expect(sum(viewModel.color)).toBeGreaterThan(0);
        });
      });

      describe('color values are not generated when disabled', () => {
        const viewModel = createColumnarViewModel(graph, false);

        test('length of colors is equal to length of labels multipled by 4', () => {
          expect(viewModel.color.length).toEqual(viewModel.label.length * 4);
        });

        test('length of position0 is equal to length of labels multipled by 2', () => {
          expect(viewModel.position0.length).toEqual(viewModel.label.length * 2);
        });

        test('length of position1 is equal to length of labels multipled by 2', () => {
          expect(viewModel.position1.length).toEqual(viewModel.label.length * 2);
        });

        test('length of size0 is equal to length of labels', () => {
          expect(viewModel.size0.length).toEqual(viewModel.label.length);
        });

        test('length of size1 is equal to length of labels', () => {
          expect(viewModel.size1.length).toEqual(viewModel.label.length);
        });

        test('length of values is equal to length of labels', () => {
          expect(viewModel.value.length).toEqual(viewModel.label.length);
        });

        test('both position arrays are equal', () => {
          expect(viewModel.position0).toEqual(viewModel.position1);
        });

        test('both size arrays are equal', () => {
          expect(viewModel.size0).toEqual(viewModel.size1);
        });

        test('sum of colors is equal to zero', () => {
          expect(sum(viewModel.color)).toEqual(0);
        });
      });
    });
  });
});
@@ -5,38 +5,50 @@
 * 2.0.
 */

import { sum } from 'lodash';
import { createCalleeTree } from './callee';
import { createBaseFlameGraph, createFlameGraph } from './flamegraph';
import { decodeStackTraceResponse } from './stack_traces';

import { events, stackTraces, stackFrames, executables } from './__fixtures__/stacktraces';

const totalFrames = sum([...stackTraces.values()].map((trace) => trace.FrameIDs.length));
const tree = createCalleeTree(events, stackTraces, stackFrames, executables, totalFrames, 1.0);
const baseFlamegraph = createBaseFlameGraph(tree, 1.0, 60);
const flamegraph = createFlameGraph(baseFlamegraph);
import { stackTraceFixtures } from './__fixtures__/stacktraces';

describe('Flamegraph operations', () => {
  test('base flamegraph has non-zero total seconds', () => {
    expect(baseFlamegraph.TotalSeconds).toEqual(60);
  });
  stackTraceFixtures.forEach(({ response, seconds, upsampledBy }) => {
    const { events, stackTraces, stackFrames, executables, totalFrames, samplingRate } =
      decodeStackTraceResponse(response);
    const tree = createCalleeTree(
      events,
      stackTraces,
      stackFrames,
      executables,
      totalFrames,
      samplingRate
    );
    const baseFlamegraph = createBaseFlameGraph(tree, samplingRate, seconds);
    const flamegraph = createFlameGraph(baseFlamegraph);

  test('base flamegraph has one more node than the number of edges', () => {
    const numEdges = baseFlamegraph.Edges.flatMap((edge) => edge).length;
    describe(`stacktraces from ${seconds} seconds and upsampled by ${upsampledBy}`, () => {
      test('base flamegraph has non-zero total seconds', () => {
        expect(baseFlamegraph.TotalSeconds).toEqual(seconds);
      });

    expect(numEdges).toEqual(baseFlamegraph.Size - 1);
  });
      test('base flamegraph has one more node than the number of edges', () => {
        const numEdges = baseFlamegraph.Edges.flatMap((edge) => edge).length;

  test('all flamegraph IDs are the same non-zero length', () => {
    // 16 is the length of a 64-bit FNV-1a hash encoded to a hex string
    const allSameLengthIDs = flamegraph.ID.every((id) => id.length === 16);
        expect(numEdges).toEqual(baseFlamegraph.Size - 1);
      });

    expect(allSameLengthIDs).toBeTruthy();
  });
      test('all flamegraph IDs are the same non-zero length', () => {
        // 16 is the length of a 64-bit FNV-1a hash encoded to a hex string
        const allSameLengthIDs = flamegraph.ID.every((id) => id.length === 16);

  test('all flamegraph labels are non-empty', () => {
    const allNonEmptyLabels = flamegraph.Label.every((id) => id.length > 0);
        expect(allSameLengthIDs).toBeTruthy();
      });

    expect(allNonEmptyLabels).toBeTruthy();
      test('all flamegraph labels are non-empty', () => {
        const allNonEmptyLabels = flamegraph.Label.every((id) => id.length > 0);

        expect(allNonEmptyLabels).toBeTruthy();
      });
    });
  });
});
@@ -5,30 +5,55 @@
 * 2.0.
 */

import { createTopNFunctions } from './functions';

import { events, stackTraces, stackFrames, executables } from './__fixtures__/stacktraces';
import { sum } from 'lodash';

import { createTopNFunctions } from './functions';
import { decodeStackTraceResponse } from './stack_traces';

import { stackTraceFixtures } from './__fixtures__/stacktraces';

describe('TopN function operations', () => {
  test('1', () => {
    const maxTopN = 5;
    const totalSamples = sum([...events.values()]);
    const topNFunctions = createTopNFunctions({
      events,
      stackTraces,
      stackFrames,
      executables,
      startIndex: 0,
      endIndex: maxTopN,
      samplingRate: 1.0,
      totalSeconds: 900,
  stackTraceFixtures.forEach(({ response, seconds, upsampledBy }) => {
    const { events, stackTraces, stackFrames, executables, samplingRate } =
      decodeStackTraceResponse(response);

    describe(`stacktraces from ${seconds} seconds and upsampled by ${upsampledBy}`, () => {
      const maxTopN = 5;
      const topNFunctions = createTopNFunctions({
        events,
        stackTraces,
        stackFrames,
        executables,
        startIndex: 0,
        endIndex: maxTopN,
        samplingRate,
        totalSeconds: seconds,
      });
      const exclusiveCounts = topNFunctions.TopN.map((value) => value.CountExclusive);

      test('samples are less than or equal to original upsampled samples', () => {
        const totalUpsampledSamples = Math.ceil(sum([...events.values()]) / samplingRate);
        expect(topNFunctions.TotalCount).toBeLessThanOrEqual(totalUpsampledSamples);
      });

      test('number of functions is equal to maximum', () => {
        expect(topNFunctions.TopN.length).toEqual(maxTopN);
      });

      test('all exclusive counts are numeric', () => {
        expect(typeof exclusiveCounts[0]).toBe('number');
        expect(typeof exclusiveCounts[1]).toBe('number');
        expect(typeof exclusiveCounts[2]).toBe('number');
        expect(typeof exclusiveCounts[3]).toBe('number');
        expect(typeof exclusiveCounts[4]).toBe('number');
      });

      test('exclusive counts are sorted from highest to lowest', () => {
        expect(exclusiveCounts[0]).toBeGreaterThanOrEqual(exclusiveCounts[1]);
        expect(exclusiveCounts[1]).toBeGreaterThanOrEqual(exclusiveCounts[2]);
        expect(exclusiveCounts[2]).toBeGreaterThanOrEqual(exclusiveCounts[3]);
        expect(exclusiveCounts[3]).toBeGreaterThanOrEqual(exclusiveCounts[4]);
      });
    });

    expect(topNFunctions.TotalCount).toEqual(totalSamples);
    expect(topNFunctions.TopN.length).toEqual(maxTopN);

    const exclusiveCounts = topNFunctions.TopN.map((value) => value.CountExclusive);
    expect(exclusiveCounts).toEqual([16, 9, 7, 5, 2]);
  });
});
@@ -5,8 +5,12 @@
 * 2.0.
 */

import { decodeStackTraceResponse, makeFrameID } from './search_stacktraces';
import { StackTraceResponse } from '../../common/stack_traces';
import {
  DecodedStackTraceResponse,
  decodeStackTraceResponse,
  makeFrameID,
  StackTraceResponse,
} from './stack_traces';

describe('Stack trace response operations', () => {
  test('empty stack trace response', () => {
@@ -15,12 +19,13 @@ describe('Stack trace response operations', () => {
      sampling_rate: 1.0,
    };

    const expected = {
      stackTraceEvents: new Map(),
    const expected: DecodedStackTraceResponse = {
      events: new Map(),
      stackTraces: new Map(),
      stackFrames: new Map(),
      executables: new Map(),
      totalFrames: 0,
      samplingRate: 1.0,
    };

    const decoded = decodeStackTraceResponse(original);
@@ -34,8 +39,8 @@ describe('Stack trace response operations', () => {
    expect(decoded.stackTraces.size).toEqual(expected.stackTraces.size);
    expect(decoded.stackTraces.size).toEqual(0);

    expect(decoded.stackTraceEvents.size).toEqual(expected.stackTraceEvents.size);
    expect(decoded.stackTraceEvents.size).toEqual(0);
    expect(decoded.events.size).toEqual(expected.events.size);
    expect(decoded.events.size).toEqual(0);

    expect(decoded.totalFrames).toEqual(expected.totalFrames);
    expect(decoded.totalFrames).toEqual(0);
@@ -76,8 +81,8 @@ describe('Stack trace response operations', () => {
      sampling_rate: 1.0,
    };

    const expected = {
      stackTraceEvents: new Map([['a', 1]]),
    const expected: DecodedStackTraceResponse = {
      events: new Map([['a', 1]]),
      stackTraces: new Map([
        [
          'a',
@@ -97,6 +102,7 @@ describe('Stack trace response operations', () => {
            FunctionName: 'pthread_create',
            FunctionOffset: 0,
            LineNumber: 0,
            Inline: false,
          },
        ],
        [
@@ -106,6 +112,7 @@ describe('Stack trace response operations', () => {
            FunctionName: 'main',
            FunctionOffset: 1,
            LineNumber: 3,
            Inline: false,
          },
        ],
        [
@@ -115,6 +122,7 @@ describe('Stack trace response operations', () => {
            FunctionName: 'inlined',
            FunctionOffset: 2,
            LineNumber: 4,
            Inline: false,
          },
        ],
      ]),
@@ -137,8 +145,8 @@ describe('Stack trace response operations', () => {
    expect(decoded.stackTraces.size).toEqual(expected.stackTraces.size);
    expect(decoded.stackTraces.size).toEqual(1);

    expect(decoded.stackTraceEvents.size).toEqual(expected.stackTraceEvents.size);
    expect(decoded.stackTraceEvents.size).toEqual(1);
    expect(decoded.events.size).toEqual(expected.events.size);
    expect(decoded.events.size).toEqual(1);

    expect(decoded.totalFrames).toEqual(expected.totalFrames);
    expect(decoded.totalFrames).toEqual(1);
@@ -172,8 +180,8 @@ describe('Stack trace response operations', () => {
      total_frames: 1,
    };

    const expected = {
      stackTraceEvents: new Map([['a', 1]]),
    const expected: DecodedStackTraceResponse = {
      events: new Map([['a', 1]]),
      stackTraces: new Map([
        [
          'a',
@@ -189,15 +197,17 @@ describe('Stack trace response operations', () => {
        [
          'abc',
          {
            FileName: null,
            FileName: '',
            FunctionName: 'pthread_create',
            FunctionOffset: null,
            LineNumber: null,
            FunctionOffset: 0,
            LineNumber: 0,
            Inline: false,
          },
        ],
      ]),
      executables: new Map([['abc', { FileName: 'pthread.c' }]]),
      totalFrames: 1,
      samplingRate: 1.0,
    };

    const decoded = decodeStackTraceResponse(original);
@@ -211,8 +221,8 @@ describe('Stack trace response operations', () => {
    expect(decoded.stackTraces.size).toEqual(expected.stackTraces.size);
    expect(decoded.stackTraces.size).toEqual(1);

    expect(decoded.stackTraceEvents.size).toEqual(expected.stackTraceEvents.size);
    expect(decoded.stackTraceEvents.size).toEqual(1);
    expect(decoded.events.size).toEqual(expected.events.size);
    expect(decoded.events.size).toEqual(1);

    expect(decoded.totalFrames).toEqual(expected.totalFrames);
    expect(decoded.totalFrames).toEqual(1);
@@ -6,6 +6,14 @@
 */

import { ProfilingESField } from './elasticsearch';
import {
  Executable,
  FileID,
  StackFrame,
  StackFrameID,
  StackTrace,
  StackTraceID,
} from './profiling';

export interface ProfilingStatusResponse {
  profiling: {
@@ -58,6 +66,104 @@ export interface StackTraceResponse {
  ['sampling_rate']: number;
}

export interface DecodedStackTraceResponse {
  events: Map<StackTraceID, number>;
  stackTraces: Map<StackTraceID, StackTrace>;
  stackFrames: Map<StackFrameID, StackFrame>;
  executables: Map<FileID, Executable>;
  totalFrames: number;
  samplingRate: number;
}

export const makeFrameID = (frameID: string, n: number): string => {
  return n === 0 ? frameID : frameID + ';' + n.toString();
};

// createInlineTrace builds a new StackTrace with inline frames.
const createInlineTrace = (
  trace: ProfilingStackTrace,
  frames: Map<StackFrameID, StackFrame>
): StackTrace => {
  // The arrays need to be extended with the inline frame information.
  const frameIDs: string[] = [];
  const fileIDs: string[] = [];
  const addressOrLines: number[] = [];
  const typeIDs: number[] = [];

  for (let i = 0; i < trace.frame_ids.length; i++) {
    const frameID = trace.frame_ids[i];
    frameIDs.push(frameID);
    fileIDs.push(trace.file_ids[i]);
    addressOrLines.push(trace.address_or_lines[i]);
    typeIDs.push(trace.type_ids[i]);

    for (let j = 1; ; j++) {
      const inlineID = makeFrameID(frameID, j);
      const frame = frames.get(inlineID);
      if (!frame) {
        break;
      }
      frameIDs.push(inlineID);
      fileIDs.push(trace.file_ids[i]);
      addressOrLines.push(trace.address_or_lines[i]);
      typeIDs.push(trace.type_ids[i]);
    }
  }

  return {
    FrameIDs: frameIDs,
    FileIDs: fileIDs,
    AddressOrLines: addressOrLines,
    Types: typeIDs,
  } as StackTrace;
};

export function decodeStackTraceResponse(response: StackTraceResponse): DecodedStackTraceResponse {
  const stackTraceEvents: Map<StackTraceID, number> = new Map();
  for (const [key, value] of Object.entries(response.stack_trace_events ?? {})) {
    stackTraceEvents.set(key, value);
  }

  const stackFrames: Map<StackFrameID, StackFrame> = new Map();
  for (const [frameID, frame] of Object.entries(response.stack_frames ?? {})) {
    // Each field in a stackframe is represented by an array. This is
    // necessary to support inline frames.
    //
    // We store the inlined frames with a modified (and unique) ID.
    // We can do so since we don't display the frame IDs.
    for (let i = 0; i < frame.function_name.length; i++) {
      stackFrames.set(makeFrameID(frameID, i), {
        FileName: frame.file_name[i],
        FunctionName: frame.function_name[i],
        FunctionOffset: frame.function_offset[i],
        LineNumber: frame.line_number[i],
        Inline: i > 0,
      } as StackFrame);
    }
  }

  const stackTraces: Map<StackTraceID, StackTrace> = new Map();
  for (const [traceID, trace] of Object.entries(response.stack_traces ?? {})) {
    stackTraces.set(traceID, createInlineTrace(trace, stackFrames));
  }

  const executables: Map<FileID, Executable> = new Map();
  for (const [key, value] of Object.entries(response.executables ?? {})) {
    executables.set(key, {
      FileName: value,
    } as Executable);
  }

  return {
    events: stackTraceEvents,
    stackTraces,
    stackFrames,
    executables,
    totalFrames: response.total_frames,
    samplingRate: response.sampling_rate,
  };
}

export enum StackTracesDisplayOption {
  StackTraces = 'stackTraces',
  Percentage = 'percentage',
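To make the inline-frame ID scheme above concrete, here is a small sketch of how `makeFrameID` behaves (the frame ID below is made up):

```ts
import { makeFrameID } from './stack_traces';

// The top-level frame keeps its original ID; inlined frames are stored under the
// same ID with a ';<n>' suffix, which keeps the stackFrames map keys unique.
makeFrameID('aQpJmTLWydNvOapSFZOwKg==', 0); // 'aQpJmTLWydNvOapSFZOwKg=='
makeFrameID('aQpJmTLWydNvOapSFZOwKg==', 1); // 'aQpJmTLWydNvOapSFZOwKg==;1'
```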
x-pack/plugins/profiling/scripts/shrink_stacktrace_response.js (new file, +115 lines)
@@ -0,0 +1,115 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

const commander = require('commander');
const fs = require('fs');

// Reduce response by keeping only one stacktrace event for a given
// depth and adding each count for the remaining stacktrace events to
// the count for that stacktrace event.
//
// This has the effect of reducing the overall response without losing
// sampling fidelity. This should only be used for testing purposes.
//
// For example, given the following stacktrace events, where the key
// represents the name of the stacktrace event and the value is the
// list of frames:
// 1. A => [frame1, frame2, frame3]
// 2. B => [frame1, frame2, frame3]
// 3. C => [frame1, frame2]
// 4. D => [frame1, frame2, frame3]
//
// In the above example, this function will return two events:
// 1. A with a count of 3
// 2. C with a count of 2
function mergeStackTracesByDepth(response) {
  const eventsByFrameDepth = {};

  Object.keys(response.stack_traces).forEach((event) => {
    const numFrames = response.stack_traces[event].frame_ids.length;
    const numEvents = response.stack_trace_events[event];
    if (eventsByFrameDepth[numFrames]) {
      const value = eventsByFrameDepth[numFrames];
      eventsByFrameDepth[numFrames] = {
        event: value.event,
        count: value.count + numEvents,
      };
    } else {
      eventsByFrameDepth[numFrames] = {
        event: event,
        count: numEvents,
      };
    }
  });

  let totalFrames = 0;
  const stackTraceEvents = {};
  const stackTraces = {};

  Object.keys(eventsByFrameDepth).forEach((depth) => {
    const { event, count } = eventsByFrameDepth[depth];
    stackTraces[event] = response.stack_traces[event];
    stackTraceEvents[event] = count;
    totalFrames += stackTraces[event].frame_ids.length * count;
  });

  return {
    stack_trace_events: stackTraceEvents,
    stack_traces: stackTraces,
    stack_frames: response.stack_frames,
    executables: response.executables,
    total_frames: totalFrames,
    sampling_rate: response.sampling_rate,
  };
}

// Remove any stackframes and executables not referenced by the
// stacktraces.
function purgeUnusedFramesAndExecutables(response) {
  const uniqueFileIDs = new Set();
  const uniqueFrameIDs = new Set();

  Object.keys(response.stack_traces).forEach((event) => {
    response.stack_traces[event].file_ids.forEach((fileID) => {
      uniqueFileIDs.add(fileID);
    });
    response.stack_traces[event].frame_ids.forEach((frameID) => {
      uniqueFrameIDs.add(frameID);
    });
  });

  const stackFrames = {};
  [...uniqueFrameIDs].forEach((frameID) => {
    stackFrames[frameID] = response.stack_frames[frameID];
  });

  const executables = {};
  [...uniqueFileIDs].forEach((fileID) => {
    executables[fileID] = response.executables[fileID];
  });

  return {
    stack_trace_events: response.stack_trace_events,
    stack_traces: response.stack_traces,
    stack_frames: stackFrames,
    executables: executables,
    total_frames: response.total_frames,
    sampling_rate: response.sampling_rate,
  };
}

commander.version('0.0.1', '-v, --version').usage('[OPTIONS]...').parse(process.argv);

try {
  const data = fs.readFileSync(process.argv[2], 'utf8');
  const response = JSON.parse(data);
  const mergedResponse = mergeStackTracesByDepth(response);
  const purgedResponse = purgeUnusedFramesAndExecutables(mergedResponse);
  console.log(JSON.stringify(purgedResponse));
} catch (err) {
  console.error(err);
}
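The exact invocation is not shown in the diff, but judging from the `process.argv` and `console.log` usage above, the script is presumably run as `node x-pack/plugins/profiling/scripts/shrink_stacktrace_response.js <stacktrace_response.json>`: it reads the JSON response given as the first argument and writes the shrunken response to stdout, so redirecting the output into a `stacktraces_{seconds}s_{upsampling rate}x.json` file under `__fixtures__` would produce a new fixture.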
@@ -49,22 +49,16 @@ export function registerFlameChartSearchRoute({
        });
        const totalSeconds = timeTo - timeFrom;

        const {
          stackTraceEvents,
          stackTraces,
          executables,
          stackFrames,
          totalFrames,
          samplingRate,
        } = await searchStackTraces({
          client: profilingElasticsearchClient,
          filter,
          sampleSize: targetSampleSize,
        });
        const { events, stackTraces, executables, stackFrames, totalFrames, samplingRate } =
          await searchStackTraces({
            client: profilingElasticsearchClient,
            filter,
            sampleSize: targetSampleSize,
          });

        const flamegraph = await withProfilingSpan('create_flamegraph', async () => {
          const tree = createCalleeTree(
            stackTraceEvents,
            events,
            stackTraces,
            stackFrames,
            executables,
@@ -51,7 +51,7 @@ export function registerTopNFunctionsSearchRoute({
          kuery,
        });

        const { stackTraceEvents, stackTraces, executables, stackFrames, samplingRate } =
        const { events, stackTraces, executables, stackFrames, samplingRate } =
          await searchStackTraces({
            client: profilingElasticsearchClient,
            filter,
@@ -61,7 +61,7 @@ export function registerTopNFunctionsSearchRoute({
        const topNFunctions = await withProfilingSpan('create_topn_functions', async () => {
          return createTopNFunctions({
            endIndex,
            events: stackTraceEvents,
            events,
            executables,
            samplingRate,
            stackFrames,
@@ -5,107 +5,10 @@
 * 2.0.
 */

import {
  Executable,
  FileID,
  StackFrame,
  StackFrameID,
  StackTrace,
  StackTraceID,
} from '../../common/profiling';
import { StackTraceResponse, ProfilingStackTrace } from '../../common/stack_traces';
import { decodeStackTraceResponse } from '../../common/stack_traces';
import { ProfilingESClient } from '../utils/create_profiling_es_client';
import { ProjectTimeQuery } from './query';

export const makeFrameID = (frameID: string, n: number): string => {
  return n === 0 ? frameID : frameID + ';' + n.toString();
};

// createInlineTrace builds a new StackTrace with inline frames.
const createInlineTrace = (
  trace: ProfilingStackTrace,
  frames: Map<StackFrameID, StackFrame>
): StackTrace => {
  // The arrays need to be extended with the inline frame information.
  const frameIDs: string[] = [];
  const fileIDs: string[] = [];
  const addressOrLines: number[] = [];
  const typeIDs: number[] = [];

  for (let i = 0; i < trace.frame_ids.length; i++) {
    const frameID = trace.frame_ids[i];
    frameIDs.push(frameID);
    fileIDs.push(trace.file_ids[i]);
    addressOrLines.push(trace.address_or_lines[i]);
    typeIDs.push(trace.type_ids[i]);

    for (let j = 1; ; j++) {
      const inlineID = makeFrameID(frameID, j);
      const frame = frames.get(inlineID);
      if (!frame) {
        break;
      }
      frameIDs.push(inlineID);
      fileIDs.push(trace.file_ids[i]);
      addressOrLines.push(trace.address_or_lines[i]);
      typeIDs.push(trace.type_ids[i]);
    }
  }

  return {
    FrameIDs: frameIDs,
    FileIDs: fileIDs,
    AddressOrLines: addressOrLines,
    Types: typeIDs,
  } as StackTrace;
};

export function decodeStackTraceResponse(response: StackTraceResponse) {
  const stackTraceEvents: Map<StackTraceID, number> = new Map();
  for (const [key, value] of Object.entries(response.stack_trace_events ?? {})) {
    stackTraceEvents.set(key, value);
  }

  const stackFrames: Map<StackFrameID, StackFrame> = new Map();
  for (const [frameID, frame] of Object.entries(response.stack_frames ?? {})) {
    // Each field in a stackframe is represented by an array. This is
    // necessary to support inline frames.
    //
    // We store the inlined frames with a modified (and unique) ID.
    // We can do so since we don't display the frame IDs.
    for (let i = 0; i < frame.function_name.length; i++) {
      stackFrames.set(makeFrameID(frameID, i), {
        FileName: frame.file_name[i],
        FunctionName: frame.function_name[i],
        FunctionOffset: frame.function_offset[i],
        LineNumber: frame.line_number[i],
        Inline: i > 0,
      } as StackFrame);
    }
  }

  const stackTraces: Map<StackTraceID, StackTrace> = new Map();
  for (const [traceID, trace] of Object.entries(response.stack_traces ?? {})) {
    stackTraces.set(traceID, createInlineTrace(trace, stackFrames));
  }

  const executables: Map<FileID, Executable> = new Map();
  for (const [key, value] of Object.entries(response.executables ?? {})) {
    executables.set(key, {
      FileName: value,
    } as Executable);
  }

  return {
    stackTraceEvents,
    stackTraces,
    stackFrames,
    executables,
    totalFrames: response.total_frames,
    samplingRate: response.sampling_rate,
  };
}

export async function searchStackTraces({
  client,
  filter,
@@ -1,16 +1,16 @@
{
  "extends": "../../../tsconfig.base.json",
  "compilerOptions": {
    "outDir": "target/types",
    "outDir": "target/types"
  },
  "include": [
    // add all the folders containing files to be compiled
    "index.ts",
    "common/**/*.ts",
    "common/**/*.json",
    "public/**/*.ts",
    "public/**/*.tsx",
    "server/**/*.ts",
    "server/**/*.json"
    "server/**/*.ts"
  ],
  "kbn_references": [
    "@kbn/core",
@@ -47,7 +47,7 @@
    "@kbn/licensing-plugin",
    "@kbn/utility-types",
    "@kbn/usage-collection-plugin",
    "@kbn/observability-ai-assistant-plugin",
    "@kbn/observability-ai-assistant-plugin"
    // add references to other TypeScript projects the plugin depends on

    // requiredPlugins from ./kibana.json
@@ -69,6 +69,6 @@
    // "@kbn/kibana-utils-plugin",
  ],
  "exclude": [
    "target/**/*",
    "target/**/*"
  ]
}