Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00

[APM] Move chartsSelector transformation to backend (#26117)

* [APM] Move ML anomaly transformation to backend
  Move apmTimeseries to backend
  Fix tests
* Update default values
* Fix bug

This commit is contained in:
  parent a92c649bd2
  commit a0675468be

41 changed files with 3502 additions and 2039 deletions
@@ -9,7 +9,8 @@ import {
   YAxis,
   HorizontalGridLines,
   LineSeries,
-  AreaSeries
+  AreaSeries,
+  VerticalRectSeries
 } from 'react-vis';
 import PropTypes from 'prop-types';
 import React, { PureComponent } from 'react';
@@ -56,13 +57,14 @@ class StaticPlot extends PureComponent {
       case 'areaMaxHeight':
         const yMax = last(plotValues.yTickValues);
         const data = serie.data.map(p => ({
           x0: p.x0,
           x: p.x,
           y0: 0,
-          y: p.y ? yMax : null
+          y: yMax
         }));

         return (
-          <AreaSeries
+          <VerticalRectSeries
+            getNull={d => d.y !== null}
             key={serie.title}
             xType="time"
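The new `areaMaxHeight` branch stretches each anomaly-score rectangle to the top of the chart before handing it to `VerticalRectSeries`. A minimal sketch of that mapping, assuming `serie.data` holds `{ x0, x }` rectangles and `plotValues.yTickValues` is the y-axis tick array (names taken from the hunk above; the surrounding component is omitted):

import { last } from 'lodash';

interface AnomalyRect {
  x0: number; // bucket start (ms)
  x: number; // bucket end (ms)
}

// Stretch each anomaly-score rectangle to the full chart height so it renders
// as a vertical band behind the response-time lines.
function toFullHeightRects(data: AnomalyRect[], yTickValues: number[]) {
  const yMax = last(yTickValues);
  return data.map(p => ({ x0: p.x0, x: p.x, y0: 0, y: yMax }));
}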
@@ -18,6 +18,10 @@ import {
   getEmptySerie
 } from '../../../../../store/selectors/chartSelectors';

+function getXValueByIndex(index) {
+  return responseWithData.responseTimes.avg[index].x;
+}
+
 describe('when response has data', () => {
   let wrapper;
   let onHover;
@@ -26,7 +30,6 @@ describe('when response has data', () => {

   beforeEach(() => {
-    const series = getResponseTimeSeries(responseWithData);

     onHover = jest.fn();
     onMouseLeave = jest.fn();
     onSelectionEnd = jest.fn();
@@ -166,7 +169,7 @@ describe('when response has data', () => {
     });

     it('should call onHover', () => {
-      expect(onHover).toHaveBeenCalledWith(responseWithData.dates[index]);
+      expect(onHover).toHaveBeenCalledWith(getXValueByIndex(index));
     });
   });

@@ -178,9 +181,9 @@ describe('when response has data', () => {
     });

     // Simulate hovering over multiple buckets
-    wrapper.setProps({ hoverX: responseWithData.dates[13] });
-    wrapper.setProps({ hoverX: responseWithData.dates[14] });
-    wrapper.setProps({ hoverX: responseWithData.dates[15] });
+    wrapper.setProps({ hoverX: getXValueByIndex(13) });
+    wrapper.setProps({ hoverX: getXValueByIndex(14) });
+    wrapper.setProps({ hoverX: getXValueByIndex(15) });
   });

   it('should display tooltip', () => {

@ -1,285 +1,248 @@
|
|||
{
|
||||
"dates": [
|
||||
1502282820000,
|
||||
1502282880000,
|
||||
1502282940000,
|
||||
1502283000000,
|
||||
1502283060000,
|
||||
1502283120000,
|
||||
1502283180000,
|
||||
1502283240000,
|
||||
1502283300000,
|
||||
1502283360000,
|
||||
1502283420000,
|
||||
1502283480000,
|
||||
1502283540000,
|
||||
1502283600000,
|
||||
1502283660000,
|
||||
1502283720000,
|
||||
1502283780000,
|
||||
1502283840000,
|
||||
1502283900000,
|
||||
1502283960000,
|
||||
1502284020000,
|
||||
1502284080000,
|
||||
1502284140000,
|
||||
1502284200000,
|
||||
1502284260000,
|
||||
1502284320000,
|
||||
1502284380000,
|
||||
1502284440000,
|
||||
1502284500000,
|
||||
1502284560000,
|
||||
1502284620000
|
||||
],
|
||||
"responseTimes": {
|
||||
"avg": [
|
||||
0,
|
||||
480074.48979591834,
|
||||
410277.4358974359,
|
||||
437216.1836734694,
|
||||
478028.36,
|
||||
462688.0816326531,
|
||||
506655.98076923075,
|
||||
585381.5106382979,
|
||||
465090.7073170732,
|
||||
405082.2448979592,
|
||||
480783.9090909091,
|
||||
372316.3953488372,
|
||||
504987.31111111114,
|
||||
395861.23255813954,
|
||||
462582.2291666667,
|
||||
438704.4,
|
||||
441463.5,
|
||||
570707.1774193548,
|
||||
425895.17391304346,
|
||||
438396.2075471698,
|
||||
388522.5333333333,
|
||||
482076.82608695654,
|
||||
471235.04545454547,
|
||||
390323.72,
|
||||
397531.92156862747,
|
||||
447088.89090909093,
|
||||
418634.46774193546,
|
||||
0,
|
||||
440104.2075471698,
|
||||
753710.6212121212,
|
||||
0
|
||||
{ "x": 1502282820000, "y": 0 },
|
||||
{ "x": 1502282880000, "y": 480074.48979591834 },
|
||||
{ "x": 1502282940000, "y": 410277.4358974359 },
|
||||
{ "x": 1502283000000, "y": 437216.1836734694 },
|
||||
{ "x": 1502283060000, "y": 478028.36 },
|
||||
{ "x": 1502283120000, "y": 462688.0816326531 },
|
||||
{ "x": 1502283180000, "y": 506655.98076923075 },
|
||||
{ "x": 1502283240000, "y": 585381.5106382979 },
|
||||
{ "x": 1502283300000, "y": 465090.7073170732 },
|
||||
{ "x": 1502283360000, "y": 405082.2448979592 },
|
||||
{ "x": 1502283420000, "y": 480783.9090909091 },
|
||||
{ "x": 1502283480000, "y": 372316.3953488372 },
|
||||
{ "x": 1502283540000, "y": 504987.31111111114 },
|
||||
{ "x": 1502283600000, "y": 395861.23255813954 },
|
||||
{ "x": 1502283660000, "y": 462582.2291666667 },
|
||||
{ "x": 1502283720000, "y": 438704.4 },
|
||||
{ "x": 1502283780000, "y": 441463.5 },
|
||||
{ "x": 1502283840000, "y": 570707.1774193548 },
|
||||
{ "x": 1502283900000, "y": 425895.17391304346 },
|
||||
{ "x": 1502283960000, "y": 438396.2075471698 },
|
||||
{ "x": 1502284020000, "y": 388522.5333333333 },
|
||||
{ "x": 1502284080000, "y": 482076.82608695654 },
|
||||
{ "x": 1502284140000, "y": 471235.04545454547 },
|
||||
{ "x": 1502284200000, "y": 390323.72 },
|
||||
{ "x": 1502284260000, "y": 397531.92156862747 },
|
||||
{ "x": 1502284320000, "y": 447088.89090909093 },
|
||||
{ "x": 1502284380000, "y": 418634.46774193546 },
|
||||
{ "x": 1502284440000, "y": 0 },
|
||||
{ "x": 1502284500000, "y": 440104.2075471698 },
|
||||
{ "x": 1502284560000, "y": 753710.6212121212 },
|
||||
{ "x": 1502284620000, "y": 0 }
|
||||
],
|
||||
"p95": [
|
||||
0,
|
||||
1215886,
|
||||
1244355.3000000003,
|
||||
1116243.7999999993,
|
||||
1089262.15,
|
||||
1181235.599999999,
|
||||
1066767.5499999998,
|
||||
1568896.2999999996,
|
||||
1012741,
|
||||
1069125.1999999988,
|
||||
1073778.85,
|
||||
1118314.4999999998,
|
||||
1101809.5999999999,
|
||||
1076662.7999999998,
|
||||
990067.35,
|
||||
1557383.999999999,
|
||||
1040584.3500000001,
|
||||
1733451.8499999994,
|
||||
1212304.75,
|
||||
1017966.8,
|
||||
1020771.9999999999,
|
||||
1449191.25,
|
||||
1056132.15,
|
||||
1041506.6499999998,
|
||||
998095.5,
|
||||
1327904,
|
||||
1076961.05,
|
||||
0,
|
||||
1120802.5999999999,
|
||||
2322534,
|
||||
0
|
||||
{ "x": 1502282820000, "y": 0 },
|
||||
{ "x": 1502282880000, "y": 1215886 },
|
||||
{ "x": 1502282940000, "y": 1244355.3000000003 },
|
||||
{ "x": 1502283000000, "y": 1116243.7999999993 },
|
||||
{ "x": 1502283060000, "y": 1089262.15 },
|
||||
{ "x": 1502283120000, "y": 1181235.599999999 },
|
||||
{ "x": 1502283180000, "y": 1066767.5499999998 },
|
||||
{ "x": 1502283240000, "y": 1568896.2999999996 },
|
||||
{ "x": 1502283300000, "y": 1012741 },
|
||||
{ "x": 1502283360000, "y": 1069125.1999999988 },
|
||||
{ "x": 1502283420000, "y": 1073778.85 },
|
||||
{ "x": 1502283480000, "y": 1118314.4999999998 },
|
||||
{ "x": 1502283540000, "y": 1101809.5999999999 },
|
||||
{ "x": 1502283600000, "y": 1076662.7999999998 },
|
||||
{ "x": 1502283660000, "y": 990067.35 },
|
||||
{ "x": 1502283720000, "y": 1557383.999999999 },
|
||||
{ "x": 1502283780000, "y": 1040584.3500000001 },
|
||||
{ "x": 1502283840000, "y": 1733451.8499999994 },
|
||||
{ "x": 1502283900000, "y": 1212304.75 },
|
||||
{ "x": 1502283960000, "y": 1017966.8 },
|
||||
{ "x": 1502284020000, "y": 1020771.9999999999 },
|
||||
{ "x": 1502284080000, "y": 1449191.25 },
|
||||
{ "x": 1502284140000, "y": 1056132.15 },
|
||||
{ "x": 1502284200000, "y": 1041506.6499999998 },
|
||||
{ "x": 1502284260000, "y": 998095.5 },
|
||||
{ "x": 1502284320000, "y": 1327904 },
|
||||
{ "x": 1502284380000, "y": 1076961.05 },
|
||||
{ "x": 1502284440000, "y": 0 },
|
||||
{ "x": 1502284500000, "y": 1120802.5999999999 },
|
||||
{ "x": 1502284560000, "y": 2322534 },
|
||||
{ "x": 1502284620000, "y": 0 }
|
||||
],
|
||||
"p99": [
|
||||
0,
|
||||
1494506.1599999988,
|
||||
1549055.6999999993,
|
||||
1539504.0399999986,
|
||||
1392126.2799999996,
|
||||
1601739.799999998,
|
||||
1716968.6400000001,
|
||||
1822798.7799999998,
|
||||
2068320.600000001,
|
||||
2097748.6799999983,
|
||||
1386087.6600000001,
|
||||
1509311.1599999992,
|
||||
1165877.2800000003,
|
||||
1183434.8,
|
||||
1425065.5000000007,
|
||||
1820377.1200000006,
|
||||
1996905.9000000004,
|
||||
2199604.54,
|
||||
1443694.2499999998,
|
||||
1261225.6,
|
||||
1588579.5600000003,
|
||||
2073728.899999998,
|
||||
1330845.0100000002,
|
||||
1160146.2399999998,
|
||||
1623945.5,
|
||||
1390707.1400000001,
|
||||
2067623.4500000002,
|
||||
0,
|
||||
2547299.079999993,
|
||||
4586742.89999998,
|
||||
0
|
||||
{ "x": 1502282820000, "y": 0 },
|
||||
{ "x": 1502282880000, "y": 1494506.1599999988 },
|
||||
{ "x": 1502282940000, "y": 1549055.6999999993 },
|
||||
{ "x": 1502283000000, "y": 1539504.0399999986 },
|
||||
{ "x": 1502283060000, "y": 1392126.2799999996 },
|
||||
{ "x": 1502283120000, "y": 1601739.799999998 },
|
||||
{ "x": 1502283180000, "y": 1716968.6400000001 },
|
||||
{ "x": 1502283240000, "y": 1822798.7799999998 },
|
||||
{ "x": 1502283300000, "y": 2068320.600000001 },
|
||||
{ "x": 1502283360000, "y": 2097748.6799999983 },
|
||||
{ "x": 1502283420000, "y": 1386087.6600000001 },
|
||||
{ "x": 1502283480000, "y": 1509311.1599999992 },
|
||||
{ "x": 1502283540000, "y": 1165877.2800000003 },
|
||||
{ "x": 1502283600000, "y": 1183434.8 },
|
||||
{ "x": 1502283660000, "y": 1425065.5000000007 },
|
||||
{ "x": 1502283720000, "y": 1820377.1200000006 },
|
||||
{ "x": 1502283780000, "y": 1996905.9000000004 },
|
||||
{ "x": 1502283840000, "y": 2199604.54 },
|
||||
{ "x": 1502283900000, "y": 1443694.2499999998 },
|
||||
{ "x": 1502283960000, "y": 1261225.6 },
|
||||
{ "x": 1502284020000, "y": 1588579.5600000003 },
|
||||
{ "x": 1502284080000, "y": 2073728.899999998 },
|
||||
{ "x": 1502284140000, "y": 1330845.0100000002 },
|
||||
{ "x": 1502284200000, "y": 1160146.2399999998 },
|
||||
{ "x": 1502284260000, "y": 1623945.5 },
|
||||
{ "x": 1502284320000, "y": 1390707.1400000001 },
|
||||
{ "x": 1502284380000, "y": 2067623.4500000002 },
|
||||
{ "x": 1502284440000, "y": 0 },
|
||||
{ "x": 1502284500000, "y": 2547299.079999993 },
|
||||
{ "x": 1502284560000, "y": 4586742.89999998 },
|
||||
{ "x": 1502284620000, "y": 0 }
|
||||
]
|
||||
},
|
||||
"tpmBuckets": [
|
||||
{
|
||||
"key": "2xx",
|
||||
"avg": 41.61538461538461,
|
||||
"values": [
|
||||
0,
|
||||
0,
|
||||
33,
|
||||
42,
|
||||
44,
|
||||
42,
|
||||
47,
|
||||
42,
|
||||
35,
|
||||
44,
|
||||
39,
|
||||
34,
|
||||
38,
|
||||
37,
|
||||
41,
|
||||
37,
|
||||
37,
|
||||
52,
|
||||
38,
|
||||
43,
|
||||
38,
|
||||
41,
|
||||
40,
|
||||
42,
|
||||
40,
|
||||
49,
|
||||
51,
|
||||
0,
|
||||
0,
|
||||
56,
|
||||
0
|
||||
"dataPoints": [
|
||||
{ "x": 1502282820000, "y": 0 },
|
||||
{ "x": 1502282880000, "y": 0 },
|
||||
{ "x": 1502282940000, "y": 33 },
|
||||
{ "x": 1502283000000, "y": 42 },
|
||||
{ "x": 1502283060000, "y": 44 },
|
||||
{ "x": 1502283120000, "y": 42 },
|
||||
{ "x": 1502283180000, "y": 47 },
|
||||
{ "x": 1502283240000, "y": 42 },
|
||||
{ "x": 1502283300000, "y": 35 },
|
||||
{ "x": 1502283360000, "y": 44 },
|
||||
{ "x": 1502283420000, "y": 39 },
|
||||
{ "x": 1502283480000, "y": 34 },
|
||||
{ "x": 1502283540000, "y": 38 },
|
||||
{ "x": 1502283600000, "y": 37 },
|
||||
{ "x": 1502283660000, "y": 41 },
|
||||
{ "x": 1502283720000, "y": 37 },
|
||||
{ "x": 1502283780000, "y": 37 },
|
||||
{ "x": 1502283840000, "y": 52 },
|
||||
{ "x": 1502283900000, "y": 38 },
|
||||
{ "x": 1502283960000, "y": 43 },
|
||||
{ "x": 1502284020000, "y": 38 },
|
||||
{ "x": 1502284080000, "y": 41 },
|
||||
{ "x": 1502284140000, "y": 40 },
|
||||
{ "x": 1502284200000, "y": 42 },
|
||||
{ "x": 1502284260000, "y": 40 },
|
||||
{ "x": 1502284320000, "y": 49 },
|
||||
{ "x": 1502284380000, "y": 51 },
|
||||
{ "x": 1502284440000, "y": 0 },
|
||||
{ "x": 1502284500000, "y": 0 },
|
||||
{ "x": 1502284560000, "y": 56 },
|
||||
{ "x": 1502284620000, "y": 0 }
|
||||
]
|
||||
},
|
||||
{
|
||||
"key": "3xx",
|
||||
"avg": 0,
|
||||
"values": [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0
|
||||
"dataPoints": [
|
||||
{ "x": 1502282820000, "y": 0 },
|
||||
{ "x": 1502282880000, "y": 0 },
|
||||
{ "x": 1502282940000, "y": 0 },
|
||||
{ "x": 1502283000000, "y": 0 },
|
||||
{ "x": 1502283060000, "y": 0 },
|
||||
{ "x": 1502283120000, "y": 0 },
|
||||
{ "x": 1502283180000, "y": 0 },
|
||||
{ "x": 1502283240000, "y": 0 },
|
||||
{ "x": 1502283300000, "y": 0 },
|
||||
{ "x": 1502283360000, "y": 0 },
|
||||
{ "x": 1502283420000, "y": 0 },
|
||||
{ "x": 1502283480000, "y": 0 },
|
||||
{ "x": 1502283540000, "y": 0 },
|
||||
{ "x": 1502283600000, "y": 0 },
|
||||
{ "x": 1502283660000, "y": 0 },
|
||||
{ "x": 1502283720000, "y": 0 },
|
||||
{ "x": 1502283780000, "y": 0 },
|
||||
{ "x": 1502283840000, "y": 0 },
|
||||
{ "x": 1502283900000, "y": 0 },
|
||||
{ "x": 1502283960000, "y": 0 },
|
||||
{ "x": 1502284020000, "y": 0 },
|
||||
{ "x": 1502284080000, "y": 0 },
|
||||
{ "x": 1502284140000, "y": 0 },
|
||||
{ "x": 1502284200000, "y": 0 },
|
||||
{ "x": 1502284260000, "y": 0 },
|
||||
{ "x": 1502284320000, "y": 0 },
|
||||
{ "x": 1502284380000, "y": 0 },
|
||||
{ "x": 1502284440000, "y": 0 },
|
||||
{ "x": 1502284500000, "y": 0 },
|
||||
{ "x": 1502284560000, "y": 0 },
|
||||
{ "x": 1502284620000, "y": 0 }
|
||||
]
|
||||
},
|
||||
{
|
||||
"key": "4xx",
|
||||
"avg": 1.4615384615384615,
|
||||
"values": [
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
3,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
3,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
2,
|
||||
2,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
2,
|
||||
2,
|
||||
2,
|
||||
3,
|
||||
0,
|
||||
0,
|
||||
2,
|
||||
0
|
||||
"dataPoints": [
|
||||
{ "x": 1502282820000, "y": 0 },
|
||||
{ "x": 1502282880000, "y": 0 },
|
||||
{ "x": 1502282940000, "y": 1 },
|
||||
{ "x": 1502283000000, "y": 1 },
|
||||
{ "x": 1502283060000, "y": 1 },
|
||||
{ "x": 1502283120000, "y": 3 },
|
||||
{ "x": 1502283180000, "y": 1 },
|
||||
{ "x": 1502283240000, "y": 1 },
|
||||
{ "x": 1502283300000, "y": 1 },
|
||||
{ "x": 1502283360000, "y": 1 },
|
||||
{ "x": 1502283420000, "y": 1 },
|
||||
{ "x": 1502283480000, "y": 3 },
|
||||
{ "x": 1502283540000, "y": 1 },
|
||||
{ "x": 1502283600000, "y": 1 },
|
||||
{ "x": 1502283660000, "y": 1 },
|
||||
{ "x": 1502283720000, "y": 1 },
|
||||
{ "x": 1502283780000, "y": 1 },
|
||||
{ "x": 1502283840000, "y": 2 },
|
||||
{ "x": 1502283900000, "y": 2 },
|
||||
{ "x": 1502283960000, "y": 1 },
|
||||
{ "x": 1502284020000, "y": 1 },
|
||||
{ "x": 1502284080000, "y": 1 },
|
||||
{ "x": 1502284140000, "y": 1 },
|
||||
{ "x": 1502284200000, "y": 2 },
|
||||
{ "x": 1502284260000, "y": 2 },
|
||||
{ "x": 1502284320000, "y": 2 },
|
||||
{ "x": 1502284380000, "y": 3 },
|
||||
{ "x": 1502284440000, "y": 0 },
|
||||
{ "x": 1502284500000, "y": 0 },
|
||||
{ "x": 1502284560000, "y": 2 },
|
||||
{ "x": 1502284620000, "y": 0 }
|
||||
]
|
||||
},
|
||||
{
|
||||
"key": "5xx",
|
||||
"avg": 5.6923076923076925,
|
||||
"values": [
|
||||
0,
|
||||
0,
|
||||
5,
|
||||
6,
|
||||
5,
|
||||
4,
|
||||
4,
|
||||
4,
|
||||
5,
|
||||
4,
|
||||
4,
|
||||
6,
|
||||
6,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
6,
|
||||
8,
|
||||
6,
|
||||
9,
|
||||
6,
|
||||
4,
|
||||
3,
|
||||
6,
|
||||
9,
|
||||
4,
|
||||
8,
|
||||
0,
|
||||
0,
|
||||
8,
|
||||
0
|
||||
"dataPoints": [
|
||||
{ "x": 1502282820000, "y": 0 },
|
||||
{ "x": 1502282880000, "y": 0 },
|
||||
{ "x": 1502282940000, "y": 5 },
|
||||
{ "x": 1502283000000, "y": 6 },
|
||||
{ "x": 1502283060000, "y": 5 },
|
||||
{ "x": 1502283120000, "y": 4 },
|
||||
{ "x": 1502283180000, "y": 4 },
|
||||
{ "x": 1502283240000, "y": 4 },
|
||||
{ "x": 1502283300000, "y": 5 },
|
||||
{ "x": 1502283360000, "y": 4 },
|
||||
{ "x": 1502283420000, "y": 4 },
|
||||
{ "x": 1502283480000, "y": 6 },
|
||||
{ "x": 1502283540000, "y": 6 },
|
||||
{ "x": 1502283600000, "y": 5 },
|
||||
{ "x": 1502283660000, "y": 6 },
|
||||
{ "x": 1502283720000, "y": 7 },
|
||||
{ "x": 1502283780000, "y": 6 },
|
||||
{ "x": 1502283840000, "y": 8 },
|
||||
{ "x": 1502283900000, "y": 6 },
|
||||
{ "x": 1502283960000, "y": 9 },
|
||||
{ "x": 1502284020000, "y": 6 },
|
||||
{ "x": 1502284080000, "y": 4 },
|
||||
{ "x": 1502284140000, "y": 3 },
|
||||
{ "x": 1502284200000, "y": 6 },
|
||||
{ "x": 1502284260000, "y": 9 },
|
||||
{ "x": 1502284320000, "y": 4 },
|
||||
{ "x": 1502284380000, "y": 8 },
|
||||
{ "x": 1502284440000, "y": 0 },
|
||||
{ "x": 1502284500000, "y": 0 },
|
||||
{ "x": 1502284560000, "y": 8 },
|
||||
{ "x": 1502284620000, "y": 0 }
|
||||
]
|
||||
}
|
||||
],
|
||||
|
|
|
@@ -10,7 +10,7 @@ import { ServiceAPIResponse } from 'x-pack/plugins/apm/server/lib/services/get_s
 import { ServiceListAPIResponse } from 'x-pack/plugins/apm/server/lib/services/get_services';
 import { TraceListAPIResponse } from 'x-pack/plugins/apm/server/lib/traces/get_top_traces';
 import { TraceAPIResponse } from 'x-pack/plugins/apm/server/lib/traces/get_trace';
-import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_timeseries_data/transform';
+import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts';
 import { ITransactionDistributionAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/distribution';
 import { TransactionListAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/get_top_transactions';
 import { TransactionAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/get_transaction';
@@ -7,23 +7,25 @@
 import React from 'react';
 import { Request, RRRRender } from 'react-redux-request';
 import { createSelector } from 'reselect';
-import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_timeseries_data/transform';
+import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts';
 import { loadCharts } from '../../services/rest/apm';
 import { IReduxState } from '../rootReducer';
 import { getCharts } from '../selectors/chartSelectors';
 import { getUrlParams, IUrlParams } from '../urlParams';

 const ID = 'transactionDetailsCharts';
-const INITIAL_DATA = {
-  totalHits: 0,
-  dates: [],
-  responseTimes: {
-    avg: [],
-    p95: [],
-    p99: []
-  },
-  tpmBuckets: [],
-  overallAvgDuration: undefined
+const INITIAL_DATA: TimeSeriesAPIResponse = {
+  apmTimeseries: {
+    totalHits: 0,
+    responseTimes: {
+      avg: [],
+      p95: [],
+      p99: []
+    },
+    tpmBuckets: [],
+    overallAvgDuration: undefined
+  },
+  anomalyTimeseries: undefined
 };

 export const getTransactionDetailsCharts = createSelector(
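For orientation, a sketch of the payload shape the selector now defaults to; the field names are taken from INITIAL_DATA above, while the exact exported types live in x-pack/plugins/apm/server/lib/transactions/charts and may differ in detail:

interface Coordinate {
  x: number;
  y?: number | null;
}

// Assumed shape, mirroring INITIAL_DATA above.
interface TimeSeriesAPIResponseSketch {
  apmTimeseries: {
    totalHits: number;
    responseTimes: { avg: Coordinate[]; p95: Coordinate[]; p99: Coordinate[] };
    tpmBuckets: Array<{ key: string; dataPoints: Coordinate[] }>;
    overallAvgDuration?: number;
  };
  // Present only when ML anomaly detection has data for the service/transaction type.
  anomalyTimeseries?: {
    anomalyBoundaries: Array<Coordinate & { y0: number }>;
    anomalyScore: Array<{ x0: number; x: number }>;
  };
}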
@@ -4,27 +4,29 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { get, isEmpty } from 'lodash';
+import { get } from 'lodash';
 import React from 'react';
 import { Request, RRRRender } from 'react-redux-request';
 import { createSelector } from 'reselect';
-import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_timeseries_data/transform';
+import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts';
 import { loadCharts } from '../../services/rest/apm';
 import { IReduxState } from '../rootReducer';
 import { getCharts } from '../selectors/chartSelectors';
 import { getUrlParams, IUrlParams } from '../urlParams';

 const ID = 'transactionOverviewCharts';
-const INITIAL_DATA = {
-  totalHits: 0,
-  dates: [],
-  responseTimes: {
-    avg: [],
-    p95: [],
-    p99: []
-  },
-  tpmBuckets: [],
-  overallAvgDuration: undefined
+const INITIAL_DATA: TimeSeriesAPIResponse = {
+  apmTimeseries: {
+    totalHits: 0,
+    responseTimes: {
+      avg: [],
+      p95: [],
+      p99: []
+    },
+    tpmBuckets: [],
+    overallAvgDuration: undefined
+  },
+  anomalyTimeseries: undefined
 };

 export const getTransactionOverviewCharts = createSelector(
@@ -39,11 +41,8 @@ export const getTransactionOverviewCharts = createSelector(
 );

 export function hasDynamicBaseline(state: IReduxState) {
-  return !isEmpty(
-    get(
-      state,
-      `reactReduxRequest[${ID}].data.responseTimes.avgAnomalies.buckets`
-    )
+  return (
+    get(state, `reactReduxRequest[${ID}].data.anomalyTimeseries`) !== undefined
   );
 }

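A small test sketch for the new check, assuming the module exports `hasDynamicBaseline` and the request id 'transactionOverviewCharts' used above (the import path is hypothetical):

import { hasDynamicBaseline } from '../transactionOverviewCharts';

describe('hasDynamicBaseline', () => {
  it('is true only when anomalyTimeseries is present on the request data', () => {
    const withAnomalies = {
      reactReduxRequest: {
        transactionOverviewCharts: { data: { anomalyTimeseries: { anomalyScore: [] } } }
      }
    } as any;
    const withoutAnomalies = {
      reactReduxRequest: { transactionOverviewCharts: { data: {} } }
    } as any;

    expect(hasDynamicBaseline(withAnomalies)).toBe(true);
    expect(hasDynamicBaseline(withoutAnomalies)).toBe(false);
  });
});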
@@ -14,7 +14,7 @@ import { IUrlParams, urlParamsReducer } from './urlParams';
 export interface IReduxState {
   location: any;
   urlParams: IUrlParams;
-  reactReduxRequest: StringMap;
+  reactReduxRequest: StringMap<any>;
 }

 export const rootReducer = combineReducers({
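For reference, a plausible shape of the `StringMap` helper from x-pack/plugins/apm/typings/common; the real definition is not part of this diff, so this is an assumption:

// Assumed helper type: a plain object keyed by string.
interface StringMap<T = any> {
  [key: string]: T;
}

// With StringMap<any> the request cache can be indexed by request id, e.g.
// state.reactReduxRequest['transactionOverviewCharts'], without a type error.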
@ -1,203 +0,0 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`chartSelectors getResponseTimeSeries should match snapshot 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"color": "#3185fc",
|
||||
"data": Array [
|
||||
Object {
|
||||
"x": 0,
|
||||
"y": 100,
|
||||
},
|
||||
Object {
|
||||
"x": 1000,
|
||||
"y": 200,
|
||||
},
|
||||
Object {
|
||||
"x": 2000,
|
||||
"y": 150,
|
||||
},
|
||||
Object {
|
||||
"x": 3000,
|
||||
"y": 250,
|
||||
},
|
||||
Object {
|
||||
"x": 4000,
|
||||
"y": 100,
|
||||
},
|
||||
Object {
|
||||
"x": 5000,
|
||||
"y": 50,
|
||||
},
|
||||
],
|
||||
"legendValue": "0 ms",
|
||||
"title": "Avg.",
|
||||
"type": "line",
|
||||
},
|
||||
Object {
|
||||
"color": "#ecae23",
|
||||
"data": Array [
|
||||
Object {
|
||||
"x": 0,
|
||||
"y": 200,
|
||||
},
|
||||
Object {
|
||||
"x": 1000,
|
||||
"y": 300,
|
||||
},
|
||||
Object {
|
||||
"x": 2000,
|
||||
"y": 250,
|
||||
},
|
||||
Object {
|
||||
"x": 3000,
|
||||
"y": 350,
|
||||
},
|
||||
Object {
|
||||
"x": 4000,
|
||||
"y": 200,
|
||||
},
|
||||
Object {
|
||||
"x": 5000,
|
||||
"y": 150,
|
||||
},
|
||||
],
|
||||
"title": "95th percentile",
|
||||
"titleShort": "95th",
|
||||
"type": "line",
|
||||
},
|
||||
Object {
|
||||
"color": "#f98510",
|
||||
"data": Array [
|
||||
Object {
|
||||
"x": 0,
|
||||
"y": 300,
|
||||
},
|
||||
Object {
|
||||
"x": 1000,
|
||||
"y": 400,
|
||||
},
|
||||
Object {
|
||||
"x": 2000,
|
||||
"y": 350,
|
||||
},
|
||||
Object {
|
||||
"x": 3000,
|
||||
"y": 450,
|
||||
},
|
||||
Object {
|
||||
"x": 4000,
|
||||
"y": 100,
|
||||
},
|
||||
Object {
|
||||
"x": 5000,
|
||||
"y": 50,
|
||||
},
|
||||
],
|
||||
"title": "99th percentile",
|
||||
"titleShort": "99th",
|
||||
"type": "line",
|
||||
},
|
||||
]
|
||||
`;
|
||||
|
||||
exports[`chartSelectors getTpmSeries should match snapshot 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"color": "#00b3a4",
|
||||
"data": Array [
|
||||
Object {
|
||||
"x": 0,
|
||||
"y": 5,
|
||||
},
|
||||
Object {
|
||||
"x": 1000,
|
||||
"y": 10,
|
||||
},
|
||||
Object {
|
||||
"x": 2000,
|
||||
"y": 3,
|
||||
},
|
||||
Object {
|
||||
"x": 3000,
|
||||
"y": 8,
|
||||
},
|
||||
Object {
|
||||
"x": 4000,
|
||||
"y": 4,
|
||||
},
|
||||
Object {
|
||||
"x": 5000,
|
||||
"y": 9,
|
||||
},
|
||||
],
|
||||
"legendValue": "10.0 tpm",
|
||||
"title": "HTTP 2xx",
|
||||
"type": "line",
|
||||
},
|
||||
Object {
|
||||
"color": "#f98510",
|
||||
"data": Array [
|
||||
Object {
|
||||
"x": 0,
|
||||
"y": 1,
|
||||
},
|
||||
Object {
|
||||
"x": 1000,
|
||||
"y": 2,
|
||||
},
|
||||
Object {
|
||||
"x": 2000,
|
||||
"y": 3,
|
||||
},
|
||||
Object {
|
||||
"x": 3000,
|
||||
"y": 2,
|
||||
},
|
||||
Object {
|
||||
"x": 4000,
|
||||
"y": 3,
|
||||
},
|
||||
Object {
|
||||
"x": 5000,
|
||||
"y": 1,
|
||||
},
|
||||
],
|
||||
"legendValue": "2.0 tpm",
|
||||
"title": "HTTP 4xx",
|
||||
"type": "line",
|
||||
},
|
||||
Object {
|
||||
"color": "#db1374",
|
||||
"data": Array [
|
||||
Object {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
},
|
||||
Object {
|
||||
"x": 1000,
|
||||
"y": 1,
|
||||
},
|
||||
Object {
|
||||
"x": 2000,
|
||||
"y": 2,
|
||||
},
|
||||
Object {
|
||||
"x": 3000,
|
||||
"y": 1,
|
||||
},
|
||||
Object {
|
||||
"x": 4000,
|
||||
"y": 0,
|
||||
},
|
||||
Object {
|
||||
"x": 5000,
|
||||
"y": 2,
|
||||
},
|
||||
],
|
||||
"legendValue": "1.0 tpm",
|
||||
"title": "HTTP 5xx",
|
||||
"type": "line",
|
||||
},
|
||||
]
|
||||
`;
|
|
@ -4,134 +4,103 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { AvgAnomalyBucket } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_avg_response_time_anomalies/get_anomaly_aggs/transform';
|
||||
import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_timeseries_data/transform';
|
||||
import { ApmTimeSeriesResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_timeseries_data/transform';
|
||||
import {
|
||||
getAnomalyBoundaryValues,
|
||||
getAnomalyScoreValues,
|
||||
getAnomalyScoreSeries,
|
||||
getResponseTimeSeries,
|
||||
getTpmSeries
|
||||
} from '../chartSelectors';
|
||||
import { anomalyData } from './mockData/anomalyData';
|
||||
|
||||
describe('chartSelectors', () => {
|
||||
describe('getAnomalyScoreValues', () => {
|
||||
it('should return anomaly score series', () => {
|
||||
const dates = [0, 1000, 2000, 3000, 4000, 5000, 6000];
|
||||
const buckets = [
|
||||
{
|
||||
anomalyScore: null
|
||||
},
|
||||
{
|
||||
anomalyScore: 80
|
||||
},
|
||||
{
|
||||
anomalyScore: 0
|
||||
},
|
||||
{
|
||||
anomalyScore: 0
|
||||
},
|
||||
{
|
||||
anomalyScore: 70
|
||||
},
|
||||
{
|
||||
anomalyScore: 80
|
||||
},
|
||||
{
|
||||
anomalyScore: 0
|
||||
}
|
||||
] as AvgAnomalyBucket[];
|
||||
|
||||
expect(getAnomalyScoreValues(dates, buckets, 1000)).toEqual([
|
||||
{ x: 1000, y: 1 },
|
||||
{ x: 2000, y: 1 },
|
||||
{ x: 3000 },
|
||||
{ x: 5000, y: 1 },
|
||||
{ x: 6000, y: 1 },
|
||||
{ x: 7000 }
|
||||
]);
|
||||
describe('getAnomalyScoreSeries', () => {
|
||||
it('should return anomalyScoreSeries', () => {
|
||||
const data = [{ x0: 0, x: 10 }];
|
||||
expect(getAnomalyScoreSeries(data)).toEqual({
|
||||
areaColor: 'rgba(146,0,0,0.1)',
|
||||
color: 'none',
|
||||
data: [{ x0: 0, x: 10 }],
|
||||
hideLegend: true,
|
||||
hideTooltipValue: true,
|
||||
title: 'Anomaly score',
|
||||
type: 'areaMaxHeight'
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getResponseTimeSeries', () => {
|
||||
const chartsData = {
|
||||
dates: [0, 1000, 2000, 3000, 4000, 5000],
|
||||
const apmTimeseries = {
|
||||
responseTimes: {
|
||||
avg: [100, 200, 150, 250, 100, 50],
|
||||
p95: [200, 300, 250, 350, 200, 150],
|
||||
p99: [300, 400, 350, 450, 100, 50]
|
||||
avg: [{ x: 0, y: 100 }, { x: 1000, y: 200 }],
|
||||
p95: [{ x: 0, y: 200 }, { x: 1000, y: 300 }],
|
||||
p99: [{ x: 0, y: 300 }, { x: 1000, y: 400 }]
|
||||
},
|
||||
overallAvgDuration: 200
|
||||
} as TimeSeriesAPIResponse;
|
||||
} as ApmTimeSeriesResponse;
|
||||
|
||||
it('should match snapshot', () => {
|
||||
expect(getResponseTimeSeries(chartsData)).toMatchSnapshot();
|
||||
expect(getResponseTimeSeries(apmTimeseries)).toEqual([
|
||||
{
|
||||
color: '#3185fc',
|
||||
data: [{ x: 0, y: 100 }, { x: 1000, y: 200 }],
|
||||
legendValue: '0 ms',
|
||||
title: 'Avg.',
|
||||
type: 'line'
|
||||
},
|
||||
{
|
||||
color: '#ecae23',
|
||||
data: [{ x: 0, y: 200 }, { x: 1000, y: 300 }],
|
||||
title: '95th percentile',
|
||||
titleShort: '95th',
|
||||
type: 'line'
|
||||
},
|
||||
{
|
||||
color: '#f98510',
|
||||
data: [{ x: 0, y: 300 }, { x: 1000, y: 400 }],
|
||||
title: '99th percentile',
|
||||
titleShort: '99th',
|
||||
type: 'line'
|
||||
}
|
||||
]);
|
||||
});
|
||||
|
||||
it('should return 3 series', () => {
|
||||
expect(getResponseTimeSeries(chartsData).length).toBe(3);
|
||||
expect(getResponseTimeSeries(apmTimeseries).length).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTpmSeries', () => {
|
||||
const chartsData = {
|
||||
dates: [0, 1000, 2000, 3000, 4000, 5000],
|
||||
const apmTimeseries = ({
|
||||
tpmBuckets: [
|
||||
{
|
||||
key: 'HTTP 2xx',
|
||||
avg: 10,
|
||||
values: [5, 10, 3, 8, 4, 9]
|
||||
},
|
||||
{
|
||||
key: 'HTTP 4xx',
|
||||
avg: 2,
|
||||
values: [1, 2, 3, 2, 3, 1]
|
||||
},
|
||||
{
|
||||
key: 'HTTP 5xx',
|
||||
avg: 1,
|
||||
values: [0, 1, 2, 1, 0, 2]
|
||||
}
|
||||
{ key: 'HTTP 2xx', dataPoints: [{ x: 0, y: 5 }, { x: 0, y: 2 }] },
|
||||
{ key: 'HTTP 4xx', dataPoints: [{ x: 0, y: 1 }] },
|
||||
{ key: 'HTTP 5xx', dataPoints: [{ x: 0, y: 0 }] }
|
||||
]
|
||||
} as TimeSeriesAPIResponse;
|
||||
|
||||
} as any) as ApmTimeSeriesResponse;
|
||||
const transactionType = 'MyTransactionType';
|
||||
|
||||
it('should match snapshot', () => {
|
||||
expect(getTpmSeries(chartsData, transactionType)).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAnomalyBoundaryValues', () => {
|
||||
const { dates, buckets } = anomalyData;
|
||||
const bucketSize = 240000;
|
||||
|
||||
it('should return correct buckets', () => {
|
||||
expect(getAnomalyBoundaryValues(dates, buckets, bucketSize)).toEqual([
|
||||
{ x: 1530614880000, y: 54799, y0: 15669 },
|
||||
{ x: 1530615060000, y: 49874, y0: 17808 },
|
||||
{ x: 1530615300000, y: 49421, y0: 18012 },
|
||||
{ x: 1530615540000, y: 49654, y0: 17889 },
|
||||
{ x: 1530615780000, y: 50026, y0: 17713 },
|
||||
{ x: 1530616020000, y: 49371, y0: 18044 },
|
||||
{ x: 1530616260000, y: 50110, y0: 17713 },
|
||||
{ x: 1530616500000, y: 50419, y0: 17582 },
|
||||
{ x: 1530616620000, y: 50419, y0: 17582 }
|
||||
expect(getTpmSeries(apmTimeseries, transactionType)).toEqual([
|
||||
{
|
||||
color: '#00b3a4',
|
||||
data: [{ x: 0, y: 5 }, { x: 0, y: 2 }],
|
||||
legendValue: '3.5 tpm',
|
||||
title: 'HTTP 2xx',
|
||||
type: 'line'
|
||||
},
|
||||
{
|
||||
color: '#f98510',
|
||||
data: [{ x: 0, y: 1 }],
|
||||
legendValue: '1.0 tpm',
|
||||
title: 'HTTP 4xx',
|
||||
type: 'line'
|
||||
},
|
||||
{
|
||||
color: '#db1374',
|
||||
data: [{ x: 0, y: 0 }],
|
||||
legendValue: '0.0 tpm',
|
||||
title: 'HTTP 5xx',
|
||||
type: 'line'
|
||||
}
|
||||
]);
|
||||
});
|
||||
|
||||
it('should extend the last bucket with a size of bucketSize', () => {
|
||||
const [lastBucket, secondLastBuckets] = getAnomalyBoundaryValues(
|
||||
dates,
|
||||
buckets,
|
||||
bucketSize
|
||||
).reverse();
|
||||
|
||||
expect(secondLastBuckets.y).toBe(lastBucket.y);
|
||||
expect(secondLastBuckets.y0).toBe(lastBucket.y0);
|
||||
expect(
|
||||
(lastBucket.x as number) - (secondLastBuckets.x as number)
|
||||
).toBeLessThanOrEqual(bucketSize);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -5,24 +5,21 @@
|
|||
*/
|
||||
|
||||
import d3 from 'd3';
|
||||
import { difference, last, memoize, zipObject } from 'lodash';
|
||||
import { difference, memoize, zipObject } from 'lodash';
|
||||
import mean from 'lodash.mean';
|
||||
import { rgba } from 'polished';
|
||||
import { AvgAnomalyBucket } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_avg_response_time_anomalies/get_anomaly_aggs/transform';
|
||||
import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_timeseries_data/transform';
|
||||
import { TimeSeriesAPIResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts';
|
||||
import { AnomalyTimeSeriesResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform';
|
||||
import { ApmTimeSeriesResponse } from 'x-pack/plugins/apm/server/lib/transactions/charts/get_timeseries_data/transform';
|
||||
import { StringMap } from 'x-pack/plugins/apm/typings/common';
|
||||
import {
|
||||
Coordinate,
|
||||
RectCoordinate
|
||||
} from 'x-pack/plugins/apm/typings/timeseries';
|
||||
import { colors } from '../../style/variables';
|
||||
import { asDecimal, asMillis, tpmUnit } from '../../utils/formatters';
|
||||
import { IUrlParams } from '../urlParams';
|
||||
|
||||
interface Coordinate {
|
||||
x: number;
|
||||
y?: number | null;
|
||||
}
|
||||
|
||||
interface BoundaryCoordinate extends Coordinate {
|
||||
y0: number | null;
|
||||
}
|
||||
|
||||
export const getEmptySerie = memoize(
|
||||
(start = Date.now() - 3600000, end = Date.now()) => {
|
||||
const dates = d3.time
|
||||
|
@@ -44,17 +41,18 @@ export const getEmptySerie = memoize(

 export function getCharts(
   urlParams: IUrlParams,
-  charts: TimeSeriesAPIResponse
+  timeseriesResponse: TimeSeriesAPIResponse
 ) {
   const { start, end, transactionType } = urlParams;
-  const noHits = charts.totalHits === 0;
+  const { apmTimeseries, anomalyTimeseries } = timeseriesResponse;
+  const noHits = apmTimeseries.totalHits === 0;
   const tpmSeries = noHits
     ? getEmptySerie(start, end)
-    : getTpmSeries(charts, transactionType);
+    : getTpmSeries(apmTimeseries, transactionType);

   const responseTimeSeries = noHits
     ? getEmptySerie(start, end)
-    : getResponseTimeSeries(charts);
+    : getResponseTimeSeries(apmTimeseries, anomalyTimeseries);

   return {
     noHits,
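A usage sketch of the reworked `getCharts`, with the payload shape assumed from INITIAL_DATA in the selectors above (the `as any` casts stand in for the real `IUrlParams` and `TimeSeriesAPIResponse` types):

const urlParams = { start: 90000, end: 200000, transactionType: 'request' };
const payload = {
  apmTimeseries: {
    totalHits: 10,
    responseTimes: { avg: [{ x: 90000, y: 100 }], p95: [], p99: [] },
    tpmBuckets: [],
    overallAvgDuration: 100
  },
  anomalyTimeseries: undefined
};

// The selector now receives the whole backend payload and splits it into the
// plain APM timeseries and the optional ML anomaly timeseries itself.
const charts = getCharts(urlParams as any, payload as any);
// charts.noHits === false; anomaly series are only added when anomalyTimeseries is set.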
@@ -68,21 +66,24 @@ interface TimeSerie {
   titleShort?: string;
   hideLegend?: boolean;
   hideTooltipValue?: boolean;
-  data: Coordinate[];
+  data: Array<Coordinate | RectCoordinate>;
   legendValue?: string;
   type: string;
   color: string;
   areaColor?: string;
 }

-export function getResponseTimeSeries(chartsData: TimeSeriesAPIResponse) {
-  const { dates, overallAvgDuration } = chartsData;
-  const { avg, p95, p99, avgAnomalies } = chartsData.responseTimes;
+export function getResponseTimeSeries(
+  apmTimeseries: ApmTimeSeriesResponse,
+  anomalyTimeseries?: AnomalyTimeSeriesResponse
+) {
+  const { overallAvgDuration } = apmTimeseries;
+  const { avg, p95, p99 } = apmTimeseries.responseTimes;

   const series: TimeSerie[] = [
     {
       title: 'Avg.',
-      data: getChartValues(dates, avg),
+      data: avg,
       legendValue: asMillis(overallAvgDuration),
       type: 'line',
       color: colors.apmBlue
@ -90,58 +91,61 @@ export function getResponseTimeSeries(chartsData: TimeSeriesAPIResponse) {
|
|||
{
|
||||
title: '95th percentile',
|
||||
titleShort: '95th',
|
||||
data: getChartValues(dates, p95),
|
||||
data: p95,
|
||||
type: 'line',
|
||||
color: colors.apmYellow
|
||||
},
|
||||
{
|
||||
title: '99th percentile',
|
||||
titleShort: '99th',
|
||||
data: getChartValues(dates, p99),
|
||||
data: p99,
|
||||
type: 'line',
|
||||
color: colors.apmOrange
|
||||
}
|
||||
];
|
||||
|
||||
if (avgAnomalies) {
|
||||
// insert after Avg. serie
|
||||
series.splice(1, 0, {
|
||||
title: 'Anomaly Boundaries',
|
||||
hideLegend: true,
|
||||
hideTooltipValue: true,
|
||||
data: getAnomalyBoundaryValues(
|
||||
dates,
|
||||
avgAnomalies.buckets,
|
||||
avgAnomalies.bucketSizeAsMillis
|
||||
),
|
||||
type: 'area',
|
||||
color: 'none',
|
||||
areaColor: rgba(colors.apmBlue, 0.1)
|
||||
});
|
||||
|
||||
series.splice(1, 0, {
|
||||
title: 'Anomaly score',
|
||||
hideLegend: true,
|
||||
hideTooltipValue: true,
|
||||
data: getAnomalyScoreValues(
|
||||
dates,
|
||||
avgAnomalies.buckets,
|
||||
avgAnomalies.bucketSizeAsMillis
|
||||
),
|
||||
type: 'areaMaxHeight',
|
||||
color: 'none',
|
||||
areaColor: rgba(colors.apmRed, 0.1)
|
||||
});
|
||||
if (anomalyTimeseries) {
|
||||
// insert after Avg. series
|
||||
series.splice(
|
||||
1,
|
||||
0,
|
||||
getAnomalyBoundariesSeries(anomalyTimeseries.anomalyBoundaries),
|
||||
getAnomalyScoreSeries(anomalyTimeseries.anomalyScore)
|
||||
);
|
||||
}
|
||||
|
||||
return series;
|
||||
}
|
||||
|
||||
export function getAnomalyScoreSeries(data: RectCoordinate[]) {
|
||||
return {
|
||||
title: 'Anomaly score',
|
||||
hideLegend: true,
|
||||
hideTooltipValue: true,
|
||||
data,
|
||||
type: 'areaMaxHeight',
|
||||
color: 'none',
|
||||
areaColor: rgba(colors.apmRed, 0.1)
|
||||
};
|
||||
}
|
||||
|
||||
function getAnomalyBoundariesSeries(data: Coordinate[]) {
|
||||
return {
|
||||
title: 'Anomaly Boundaries',
|
||||
hideLegend: true,
|
||||
hideTooltipValue: true,
|
||||
data,
|
||||
type: 'area',
|
||||
color: 'none',
|
||||
areaColor: rgba(colors.apmBlue, 0.1)
|
||||
};
|
||||
}
|
||||
|
||||
export function getTpmSeries(
|
||||
chartsData: TimeSeriesAPIResponse,
|
||||
apmTimeseries: ApmTimeSeriesResponse,
|
||||
transactionType?: string
|
||||
) {
|
||||
const { dates, tpmBuckets } = chartsData;
|
||||
const { tpmBuckets } = apmTimeseries;
|
||||
const bucketKeys = tpmBuckets.map(({ key }) => key);
|
||||
const getColor = getColorByKey(bucketKeys);
|
||||
const getTpmLegendTitle = (bucketKey: string) => {
|
||||
|
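The splice above inserts both anomaly series directly after the average line; a short sketch of the resulting order, assuming anomaly data is present and the inputs from the function above:

// Before the splice: ['Avg.', '95th percentile', '99th percentile']
// series.splice(1, 0, boundaries, score) yields:
// ['Avg.', 'Anomaly Boundaries', 'Anomaly score', '95th percentile', '99th percentile']
const titles = getResponseTimeSeries(apmTimeseries, anomalyTimeseries).map(s => s.title);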
@@ -154,10 +158,11 @@ export function getTpmSeries(
   };

   return tpmBuckets.map(bucket => {
+    const avg = mean(bucket.dataPoints.map(p => p.y));
     return {
       title: getTpmLegendTitle(bucket.key),
-      data: getChartValues(dates, bucket.values),
-      legendValue: `${asDecimal(bucket.avg)} ${tpmUnit(transactionType || '')}`,
+      data: bucket.dataPoints,
+      legendValue: `${asDecimal(avg)} ${tpmUnit(transactionType || '')}`,
       type: 'line',
       color: getColor(bucket.key)
     };
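The legend value is now computed on the client from the bucket's data points instead of a precomputed `avg` field; a worked example using the same `lodash.mean` helper (formatting approximated, `asDecimal`/`tpmUnit` omitted):

import mean from 'lodash.mean';

const bucket = {
  key: 'HTTP 2xx',
  dataPoints: [{ x: 0, y: 5 }, { x: 60000, y: 10 }, { x: 120000, y: 3 }]
};

// (5 + 10 + 3) / 3 = 6
const avg = mean(bucket.dataPoints.map(p => p.y));
const legendValue = `${avg.toFixed(1)} tpm`; // "6.0 tpm"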
@ -184,85 +189,3 @@ function getColorByKey(keys: string[]) {
|
|||
|
||||
return (key: string) => assignedColors[key] || unassignedColors[key];
|
||||
}
|
||||
|
||||
function getChartValues(
|
||||
dates: number[] = [],
|
||||
buckets: Array<number | null> = []
|
||||
) {
|
||||
return dates.map((x, i) => ({
|
||||
x,
|
||||
y: buckets[i]
|
||||
}));
|
||||
}
|
||||
|
||||
export function getAnomalyScoreValues(
|
||||
dates: number[] = [],
|
||||
buckets: AvgAnomalyBucket[] = [],
|
||||
bucketSizeAsMillis: number
|
||||
) {
|
||||
const ANOMALY_THRESHOLD = 75;
|
||||
const getX = (currentX: number, i: number) =>
|
||||
currentX + bucketSizeAsMillis * i;
|
||||
|
||||
return dates
|
||||
.map((date, i) => {
|
||||
const { anomalyScore } = buckets[i];
|
||||
return {
|
||||
x: date,
|
||||
anomalyScore
|
||||
};
|
||||
})
|
||||
.filter(p => {
|
||||
const res =
|
||||
p && p.anomalyScore != null && p.anomalyScore > ANOMALY_THRESHOLD;
|
||||
return res;
|
||||
})
|
||||
.reduce<Coordinate[]>((acc, p, i, points) => {
|
||||
const nextPoint = points[i + 1] || {};
|
||||
const endX = getX(p.x, 1);
|
||||
acc.push({ x: p.x, y: 1 });
|
||||
if (nextPoint.x == null || nextPoint.x > endX) {
|
||||
acc.push(
|
||||
{
|
||||
x: endX,
|
||||
y: 1
|
||||
},
|
||||
{
|
||||
x: getX(p.x, 2)
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, []);
|
||||
}
|
||||
|
||||
export function getAnomalyBoundaryValues(
|
||||
dates: number[] = [],
|
||||
buckets: AvgAnomalyBucket[] = [],
|
||||
bucketSizeAsMillis: number
|
||||
) {
|
||||
const lastX = last(dates);
|
||||
return dates
|
||||
.map((date, i) => {
|
||||
const bucket = buckets[i];
|
||||
return {
|
||||
x: date,
|
||||
y0: bucket.lower,
|
||||
y: bucket.upper
|
||||
};
|
||||
})
|
||||
.filter(p => p.y != null)
|
||||
.reduce<BoundaryCoordinate[]>((acc, p, i, points) => {
|
||||
const isLast = last(points) === p;
|
||||
acc.push(p);
|
||||
|
||||
if (isLast) {
|
||||
acc.push({
|
||||
...p,
|
||||
x: Math.min(p.x + bucketSizeAsMillis, lastX) // avoid going beyond the last date
|
||||
});
|
||||
}
|
||||
return acc;
|
||||
}, []);
|
||||
}
|
||||
|
|
|
@ -27,27 +27,14 @@ Array [
|
|||
},
|
||||
"date_histogram": Object {
|
||||
"extended_bounds": Object {
|
||||
"max": 1,
|
||||
"min": 0,
|
||||
"max": 200000,
|
||||
"min": 90000,
|
||||
},
|
||||
"field": "timestamp",
|
||||
"interval": "myInterval",
|
||||
"min_doc_count": 0,
|
||||
},
|
||||
},
|
||||
"top_hits": Object {
|
||||
"top_hits": Object {
|
||||
"_source": Object {
|
||||
"includes": Array [
|
||||
"bucket_span",
|
||||
],
|
||||
},
|
||||
"size": 1,
|
||||
"sort": Array [
|
||||
"bucket_span",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
"query": Object {
|
||||
"bool": Object {
|
||||
|
@ -56,12 +43,17 @@ Array [
|
|||
"range": Object {
|
||||
"timestamp": Object {
|
||||
"format": "epoch_millis",
|
||||
"gte": 0,
|
||||
"lte": 1,
|
||||
"gte": 90000,
|
||||
"lte": 200000,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
"must": Object {
|
||||
"exists": Object {
|
||||
"field": "bucket_span",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"size": 0,
|
|
@ -0,0 +1,38 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`getAnomalySeries should match snapshot 1`] = `
|
||||
Object {
|
||||
"anomalyBoundaries": Array [
|
||||
Object {
|
||||
"x": 5000,
|
||||
"y": 200,
|
||||
"y0": 20,
|
||||
},
|
||||
Object {
|
||||
"x": 15000,
|
||||
"y": 100,
|
||||
"y0": 20,
|
||||
},
|
||||
Object {
|
||||
"x": 25000,
|
||||
"y": 50,
|
||||
"y0": 10,
|
||||
},
|
||||
Object {
|
||||
"x": 30000,
|
||||
"y": 50,
|
||||
"y0": 10,
|
||||
},
|
||||
],
|
||||
"anomalyScore": Array [
|
||||
Object {
|
||||
"x": 25000,
|
||||
"x0": 15000,
|
||||
},
|
||||
Object {
|
||||
"x": 35000,
|
||||
"x0": 25000,
|
||||
},
|
||||
],
|
||||
}
|
||||
`;
|
|
@ -0,0 +1,33 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`anomalySeriesTransform should match snapshot 1`] = `
|
||||
Object {
|
||||
"anomalyBoundaries": Array [
|
||||
Object {
|
||||
"x": 10000,
|
||||
"y": 200,
|
||||
"y0": 20,
|
||||
},
|
||||
Object {
|
||||
"x": 15000,
|
||||
"y": 100,
|
||||
"y0": 20,
|
||||
},
|
||||
Object {
|
||||
"x": 25000,
|
||||
"y": 50,
|
||||
"y0": 10,
|
||||
},
|
||||
],
|
||||
"anomalyScore": Array [
|
||||
Object {
|
||||
"x": 25000,
|
||||
"x0": 15000,
|
||||
},
|
||||
Object {
|
||||
"x": 25000,
|
||||
"x0": 25000,
|
||||
},
|
||||
],
|
||||
}
|
||||
`;
|
|
@ -4,22 +4,21 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { anomalyAggsFetcher, ESResponse } from './fetcher';
|
||||
import { anomalySeriesFetcher, ESResponse } from './fetcher';
|
||||
|
||||
describe('anomalyAggsFetcher', () => {
|
||||
describe('when ES returns valid response', () => {
|
||||
let response: ESResponse;
|
||||
let response: ESResponse | undefined;
|
||||
let clientSpy: jest.Mock;
|
||||
|
||||
beforeEach(async () => {
|
||||
clientSpy = jest.fn().mockReturnValue('ES Response');
|
||||
response = await anomalyAggsFetcher({
|
||||
response = await anomalySeriesFetcher({
|
||||
serviceName: 'myServiceName',
|
||||
transactionType: 'myTransactionType',
|
||||
intervalString: 'myInterval',
|
||||
client: clientSpy,
|
||||
start: 0,
|
||||
end: 1
|
||||
mlBucketSize: 10,
|
||||
setup: { client: clientSpy, start: 100000, end: 200000 } as any
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -38,8 +37,8 @@ describe('anomalyAggsFetcher', () => {
|
|||
const failClient = jest.fn(() => Promise.reject(httpError));
|
||||
|
||||
return expect(
|
||||
anomalyAggsFetcher({ client: failClient } as any)
|
||||
).resolves.toEqual(null);
|
||||
anomalySeriesFetcher({ setup: { client: failClient } } as any)
|
||||
).resolves.toEqual(undefined);
|
||||
});
|
||||
|
||||
it('should throw other errors', () => {
|
||||
|
@ -47,8 +46,8 @@ describe('anomalyAggsFetcher', () => {
|
|||
const failClient = jest.fn(() => Promise.reject(otherError));
|
||||
|
||||
return expect(
|
||||
anomalyAggsFetcher({
|
||||
client: failClient
|
||||
anomalySeriesFetcher({
|
||||
setup: { client: failClient }
|
||||
} as any)
|
||||
).rejects.toThrow(otherError);
|
||||
});
|
|
@ -5,21 +5,11 @@
|
|||
*/
|
||||
|
||||
import { AggregationSearchResponse } from 'elasticsearch';
|
||||
import { TopHits } from 'x-pack/plugins/apm/typings/elasticsearch';
|
||||
import { ESClient } from '../../../../helpers/setup_request';
|
||||
import { Setup } from '../../../helpers/setup_request';
|
||||
|
||||
export interface IOptions {
|
||||
serviceName: string;
|
||||
transactionType: string;
|
||||
intervalString: string;
|
||||
client: ESClient;
|
||||
start: number;
|
||||
end: number;
|
||||
}
|
||||
|
||||
interface Bucket {
|
||||
key_as_string: string;
|
||||
key: number;
|
||||
export interface ESBucket {
|
||||
key_as_string: string; // timestamp as string
|
||||
key: number; // timestamp
|
||||
doc_count: number;
|
||||
anomaly_score: {
|
||||
value: number | null;
|
||||
|
@ -34,34 +24,47 @@ interface Bucket {
|
|||
|
||||
interface Aggs {
|
||||
ml_avg_response_times: {
|
||||
buckets: Bucket[];
|
||||
buckets: ESBucket[];
|
||||
};
|
||||
top_hits: TopHits<{
|
||||
bucket_span: number;
|
||||
}>;
|
||||
}
|
||||
|
||||
export type ESResponse = AggregationSearchResponse<void, Aggs> | null;
|
||||
export type ESResponse = AggregationSearchResponse<void, Aggs>;
|
||||
|
||||
export async function anomalyAggsFetcher({
|
||||
export async function anomalySeriesFetcher({
|
||||
serviceName,
|
||||
transactionType,
|
||||
intervalString,
|
||||
client,
|
||||
start,
|
||||
end
|
||||
}: IOptions): Promise<ESResponse> {
|
||||
mlBucketSize,
|
||||
setup
|
||||
}: {
|
||||
serviceName: string;
|
||||
transactionType: string;
|
||||
intervalString: string;
|
||||
mlBucketSize: number;
|
||||
setup: Setup;
|
||||
}) {
|
||||
const { client, start, end } = setup;
|
||||
|
||||
// move the start back with one bucket size, to ensure to get anomaly data in the beginning
|
||||
// this is required because ML has a minimum bucket size (default is 900s) so if our buckets are smaller, we might have several null buckets in the beginning
|
||||
const newStart = start - mlBucketSize * 1000;
|
||||
|
||||
const params = {
|
||||
index: `.ml-anomalies-${serviceName}-${transactionType}-high_mean_response_time`.toLowerCase(),
|
||||
body: {
|
||||
size: 0,
|
||||
query: {
|
||||
bool: {
|
||||
must: {
|
||||
exists: {
|
||||
field: 'bucket_span'
|
||||
}
|
||||
},
|
||||
filter: [
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
gte: start,
|
||||
gte: newStart,
|
||||
lte: end,
|
||||
format: 'epoch_millis'
|
||||
}
|
||||
|
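A small sketch of the start offset applied in the fetcher above, with `mlBucketSize` in seconds and `start`/`end` in epoch milliseconds; the example values match the updated query snapshot (start 100000, mlBucketSize 10, so gte becomes 90000):

// ML buckets can be much wider than the chart buckets (default bucket span is 900s),
// so the query window is moved back by one ML bucket to avoid empty leading buckets.
function getAnomalyQueryRange(start: number, end: number, mlBucketSize: number) {
  const newStart = start - mlBucketSize * 1000;
  return { gte: newStart, lte: end, format: 'epoch_millis' };
}

// getAnomalyQueryRange(100000, 200000, 10) -> { gte: 90000, lte: 200000, format: 'epoch_millis' }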
@ -71,20 +74,13 @@ export async function anomalyAggsFetcher({
|
|||
}
|
||||
},
|
||||
aggs: {
|
||||
top_hits: {
|
||||
top_hits: {
|
||||
sort: ['bucket_span'],
|
||||
_source: { includes: ['bucket_span'] },
|
||||
size: 1
|
||||
}
|
||||
},
|
||||
ml_avg_response_times: {
|
||||
date_histogram: {
|
||||
field: 'timestamp',
|
||||
interval: intervalString,
|
||||
min_doc_count: 0,
|
||||
extended_bounds: {
|
||||
min: start,
|
||||
min: newStart,
|
||||
max: end
|
||||
}
|
||||
},
|
||||
|
@ -103,7 +99,7 @@ export async function anomalyAggsFetcher({
|
|||
} catch (err) {
|
||||
const isHttpError = 'statusCode' in err;
|
||||
if (isHttpError) {
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
|
@ -0,0 +1,64 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { oc } from 'ts-optchain';
|
||||
import { Setup } from '../../../helpers/setup_request';
|
||||
|
||||
interface IOptions {
|
||||
serviceName: string;
|
||||
transactionType: string;
|
||||
setup: Setup;
|
||||
}
|
||||
|
||||
interface ESResponse {
|
||||
bucket_span: number;
|
||||
}
|
||||
|
||||
export async function getMlBucketSize({
|
||||
serviceName,
|
||||
transactionType,
|
||||
setup
|
||||
}: IOptions): Promise<number> {
|
||||
const { client, start, end } = setup;
|
||||
const params = {
|
||||
index: `.ml-anomalies-${serviceName}-${transactionType}-high_mean_response_time`.toLowerCase(),
|
||||
body: {
|
||||
_source: 'bucket_span',
|
||||
size: 1,
|
||||
query: {
|
||||
bool: {
|
||||
must: {
|
||||
exists: {
|
||||
field: 'bucket_span'
|
||||
}
|
||||
},
|
||||
filter: [
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
gte: start,
|
||||
lte: end,
|
||||
format: 'epoch_millis'
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
const resp = await client<ESResponse>('search', params);
|
||||
return oc(resp).hits.hits[0]._source.bucket_span(0);
|
||||
} catch (err) {
|
||||
const isHttpError = 'statusCode' in err;
|
||||
if (isHttpError) {
|
||||
return 0;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
|
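The `oc(resp).hits.hits[0]._source.bucket_span(0)` call returns 0 whenever any link in the chain is missing; an equivalent lookup without ts-optchain, shown as a sketch:

interface MlBucketSpanHit {
  _source: { bucket_span: number };
}

function readBucketSpan(resp?: { hits?: { hits?: MlBucketSpanHit[] } }): number {
  const firstHit = resp && resp.hits && resp.hits.hits && resp.hits.hits[0];
  return (firstHit && firstHit._source && firstHit._source.bucket_span) || 0;
}

// With the mlBucketSpanResponse mock below this returns 10.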
@ -0,0 +1,59 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { getAnomalySeries } from '.';
|
||||
import { mlAnomalyResponse } from './mock-responses/mlAnomalyResponse';
|
||||
import { mlBucketSpanResponse } from './mock-responses/mlBucketSpanResponse';
|
||||
import { AnomalyTimeSeriesResponse } from './transform';
|
||||
|
||||
describe('getAnomalySeries', () => {
|
||||
let avgAnomalies: AnomalyTimeSeriesResponse;
|
||||
beforeEach(async () => {
|
||||
const clientSpy = jest
|
||||
.fn()
|
||||
.mockResolvedValueOnce(mlBucketSpanResponse)
|
||||
.mockResolvedValueOnce(mlAnomalyResponse);
|
||||
|
||||
avgAnomalies = (await getAnomalySeries({
|
||||
serviceName: 'myServiceName',
|
||||
transactionType: 'myTransactionType',
|
||||
timeSeriesDates: [100, 100000],
|
||||
setup: {
|
||||
start: 0,
|
||||
end: 500000,
|
||||
client: clientSpy,
|
||||
config: {
|
||||
get: () => 'myIndex' as any
|
||||
}
|
||||
}
|
||||
})) as AnomalyTimeSeriesResponse;
|
||||
});
|
||||
|
||||
it('should remove buckets lower than threshold and outside date range from anomalyScore', () => {
|
||||
expect(avgAnomalies.anomalyScore).toEqual([
|
||||
{ x0: 15000, x: 25000 },
|
||||
{ x0: 25000, x: 35000 }
|
||||
]);
|
||||
});
|
||||
|
||||
it('should remove buckets outside date range from anomalyBoundaries', () => {
|
||||
expect(
|
||||
avgAnomalies.anomalyBoundaries.filter(
|
||||
bucket => bucket.x < 100 || bucket.x > 100000
|
||||
).length
|
||||
).toBe(0);
|
||||
});
|
||||
|
||||
it('should remove buckets with null from anomalyBoundaries', () => {
|
||||
expect(
|
||||
avgAnomalies.anomalyBoundaries.filter(p => p.y === null).length
|
||||
).toBe(0);
|
||||
});
|
||||
|
||||
it('should match snapshot', async () => {
|
||||
expect(avgAnomalies).toMatchSnapshot();
|
||||
});
|
||||
});
|
|
@ -0,0 +1,54 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { getBucketSize } from '../../../helpers/get_bucket_size';
|
||||
import { Setup } from '../../../helpers/setup_request';
|
||||
import { anomalySeriesFetcher } from './fetcher';
|
||||
import { getMlBucketSize } from './get_ml_bucket_size';
|
||||
import { anomalySeriesTransform } from './transform';
|
||||
|
||||
export async function getAnomalySeries({
|
||||
serviceName,
|
||||
transactionType,
|
||||
transactionName,
|
||||
timeSeriesDates,
|
||||
setup
|
||||
}: {
|
||||
serviceName: string;
|
||||
transactionType: string;
|
||||
transactionName?: string;
|
||||
timeSeriesDates: number[];
|
||||
setup: Setup;
|
||||
}) {
|
||||
// don't fetch anomalies for transaction details page
|
||||
if (transactionName) {
|
||||
return;
|
||||
}
|
||||
|
||||
const mlBucketSize = await getMlBucketSize({
|
||||
serviceName,
|
||||
transactionType,
|
||||
setup
|
||||
});
|
||||
|
||||
const { start, end } = setup;
|
||||
const { intervalString, bucketSize } = getBucketSize(start, end, 'auto');
|
||||
|
||||
const esResponse = await anomalySeriesFetcher({
|
||||
serviceName,
|
||||
transactionType,
|
||||
intervalString,
|
||||
mlBucketSize,
|
||||
setup
|
||||
});
|
||||
|
||||
return anomalySeriesTransform(
|
||||
esResponse,
|
||||
mlBucketSize,
|
||||
bucketSize,
|
||||
timeSeriesDates
|
||||
);
|
||||
}
|
|
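A usage sketch of `getAnomalySeries`, assuming the charts route builds `timeSeriesDates` from the regular APM timeseries and passes the shared `Setup` (the calling code is not part of this hunk):

const anomalyTimeseries = await getAnomalySeries({
  serviceName: 'opbeans-node',
  transactionType: 'request',
  transactionName: undefined, // anomalies are skipped on the transaction details page
  timeSeriesDates: apmTimeseries.responseTimes.avg.map(p => p.x),
  setup
});

// May be undefined: when a transactionName is given, or when the ML index
// lookup fails with an HTTP error (the fetcher returns undefined in that case).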
@ -4,9 +4,9 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { ESResponse } from '../get_anomaly_aggs/fetcher';
|
||||
import { ESResponse } from '../fetcher';
|
||||
|
||||
export const mainBucketsResponse: ESResponse = {
|
||||
export const mlAnomalyResponse: ESResponse = {
|
||||
took: 3,
|
||||
timed_out: false,
|
||||
_shards: {
|
||||
|
@ -25,21 +25,21 @@ export const mainBucketsResponse: ESResponse = {
|
|||
buckets: [
|
||||
{
|
||||
key_as_string: '2018-07-02T09:16:40.000Z',
|
||||
key: 1530523000000,
|
||||
key: 0,
|
||||
doc_count: 0,
|
||||
anomaly_score: {
|
||||
value: null
|
||||
},
|
||||
upper: {
|
||||
value: null
|
||||
value: 200
|
||||
},
|
||||
lower: {
|
||||
value: null
|
||||
value: 20
|
||||
}
|
||||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T09:25:00.000Z',
|
||||
key: 1530523500000,
|
||||
key: 5000,
|
||||
doc_count: 4,
|
||||
anomaly_score: {
|
||||
value: null
|
||||
|
@ -53,7 +53,7 @@ export const mainBucketsResponse: ESResponse = {
|
|||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T09:33:20.000Z',
|
||||
key: 1530524000000,
|
||||
key: 10000,
|
||||
doc_count: 0,
|
||||
anomaly_score: {
|
||||
value: null
|
||||
|
@ -67,21 +67,21 @@ export const mainBucketsResponse: ESResponse = {
|
|||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T09:41:40.000Z',
|
||||
key: 1530524500000,
|
||||
key: 15000,
|
||||
doc_count: 2,
|
||||
anomaly_score: {
|
||||
value: 0
|
||||
value: 90
|
||||
},
|
||||
upper: {
|
||||
value: 54158.77731018045
|
||||
value: 100
|
||||
},
|
||||
lower: {
|
||||
value: 16034.081569306454
|
||||
value: 20
|
||||
}
|
||||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T09:50:00.000Z',
|
||||
key: 1530525000000,
|
||||
key: 20000,
|
||||
doc_count: 0,
|
||||
anomaly_score: {
|
||||
value: null
|
||||
|
@ -95,65 +95,33 @@ export const mainBucketsResponse: ESResponse = {
|
|||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T09:58:20.000Z',
|
||||
key: 1530525500000,
|
||||
key: 25000,
|
||||
doc_count: 2,
|
||||
anomaly_score: {
|
||||
value: 0
|
||||
value: 100
|
||||
},
|
||||
upper: {
|
||||
value: 54158.77731018045
|
||||
value: 50
|
||||
},
|
||||
lower: {
|
||||
value: 16034.081569306454
|
||||
}
|
||||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T10:06:40.000Z',
|
||||
key: 1530526000000,
|
||||
doc_count: 0,
|
||||
anomaly_score: {
|
||||
value: null
|
||||
},
|
||||
upper: {
|
||||
value: null
|
||||
},
|
||||
lower: {
|
||||
value: null
|
||||
value: 10
|
||||
}
|
||||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T10:15:00.000Z',
|
||||
key: 1530526500000,
|
||||
key: 30000,
|
||||
doc_count: 2,
|
||||
anomaly_score: {
|
||||
value: 0
|
||||
},
|
||||
upper: {
|
||||
value: 54158.77731018045
|
||||
value: null
|
||||
},
|
||||
lower: {
|
||||
value: 16034.081569306454
|
||||
value: null
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
top_hits: {
|
||||
hits: {
|
||||
total: 2,
|
||||
max_score: 0,
|
||||
hits: [
|
||||
{
|
||||
_index: '.ml-anomalies-shared',
|
||||
_type: 'doc',
|
||||
_id:
|
||||
'opbeans-node-request-high_mean_response_time_model_plot_1530522900000_900_0_29791_0',
|
||||
_score: 0,
|
||||
_source: {
|
||||
bucket_span: 900
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export const mlBucketSpanResponse = {
|
||||
took: 1,
|
||||
timed_out: false,
|
||||
_shards: {
|
||||
total: 1,
|
||||
successful: 1,
|
||||
skipped: 0,
|
||||
failed: 0
|
||||
},
|
||||
hits: {
|
||||
total: 192,
|
||||
max_score: 1.0,
|
||||
hits: [
|
||||
{
|
||||
_index: '.ml-anomalies-shared',
|
||||
_type: 'doc',
|
||||
_id:
|
||||
'opbeans-go-request-high_mean_response_time_model_plot_1542636000000_900_0_29791_0',
|
||||
_score: 1.0,
|
||||
_source: {
|
||||
bucket_span: 10
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
|
@ -0,0 +1,302 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { oc } from 'ts-optchain';
|
||||
import { ESBucket, ESResponse } from './fetcher';
|
||||
import { mlAnomalyResponse } from './mock-responses/mlAnomalyResponse';
|
||||
import { anomalySeriesTransform, replaceFirstAndLastBucket } from './transform';
|
||||
|
||||
describe('anomalySeriesTransform', () => {
|
||||
it('should match snapshot', () => {
|
||||
const getMlBucketSize = 10;
|
||||
const bucketSize = 5;
|
||||
const timeSeriesDates = [10000, 25000];
|
||||
const anomalySeries = anomalySeriesTransform(
|
||||
mlAnomalyResponse,
|
||||
getMlBucketSize,
|
||||
bucketSize,
|
||||
timeSeriesDates
|
||||
);
|
||||
expect(anomalySeries).toMatchSnapshot();
|
||||
});
|
||||
|
||||
describe('anomalyScoreSeries', () => {
|
||||
it('should only return buckets within range and above threshold', () => {
|
||||
const esResponse = getESResponse([
|
||||
{
|
||||
key: 0,
|
||||
anomaly_score: { value: 90 }
|
||||
},
|
||||
{
|
||||
key: 5000,
|
||||
anomaly_score: { value: 0 }
|
||||
},
|
||||
{
|
||||
key: 10000,
|
||||
anomaly_score: { value: 90 }
|
||||
},
|
||||
{
|
||||
key: 15000,
|
||||
anomaly_score: { value: 0 }
|
||||
},
|
||||
{
|
||||
key: 20000,
|
||||
anomaly_score: { value: 90 }
|
||||
}
|
||||
] as ESBucket[]);
|
||||
|
||||
const getMlBucketSize = 5;
|
||||
const bucketSize = 5;
|
||||
const timeSeriesDates = [5000, 15000];
|
||||
const anomalySeries = anomalySeriesTransform(
|
||||
esResponse,
|
||||
getMlBucketSize,
|
||||
bucketSize,
|
||||
timeSeriesDates
|
||||
);
|
||||
|
||||
const buckets = anomalySeries!.anomalyScore;
|
||||
expect(buckets).toEqual([{ x0: 10000, x: 15000 }]);
|
||||
});
|
||||
|
||||
it('should decrease the x-value to avoid going beyond last date', () => {
|
||||
const esResponse = getESResponse([
|
||||
{
|
||||
key: 0,
|
||||
anomaly_score: { value: 0 }
|
||||
},
|
||||
{
|
||||
key: 5000,
|
||||
anomaly_score: { value: 90 }
|
||||
}
|
||||
] as ESBucket[]);
|
||||
|
||||
const getMlBucketSize = 10;
|
||||
const bucketSize = 5;
|
||||
const timeSeriesDates = [0, 10000];
|
||||
const anomalySeries = anomalySeriesTransform(
|
||||
esResponse,
|
||||
getMlBucketSize,
|
||||
bucketSize,
|
||||
timeSeriesDates
|
||||
);
|
||||
|
||||
const buckets = anomalySeries!.anomalyScore;
|
||||
expect(buckets).toEqual([{ x0: 5000, x: 10000 }]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('anomalyBoundariesSeries', () => {
|
||||
it('should trim buckets to time range', () => {
|
||||
const esResponse = getESResponse([
|
||||
{
|
||||
key: 0,
|
||||
upper: { value: 15 },
|
||||
lower: { value: 10 }
|
||||
},
|
||||
{
|
||||
key: 5000,
|
||||
upper: { value: 25 },
|
||||
lower: { value: 20 }
|
||||
},
|
||||
{
|
||||
key: 10000,
|
||||
upper: { value: 35 },
|
||||
lower: { value: 30 }
|
||||
},
|
||||
{
|
||||
key: 15000,
|
||||
upper: { value: 45 },
|
||||
lower: { value: 40 }
|
||||
}
|
||||
] as ESBucket[]);
|
||||
|
||||
const mlBucketSize = 10;
|
||||
const bucketSize = 5;
|
||||
const timeSeriesDates = [5000, 10000];
|
||||
const anomalySeries = anomalySeriesTransform(
|
||||
esResponse,
|
||||
mlBucketSize,
|
||||
bucketSize,
|
||||
timeSeriesDates
|
||||
);
|
||||
|
||||
const buckets = anomalySeries!.anomalyBoundaries;
|
||||
expect(buckets).toEqual([
|
||||
{ x: 5000, y: 25, y0: 20 },
|
||||
{ x: 10000, y: 35, y0: 30 }
|
||||
]);
|
||||
});
|
||||
|
||||
it('should replace first bucket in range', () => {
|
||||
const esResponse = getESResponse([
|
||||
{
|
||||
key: 0,
|
||||
anomaly_score: { value: 0 },
|
||||
upper: { value: 15 },
|
||||
lower: { value: 10 }
|
||||
},
|
||||
{
|
||||
key: 5000,
|
||||
anomaly_score: { value: 0 },
|
||||
upper: { value: null },
|
||||
lower: { value: null }
|
||||
},
|
||||
{
|
||||
key: 10000,
|
||||
anomaly_score: { value: 0 },
|
||||
upper: { value: 25 },
|
||||
lower: { value: 20 }
|
||||
}
|
||||
] as ESBucket[]);
|
||||
|
||||
const getMlBucketSize = 10;
|
||||
const bucketSize = 5;
|
||||
const timeSeriesDates = [5000, 10000];
|
||||
const anomalySeries = anomalySeriesTransform(
|
||||
esResponse,
|
||||
getMlBucketSize,
|
||||
bucketSize,
|
||||
timeSeriesDates
|
||||
);
|
||||
|
||||
const buckets = anomalySeries!.anomalyBoundaries;
|
||||
expect(buckets).toEqual([
|
||||
{ x: 5000, y: 15, y0: 10 },
|
||||
{ x: 10000, y: 25, y0: 20 }
|
||||
]);
|
||||
});
|
||||
|
||||
it('should replace last bucket in range', () => {
|
||||
const esResponse = getESResponse([
|
||||
{
|
||||
key: 0,
|
||||
anomaly_score: { value: 0 },
|
||||
upper: { value: 15 },
|
||||
lower: { value: 10 }
|
||||
},
|
||||
{
|
||||
key: 5000,
|
||||
anomaly_score: { value: 0 },
|
||||
upper: { value: null },
|
||||
lower: { value: null }
|
||||
},
|
||||
{
|
||||
key: 10000,
|
||||
anomaly_score: { value: 0 },
|
||||
upper: { value: null },
|
||||
lower: { value: null }
|
||||
}
|
||||
] as ESBucket[]);
|
||||
|
||||
const getMlBucketSize = 10;
|
||||
const bucketSize = 5;
|
||||
const timeSeriesDates = [5000, 10000];
|
||||
const anomalySeries = anomalySeriesTransform(
|
||||
esResponse,
|
||||
getMlBucketSize,
|
||||
bucketSize,
|
||||
timeSeriesDates
|
||||
);
|
||||
|
||||
const buckets = anomalySeries!.anomalyBoundaries;
|
||||
expect(buckets).toEqual([
|
||||
{ x: 5000, y: 15, y0: 10 },
|
||||
{ x: 10000, y: 15, y0: 10 }
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('replaceFirstAndLastBucket', () => {
|
||||
it('should extend the first bucket', () => {
|
||||
const buckets = [
|
||||
{
|
||||
x: 0,
|
||||
lower: 10,
|
||||
upper: 20
|
||||
},
|
||||
{
|
||||
x: 5,
|
||||
lower: null,
|
||||
upper: null
|
||||
},
|
||||
{
|
||||
x: 10,
|
||||
lower: null,
|
||||
upper: null
|
||||
},
|
||||
{
|
||||
x: 15,
|
||||
lower: 30,
|
||||
upper: 40
|
||||
}
|
||||
] as any;
|
||||
|
||||
const timeSeriesDates = [10, 15];
|
||||
expect(replaceFirstAndLastBucket(buckets, timeSeriesDates)).toEqual([
|
||||
{ x: 10, lower: 10, upper: 20 },
|
||||
{ x: 15, lower: 30, upper: 40 }
|
||||
]);
|
||||
});
|
||||
|
||||
it('should extend the last bucket', () => {
|
||||
const buckets = [
|
||||
{
|
||||
x: 10,
|
||||
lower: 30,
|
||||
upper: 40
|
||||
},
|
||||
{
|
||||
x: 15,
|
||||
lower: null,
|
||||
upper: null
|
||||
},
|
||||
{
|
||||
x: 20,
|
||||
lower: null,
|
||||
upper: null
|
||||
}
|
||||
] as any;
|
||||
|
||||
const timeSeriesDates = [10, 15, 20];
|
||||
expect(replaceFirstAndLastBucket(buckets, timeSeriesDates)).toEqual([
|
||||
{ x: 10, lower: 30, upper: 40 },
|
||||
{ x: 15, lower: null, upper: null },
|
||||
{ x: 20, lower: 30, upper: 40 }
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
function getESResponse(buckets: ESBucket[]): ESResponse {
|
||||
return {
|
||||
took: 3,
|
||||
timed_out: false,
|
||||
_shards: {
|
||||
total: 5,
|
||||
successful: 5,
|
||||
skipped: 0,
|
||||
failed: 0
|
||||
},
|
||||
hits: {
|
||||
total: 10,
|
||||
max_score: 0,
|
||||
hits: []
|
||||
},
|
||||
aggregations: {
|
||||
ml_avg_response_times: {
|
||||
buckets: buckets.map(bucket => {
|
||||
return {
|
||||
...bucket,
|
||||
lower: { value: oc(bucket).lower.value(null) },
|
||||
upper: { value: oc(bucket).upper.value(null) },
|
||||
anomaly_score: { value: oc(bucket).anomaly_score.value(null) }
|
||||
};
|
||||
})
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
|
@ -0,0 +1,137 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { first, last } from 'lodash';
|
||||
import { oc } from 'ts-optchain';
|
||||
import {
|
||||
Coordinate,
|
||||
RectCoordinate
|
||||
} from 'x-pack/plugins/apm/typings/timeseries';
|
||||
import { ESResponse } from './fetcher';
|
||||
|
||||
interface IBucket {
|
||||
x: number;
|
||||
anomalyScore: number | null;
|
||||
lower: number | null;
|
||||
upper: number | null;
|
||||
}
|
||||
|
||||
export interface AnomalyTimeSeriesResponse {
|
||||
anomalyScore: RectCoordinate[];
|
||||
anomalyBoundaries: Coordinate[];
|
||||
}
|
||||
|
||||
export function anomalySeriesTransform(
|
||||
response: ESResponse | undefined,
|
||||
mlBucketSize: number,
|
||||
bucketSize: number,
|
||||
timeSeriesDates: number[]
|
||||
): AnomalyTimeSeriesResponse | undefined {
|
||||
if (!response) {
|
||||
return;
|
||||
}
|
||||
|
||||
const buckets = oc(response)
|
||||
.aggregations.ml_avg_response_times.buckets([])
|
||||
.map(bucket => {
|
||||
return {
|
||||
x: bucket.key,
|
||||
anomalyScore: bucket.anomaly_score.value,
|
||||
lower: bucket.lower.value,
|
||||
upper: bucket.upper.value
|
||||
};
|
||||
});
|
||||
|
||||
const bucketSizeInMillis = Math.max(bucketSize, mlBucketSize) * 1000;
|
||||
|
||||
return {
|
||||
anomalyScore: getAnomalyScoreDataPoints(
|
||||
buckets,
|
||||
timeSeriesDates,
|
||||
bucketSizeInMillis
|
||||
),
|
||||
anomalyBoundaries: getAnomalyBoundaryDataPoints(buckets, timeSeriesDates)
|
||||
};
|
||||
}
|
||||
|
||||
export function getAnomalyScoreDataPoints(
|
||||
buckets: IBucket[],
|
||||
timeSeriesDates: number[],
|
||||
bucketSizeInMillis: number
|
||||
): RectCoordinate[] {
|
||||
const ANOMALY_THRESHOLD = 75;
|
||||
const firstDate = first(timeSeriesDates);
|
||||
const lastDate = last(timeSeriesDates);
|
||||
|
||||
return buckets
|
||||
.filter(
|
||||
bucket =>
|
||||
bucket.anomalyScore !== null && bucket.anomalyScore > ANOMALY_THRESHOLD
|
||||
)
|
||||
.filter(isInDateRange(firstDate, lastDate))
|
||||
.map(bucket => {
|
||||
return {
|
||||
x0: bucket.x,
|
||||
x: Math.min(bucket.x + bucketSizeInMillis, lastDate) // don't go beyond last date
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export function getAnomalyBoundaryDataPoints(
|
||||
buckets: IBucket[],
|
||||
timeSeriesDates: number[]
|
||||
): Coordinate[] {
|
||||
return replaceFirstAndLastBucket(buckets, timeSeriesDates)
|
||||
.filter(bucket => bucket.lower !== null)
|
||||
.map(bucket => {
|
||||
return {
|
||||
x: bucket.x,
|
||||
y0: bucket.lower,
|
||||
y: bucket.upper
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export function replaceFirstAndLastBucket(
|
||||
buckets: IBucket[],
|
||||
timeSeriesDates: number[]
|
||||
) {
|
||||
const firstDate = first(timeSeriesDates);
|
||||
const lastDate = last(timeSeriesDates);
|
||||
|
||||
const preBucketWithValue = buckets
|
||||
.filter(p => p.x <= firstDate)
|
||||
.reverse()
|
||||
.find(p => p.lower !== null);
|
||||
|
||||
const bucketsInRange = buckets.filter(isInDateRange(firstDate, lastDate));
|
||||
|
||||
// replace first bucket if it is null
|
||||
const firstBucket = first(bucketsInRange);
|
||||
if (preBucketWithValue && firstBucket && firstBucket.lower === null) {
|
||||
firstBucket.lower = preBucketWithValue.lower;
|
||||
firstBucket.upper = preBucketWithValue.upper;
|
||||
}
|
||||
|
||||
const lastBucketWithValue = [...buckets]
|
||||
.reverse()
|
||||
.find(p => p.lower !== null);
|
||||
|
||||
// replace last bucket if it is null
|
||||
const lastBucket = last(bucketsInRange);
|
||||
if (lastBucketWithValue && lastBucket && lastBucket.lower === null) {
|
||||
lastBucket.lower = lastBucketWithValue.lower;
|
||||
lastBucket.upper = lastBucketWithValue.upper;
|
||||
}
|
||||
|
||||
return bucketsInRange;
|
||||
}
|
||||
|
||||
// the anomaly time series can contain one or more extra buckets at the beginning
|
||||
// these extra buckets should be removed
|
||||
function isInDateRange(firstDate: number, lastDate: number) {
|
||||
return (p: IBucket) => p.x >= firstDate && p.x <= lastDate;
|
||||
}
|
|
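Editor's note — a minimal usage sketch of the new transform (not part of the diff; it reuses the mlAnomalyResponse mock imported by the test file above, and the bucket sizes and dates are assumed values mirroring the first test):

import { anomalySeriesTransform } from './transform';
import { mlAnomalyResponse } from './mock-responses/mlAnomalyResponse';

// ML bucket span (seconds), chart bucket size (seconds) and the x-values of
// the APM timeseries are assumed here.
const anomalySeries = anomalySeriesTransform(
  mlAnomalyResponse,
  10,             // mlBucketSize
  5,              // bucketSize
  [10000, 25000]  // timeSeriesDates
);

// anomalySeries.anomalyScore holds RectCoordinates ({ x0, x }) for buckets
// whose anomaly_score exceeds the 75 threshold, trimmed to the date range;
// anomalySeries.anomalyBoundaries holds { x, y0, y } points built from the
// lower/upper bounds, with the first and last bucket back-filled.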
@ -1,46 +0,0 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`get_buckets_with_initial_anomaly_bounds should return correct buckets 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"anomalyScore": 0,
|
||||
"lower": 17688.182675688193,
|
||||
"upper": 50381.01051622894,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": 0,
|
||||
"lower": 16034.081569306454,
|
||||
"upper": 54158.77731018045,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": 0,
|
||||
"lower": 16034.081569306454,
|
||||
"upper": 54158.77731018045,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": 0,
|
||||
"lower": 16034.081569306454,
|
||||
"upper": 54158.77731018045,
|
||||
},
|
||||
]
|
||||
`;
|
|
@ -1,49 +0,0 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`anomalyAggsTransform should match snapshot 1`] = `
|
||||
Object {
|
||||
"bucketSize": 900,
|
||||
"buckets": Array [
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": 0,
|
||||
"lower": 16034.081569306454,
|
||||
"upper": 54158.77731018045,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": 0,
|
||||
"lower": 16034.081569306454,
|
||||
"upper": 54158.77731018045,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": null,
|
||||
"lower": null,
|
||||
"upper": null,
|
||||
},
|
||||
Object {
|
||||
"anomalyScore": 0,
|
||||
"lower": 16034.081569306454,
|
||||
"upper": 54158.77731018045,
|
||||
},
|
||||
],
|
||||
}
|
||||
`;
|
|
@ -1,13 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { anomalyAggsFetcher, IOptions } from './fetcher';
|
||||
import { anomalyAggsTransform } from './transform';
|
||||
|
||||
export async function getAnomalyAggs(options: IOptions) {
|
||||
const response = await anomalyAggsFetcher(options);
|
||||
return anomalyAggsTransform(response);
|
||||
}
|
|
@ -1,18 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { mainBucketsResponse } from '../mock-responses/mainBucketsResponse';
|
||||
import { anomalyAggsTransform } from './transform';
|
||||
|
||||
describe('anomalyAggsTransform', () => {
|
||||
it('should return null if response is empty', () => {
|
||||
expect(anomalyAggsTransform(null)).toBe(null);
|
||||
});
|
||||
|
||||
it('should match snapshot', () => {
|
||||
expect(anomalyAggsTransform(mainBucketsResponse)).toMatchSnapshot();
|
||||
});
|
||||
});
|
|
@ -1,37 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { oc } from 'ts-optchain';
|
||||
import { ESResponse } from './fetcher';
|
||||
|
||||
export interface AvgAnomalyBucket {
|
||||
anomalyScore: number | null;
|
||||
lower: number | null;
|
||||
upper: number | null;
|
||||
}
|
||||
|
||||
export function anomalyAggsTransform(response: ESResponse) {
|
||||
if (!response) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const buckets = oc(response)
|
||||
.aggregations.ml_avg_response_times.buckets([])
|
||||
.map(bucket => {
|
||||
return {
|
||||
anomalyScore: bucket.anomaly_score.value,
|
||||
lower: bucket.lower.value,
|
||||
upper: bucket.upper.value
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
buckets,
|
||||
bucketSize: oc(
|
||||
response
|
||||
).aggregations.top_hits.hits.hits[0]._source.bucket_span(0)
|
||||
};
|
||||
}
|
|
@ -1,55 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { getAnomalyAggs } from './get_anomaly_aggs';
|
||||
import { AvgAnomalyBucket } from './get_anomaly_aggs/transform';
|
||||
import { getBucketWithInitialAnomalyBounds } from './get_buckets_with_initial_anomaly_bounds';
|
||||
import { firstBucketsResponse } from './mock-responses/firstBucketsResponse';
|
||||
import { mainBucketsResponse } from './mock-responses/mainBucketsResponse';
|
||||
|
||||
describe('get_buckets_with_initial_anomaly_bounds', () => {
|
||||
let buckets: AvgAnomalyBucket[];
|
||||
let mainBuckets: AvgAnomalyBucket[];
|
||||
|
||||
beforeEach(async () => {
|
||||
const response = await getAnomalyAggs({
|
||||
serviceName: 'myServiceName',
|
||||
transactionType: 'myTransactionType',
|
||||
intervalString: '',
|
||||
client: () => mainBucketsResponse as any,
|
||||
start: 0,
|
||||
end: 1
|
||||
});
|
||||
|
||||
mainBuckets = response!.buckets;
|
||||
buckets = await getBucketWithInitialAnomalyBounds({
|
||||
serviceName: 'myServiceName',
|
||||
transactionType: 'myTransactionType',
|
||||
start: 1530523322742,
|
||||
client: () => firstBucketsResponse as any,
|
||||
buckets: mainBuckets,
|
||||
bucketSize: 900
|
||||
});
|
||||
});
|
||||
|
||||
it('should return correct buckets', () => {
|
||||
expect(buckets).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should not change the number of buckets', () => {
|
||||
expect(mainBuckets.length).toEqual(buckets.length);
|
||||
});
|
||||
|
||||
it('should replace the first bucket but leave all other buckets the same', () => {
|
||||
buckets.forEach((bucket, i) => {
|
||||
if (i === 0) {
|
||||
expect(mainBuckets[0]).not.toEqual(bucket);
|
||||
} else {
|
||||
expect(mainBuckets[i]).toBe(bucket);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,66 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { last } from 'lodash';
|
||||
import { ESClient } from '../../../helpers/setup_request';
|
||||
import { getAnomalyAggs } from './get_anomaly_aggs';
|
||||
import { AvgAnomalyBucket } from './get_anomaly_aggs/transform';
|
||||
|
||||
interface Props {
|
||||
serviceName: string;
|
||||
transactionType: string;
|
||||
buckets: AvgAnomalyBucket[];
|
||||
bucketSize: number;
|
||||
start: number;
|
||||
client: ESClient;
|
||||
}
|
||||
|
||||
export async function getBucketWithInitialAnomalyBounds({
|
||||
serviceName,
|
||||
transactionType,
|
||||
buckets,
|
||||
bucketSize,
|
||||
start,
|
||||
client
|
||||
}: Props) {
|
||||
// abort if first bucket already has values for initial anomaly bounds
|
||||
if (buckets[0].lower || !bucketSize) {
|
||||
return buckets;
|
||||
}
|
||||
|
||||
const newStart = start - bucketSize * 1000;
|
||||
const newEnd = start;
|
||||
|
||||
const aggs = await getAnomalyAggs({
|
||||
serviceName,
|
||||
transactionType,
|
||||
intervalString: `${bucketSize}s`,
|
||||
client,
|
||||
start: newStart,
|
||||
end: newEnd
|
||||
});
|
||||
|
||||
if (!aggs) {
|
||||
return buckets;
|
||||
}
|
||||
|
||||
const firstBucketWithBounds = last(
|
||||
aggs.buckets.filter(bucket => bucket.lower)
|
||||
);
|
||||
|
||||
if (!firstBucketWithBounds) {
|
||||
return buckets;
|
||||
}
|
||||
|
||||
return replaceFirstItem(buckets, firstBucketWithBounds);
|
||||
}
|
||||
|
||||
// copy array and replace first item
|
||||
function replaceFirstItem<T>(array: T[], value: T) {
|
||||
const ret = array.slice(0);
|
||||
ret[0] = value;
|
||||
return ret;
|
||||
}
|
|
@ -1,55 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { getAvgResponseTimeAnomalies } from '.';
|
||||
import { firstBucketsResponse } from './mock-responses/firstBucketsResponse';
|
||||
import { mainBucketsResponse } from './mock-responses/mainBucketsResponse';
|
||||
|
||||
describe('get_avg_response_time_anomalies', () => {
|
||||
it('', async () => {
|
||||
const clientSpy = jest
|
||||
.fn()
|
||||
.mockResolvedValueOnce(mainBucketsResponse)
|
||||
.mockResolvedValueOnce(firstBucketsResponse);
|
||||
|
||||
const avgAnomalies = await getAvgResponseTimeAnomalies({
|
||||
serviceName: 'myServiceName',
|
||||
transactionType: 'myTransactionType',
|
||||
setup: {
|
||||
start: 1528113600000,
|
||||
end: 1528977600000,
|
||||
client: clientSpy,
|
||||
config: {
|
||||
get: () => 'myIndex' as any
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
expect(avgAnomalies).toEqual({
|
||||
bucketSizeAsMillis: 10800000,
|
||||
buckets: [
|
||||
{
|
||||
anomalyScore: 0,
|
||||
lower: 17688.182675688193,
|
||||
upper: 50381.01051622894
|
||||
},
|
||||
{ anomalyScore: null, lower: null, upper: null },
|
||||
{
|
||||
anomalyScore: 0,
|
||||
lower: 16034.081569306454,
|
||||
upper: 54158.77731018045
|
||||
},
|
||||
{ anomalyScore: null, lower: null, upper: null },
|
||||
{
|
||||
anomalyScore: 0,
|
||||
lower: 16034.081569306454,
|
||||
upper: 54158.77731018045
|
||||
},
|
||||
{ anomalyScore: null, lower: null, upper: null }
|
||||
]
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,60 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { getBucketSize } from '../../../helpers/get_bucket_size';
|
||||
import { IOptions } from '../get_timeseries_data';
|
||||
import { getAnomalyAggs } from './get_anomaly_aggs';
|
||||
import { AvgAnomalyBucket } from './get_anomaly_aggs/transform';
|
||||
import { getBucketWithInitialAnomalyBounds } from './get_buckets_with_initial_anomaly_bounds';
|
||||
|
||||
export interface IAvgAnomalies {
|
||||
bucketSizeAsMillis: number;
|
||||
buckets: AvgAnomalyBucket[];
|
||||
}
|
||||
|
||||
export type IAvgAnomaliesResponse = IAvgAnomalies | undefined;
|
||||
|
||||
export async function getAvgResponseTimeAnomalies({
|
||||
serviceName,
|
||||
transactionType,
|
||||
transactionName,
|
||||
setup
|
||||
}: IOptions): Promise<IAvgAnomaliesResponse> {
|
||||
const { start, end, client } = setup;
|
||||
const { intervalString, bucketSize } = getBucketSize(start, end, 'auto');
|
||||
|
||||
// don't fetch anomalies for transaction details page
|
||||
if (transactionName) {
|
||||
return;
|
||||
}
|
||||
|
||||
const aggs = await getAnomalyAggs({
|
||||
serviceName,
|
||||
transactionType,
|
||||
intervalString,
|
||||
client,
|
||||
start,
|
||||
end
|
||||
});
|
||||
|
||||
if (!aggs) {
|
||||
return;
|
||||
}
|
||||
|
||||
const buckets = await getBucketWithInitialAnomalyBounds({
|
||||
serviceName,
|
||||
transactionType,
|
||||
buckets: aggs.buckets.slice(1, -1),
|
||||
bucketSize: aggs.bucketSize,
|
||||
start,
|
||||
client
|
||||
});
|
||||
|
||||
return {
|
||||
buckets,
|
||||
bucketSizeAsMillis: Math.max(bucketSize, aggs.bucketSize) * 1000
|
||||
};
|
||||
}
|
|
@ -1,89 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { ESResponse } from '../get_anomaly_aggs/fetcher';
|
||||
|
||||
export const firstBucketsResponse: ESResponse = {
|
||||
took: 22,
|
||||
timed_out: false,
|
||||
_shards: {
|
||||
total: 5,
|
||||
successful: 5,
|
||||
skipped: 0,
|
||||
failed: 0
|
||||
},
|
||||
hits: {
|
||||
total: 2,
|
||||
max_score: 0,
|
||||
hits: []
|
||||
},
|
||||
aggregations: {
|
||||
ml_avg_response_times: {
|
||||
buckets: [
|
||||
{
|
||||
key_as_string: '2018-07-02T09:00:00.000Z',
|
||||
key: 1530522000000,
|
||||
doc_count: 0,
|
||||
anomaly_score: {
|
||||
value: null
|
||||
},
|
||||
upper: {
|
||||
value: null
|
||||
},
|
||||
lower: {
|
||||
value: null
|
||||
}
|
||||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T09:08:20.000Z',
|
||||
key: 1530522500000,
|
||||
doc_count: 2,
|
||||
anomaly_score: {
|
||||
value: 0
|
||||
},
|
||||
upper: {
|
||||
value: 50381.01051622894
|
||||
},
|
||||
lower: {
|
||||
value: 17688.182675688193
|
||||
}
|
||||
},
|
||||
{
|
||||
key_as_string: '2018-07-02T09:16:40.000Z',
|
||||
key: 1530523000000,
|
||||
doc_count: 0,
|
||||
anomaly_score: {
|
||||
value: null
|
||||
},
|
||||
upper: {
|
||||
value: null
|
||||
},
|
||||
lower: {
|
||||
value: null
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
top_hits: {
|
||||
hits: {
|
||||
total: 2,
|
||||
max_score: 0,
|
||||
hits: [
|
||||
{
|
||||
_index: '.ml-anomalies-shared',
|
||||
_type: 'doc',
|
||||
_id:
|
||||
'opbeans-node-request-high_mean_response_time_model_plot_1530522900000_900_0_29791_0',
|
||||
_score: 0,
|
||||
_source: {
|
||||
bucket_span: 900
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
File diff suppressed because it is too large
|
@ -5,7 +5,6 @@
|
|||
*/
|
||||
|
||||
import { AggregationSearchResponse } from 'elasticsearch';
|
||||
import { IOptions } from '.';
|
||||
import {
|
||||
SERVICE_NAME,
|
||||
TRANSACTION_DURATION,
|
||||
|
@ -14,6 +13,7 @@ import {
|
|||
TRANSACTION_TYPE
|
||||
} from '../../../../../common/constants';
|
||||
import { getBucketSize } from '../../../helpers/get_bucket_size';
|
||||
import { Setup } from '../../../helpers/setup_request';
|
||||
|
||||
interface ResponseTimeBucket {
|
||||
key_as_string: string;
|
||||
|
@ -31,11 +31,17 @@ interface ResponseTimeBucket {
|
|||
}
|
||||
|
||||
interface TransactionResultBucket {
|
||||
/**
|
||||
* transaction result eg. 2xx
|
||||
*/
|
||||
key: string;
|
||||
doc_count: number;
|
||||
timeseries: {
|
||||
buckets: Array<{
|
||||
key_as_string: string;
|
||||
/**
|
||||
* timestamp in ms
|
||||
*/
|
||||
key: number;
|
||||
doc_count: number;
|
||||
}>;
|
||||
|
@ -63,7 +69,12 @@ export function timeseriesFetcher({
|
|||
transactionType,
|
||||
transactionName,
|
||||
setup
|
||||
}: IOptions): Promise<ESResponse> {
|
||||
}: {
|
||||
serviceName: string;
|
||||
transactionType: string;
|
||||
transactionName?: string;
|
||||
setup: Setup;
|
||||
}): Promise<ESResponse> {
|
||||
const { start, end, esFilterQuery, client, config } = setup;
|
||||
const { intervalString } = getBucketSize(start, end, 'auto');
|
||||
|
||||
|
|
|
@ -6,26 +6,21 @@
|
|||
|
||||
import { getBucketSize } from '../../../helpers/get_bucket_size';
|
||||
import { Setup } from '../../../helpers/setup_request';
|
||||
import { getAvgResponseTimeAnomalies } from '../get_avg_response_time_anomalies';
|
||||
import { timeseriesFetcher } from './fetcher';
|
||||
import { timeseriesTransformer } from './transform';
|
||||
|
||||
export interface IOptions {
|
||||
export async function getApmTimeseriesData(options: {
|
||||
serviceName: string;
|
||||
transactionType: string;
|
||||
transactionName?: string;
|
||||
setup: Setup;
|
||||
}
|
||||
|
||||
export async function getTimeseriesData(options: IOptions) {
|
||||
}) {
|
||||
const { start, end } = options.setup;
|
||||
const { bucketSize } = getBucketSize(start, end, 'auto');
|
||||
|
||||
const avgAnomaliesResponse = await getAvgResponseTimeAnomalies(options);
|
||||
const timeseriesResponse = await timeseriesFetcher(options);
|
||||
return timeseriesTransformer({
|
||||
timeseriesResponse,
|
||||
avgAnomaliesResponse,
|
||||
bucketSize
|
||||
});
|
||||
}
|
||||
|
|
|
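Editor's note — a hypothetical call site for the renamed function (the setup value and the service/transaction names are assumptions, not taken from this diff):

import { Setup } from '../../../helpers/setup_request';
import { getApmTimeseriesData } from './index';

async function loadApmTimeseries(setup: Setup) {
  return getApmTimeseriesData({
    serviceName: 'opbeans-node',
    transactionType: 'request',
    setup
  });
}

// The transformed result now carries x/y pairs directly:
// responseTimes.avg / p95 / p99 are Coordinate[] and
// tpmBuckets[i].dataPoints hold the per-bucket TPM values.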
@ -7,17 +7,16 @@
|
|||
import { first, last } from 'lodash';
|
||||
import { timeseriesResponse } from './mock-responses/timeseries_response';
|
||||
import {
|
||||
ApmTimeSeriesResponse,
|
||||
getTpmBuckets,
|
||||
TimeSeriesAPIResponse,
|
||||
timeseriesTransformer
|
||||
} from './transform';
|
||||
|
||||
describe('timeseriesTransformer', () => {
|
||||
let res: TimeSeriesAPIResponse;
|
||||
let res: ApmTimeSeriesResponse;
|
||||
beforeEach(async () => {
|
||||
res = await timeseriesTransformer({
|
||||
timeseriesResponse,
|
||||
avgAnomaliesResponse: undefined,
|
||||
bucketSize: 12
|
||||
});
|
||||
});
|
||||
|
@ -27,9 +26,11 @@ describe('timeseriesTransformer', () => {
|
|||
bucket => bucket.key
|
||||
);
|
||||
|
||||
expect(res.dates).not.toContain(first(mockDates));
|
||||
expect(res.dates).not.toContain(last(mockDates));
|
||||
expect(res.tpmBuckets[0].values).toHaveLength(res.dates.length);
|
||||
expect(first(res.responseTimes.avg).x).not.toBe(first(mockDates));
|
||||
expect(last(res.responseTimes.avg).x).not.toBe(last(mockDates));
|
||||
|
||||
expect(first(res.tpmBuckets[0].dataPoints).x).not.toBe(first(mockDates));
|
||||
expect(last(res.tpmBuckets[0].dataPoints).x).not.toBe(last(mockDates));
|
||||
});
|
||||
|
||||
it('should have correct order', () => {
|
||||
|
@ -109,8 +110,8 @@ describe('getTpmBuckets', () => {
|
|||
];
|
||||
const bucketSize = 10;
|
||||
expect(getTpmBuckets(buckets, bucketSize)).toEqual([
|
||||
{ avg: 1500, key: 'HTTP 4xx', values: [1200, 1800] },
|
||||
{ avg: 1800, key: 'HTTP 5xx', values: [3000, 600] }
|
||||
{ dataPoints: [{ x: 1, y: 1200 }, { x: 2, y: 1800 }], key: 'HTTP 4xx' },
|
||||
{ dataPoints: [{ x: 1, y: 3000 }, { x: 2, y: 600 }], key: 'HTTP 5xx' }
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -5,59 +5,47 @@
|
|||
*/
|
||||
|
||||
import { isNumber, round, sortBy } from 'lodash';
|
||||
import mean from 'lodash.mean';
|
||||
import { oc } from 'ts-optchain';
|
||||
import { IAvgAnomaliesResponse } from '../get_avg_response_time_anomalies';
|
||||
import { Coordinate } from 'x-pack/plugins/apm/typings/timeseries';
|
||||
import { ESResponse } from './fetcher';
|
||||
|
||||
type MaybeNumber = number | null;
|
||||
|
||||
export interface TimeSeriesAPIResponse {
|
||||
export interface ApmTimeSeriesResponse {
|
||||
totalHits: number;
|
||||
dates: number[];
|
||||
responseTimes: {
|
||||
avg: MaybeNumber[];
|
||||
p95: MaybeNumber[];
|
||||
p99: MaybeNumber[];
|
||||
avgAnomalies?: IAvgAnomaliesResponse;
|
||||
avg: Coordinate[];
|
||||
p95: Coordinate[];
|
||||
p99: Coordinate[];
|
||||
};
|
||||
tpmBuckets: Array<{
|
||||
key: string;
|
||||
avg: number;
|
||||
values: number[];
|
||||
dataPoints: Coordinate[];
|
||||
}>;
|
||||
overallAvgDuration?: number;
|
||||
}
|
||||
|
||||
export function timeseriesTransformer({
|
||||
timeseriesResponse,
|
||||
avgAnomaliesResponse,
|
||||
bucketSize
|
||||
}: {
|
||||
timeseriesResponse: ESResponse;
|
||||
avgAnomaliesResponse: IAvgAnomaliesResponse;
|
||||
bucketSize: number;
|
||||
}): TimeSeriesAPIResponse {
|
||||
}): ApmTimeSeriesResponse {
|
||||
const aggs = timeseriesResponse.aggregations;
|
||||
const overallAvgDuration = oc(aggs).overall_avg_duration.value();
|
||||
|
||||
const responseTimeBuckets = oc(aggs)
|
||||
.response_times.buckets([])
|
||||
.slice(1, -1);
|
||||
const dates = responseTimeBuckets.map(bucket => bucket.key);
|
||||
const { avg, p95, p99 } = getResponseTime(responseTimeBuckets);
|
||||
|
||||
const transactionResultBuckets = oc(aggs).transaction_results.buckets([]);
|
||||
const tpmBuckets = getTpmBuckets(transactionResultBuckets, bucketSize);
|
||||
|
||||
return {
|
||||
totalHits: timeseriesResponse.hits.total,
|
||||
dates,
|
||||
responseTimes: {
|
||||
avg,
|
||||
p95,
|
||||
p99,
|
||||
avgAnomalies: avgAnomaliesResponse
|
||||
p99
|
||||
},
|
||||
tpmBuckets,
|
||||
overallAvgDuration
|
||||
|
@ -69,15 +57,14 @@ export function getTpmBuckets(
|
|||
bucketSize: number
|
||||
) {
|
||||
const buckets = transactionResultBuckets.map(({ key, timeseries }) => {
|
||||
const tpmValues = timeseries.buckets
|
||||
.slice(1, -1)
|
||||
.map(bucket => round(bucket.doc_count * (60 / bucketSize), 1));
|
||||
const dataPoints = timeseries.buckets.slice(1, -1).map(bucket => {
|
||||
return {
|
||||
x: bucket.key,
|
||||
y: round(bucket.doc_count * (60 / bucketSize), 1)
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
key,
|
||||
avg: mean(tpmValues),
|
||||
values: tpmValues
|
||||
};
|
||||
return { key, dataPoints };
|
||||
});
|
||||
|
||||
return sortBy(
|
||||
|
@ -93,15 +80,15 @@ function getResponseTime(
|
|||
(acc, bucket) => {
|
||||
const { '95.0': p95, '99.0': p99 } = bucket.pct.values;
|
||||
|
||||
acc.avg.push(bucket.avg.value);
|
||||
acc.p95.push(isNumber(p95) ? p95 : null);
|
||||
acc.p99.push(isNumber(p99) ? p99 : null);
|
||||
acc.avg.push({ x: bucket.key, y: bucket.avg.value });
|
||||
acc.p95.push({ x: bucket.key, y: isNumber(p95) ? p95 : null });
|
||||
acc.p99.push({ x: bucket.key, y: isNumber(p99) ? p99 : null });
|
||||
return acc;
|
||||
},
|
||||
{
|
||||
avg: [] as MaybeNumber[],
|
||||
p95: [] as MaybeNumber[],
|
||||
p99: [] as MaybeNumber[]
|
||||
avg: [] as Coordinate[],
|
||||
p95: [] as Coordinate[],
|
||||
p99: [] as Coordinate[]
|
||||
}
|
||||
);
|
||||
}
|
||||
|
|
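Editor's note — a worked example of the TPM conversion used in getTpmBuckets, to make the new dataPoints shape concrete (the bucket values are assumed, matching the test above):

import { round } from 'lodash';

const bucketSize = 10; // seconds per histogram bucket
const bucket = { key: 1, doc_count: 200 };

const dataPoint = {
  x: bucket.key,
  y: round(bucket.doc_count * (60 / bucketSize), 1) // 200 * 6 = 1200 tpm
};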
38
x-pack/plugins/apm/server/lib/transactions/charts/index.ts
Normal file
|
@ -0,0 +1,38 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { Setup } from '../../helpers/setup_request';
|
||||
import { getAnomalySeries } from './get_anomaly_data';
|
||||
import { AnomalyTimeSeriesResponse } from './get_anomaly_data/transform';
|
||||
import { getApmTimeseriesData } from './get_timeseries_data';
|
||||
import { ApmTimeSeriesResponse } from './get_timeseries_data/transform';
|
||||
|
||||
export interface TimeSeriesAPIResponse {
|
||||
apmTimeseries: ApmTimeSeriesResponse;
|
||||
anomalyTimeseries?: AnomalyTimeSeriesResponse;
|
||||
}
|
||||
|
||||
function getDates(apmTimeseries: ApmTimeSeriesResponse) {
|
||||
return apmTimeseries.responseTimes.avg.map(p => p.x);
|
||||
}
|
||||
|
||||
export async function getChartsData(options: {
|
||||
serviceName: string;
|
||||
transactionType: string;
|
||||
transactionName?: string;
|
||||
setup: Setup;
|
||||
}): Promise<TimeSeriesAPIResponse> {
|
||||
const apmTimeseries = await getApmTimeseriesData(options);
|
||||
const anomalyTimeseries = await getAnomalySeries({
|
||||
...options,
|
||||
timeSeriesDates: getDates(apmTimeseries)
|
||||
});
|
||||
|
||||
return {
|
||||
apmTimeseries,
|
||||
anomalyTimeseries
|
||||
};
|
||||
}
|
|
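Editor's note — a hypothetical caller mirroring the route change below (import paths follow the route file's imports; the Setup instance itself is assumed):

import { Setup } from '../lib/helpers/setup_request';
import {
  getChartsData,
  TimeSeriesAPIResponse
} from '../lib/transactions/charts';

async function loadTransactionCharts(
  setup: Setup
): Promise<TimeSeriesAPIResponse> {
  // Pairs the APM timeseries with the optional ML anomaly timeseries.
  return getChartsData({
    serviceName: 'opbeans-node',
    transactionType: 'request',
    setup
  });
}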
@ -9,7 +9,7 @@ import { Server } from 'hapi';
|
|||
import Joi from 'joi';
|
||||
import { withDefaultValidators } from '../lib/helpers/input_validation';
|
||||
import { setupRequest } from '../lib/helpers/setup_request';
|
||||
import { getTimeseriesData } from '../lib/transactions/charts/get_timeseries_data';
|
||||
import { getChartsData } from '../lib/transactions/charts';
|
||||
import { getDistribution } from '../lib/transactions/distribution';
|
||||
import { getTopTransactions } from '../lib/transactions/get_top_transactions';
|
||||
import { getTransaction } from '../lib/transactions/get_transaction';
|
||||
|
@ -111,7 +111,7 @@ export function initTransactionsApi(server: Server) {
|
|||
transaction_name: string;
|
||||
};
|
||||
|
||||
return getTimeseriesData({
|
||||
return getChartsData({
|
||||
serviceName,
|
||||
transactionType,
|
||||
transactionName,
|
||||
|
|
2
x-pack/plugins/apm/typings/lodash.mean.d.ts
vendored
|
@ -5,6 +5,6 @@
|
|||
*/
|
||||
|
||||
declare module 'lodash.mean' {
|
||||
function mean(numbers: number[]): number;
|
||||
function mean(numbers: Array<number | null | undefined>): number;
|
||||
export = mean;
|
||||
}
|
||||
|
|
15
x-pack/plugins/apm/typings/timeseries.ts
Normal file
|
@ -0,0 +1,15 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export interface Coordinate {
|
||||
x: number;
|
||||
y: number | null;
|
||||
}
|
||||
|
||||
export interface RectCoordinate {
|
||||
x: number;
|
||||
x0: number;
|
||||
}
|
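Editor's note — a small illustration of the two shapes introduced here (the values are assumed):

import { Coordinate, RectCoordinate } from 'x-pack/plugins/apm/typings/timeseries';

// A single response-time data point on the chart:
const avgResponseTime: Coordinate = { x: 1530523500000, y: 480074.5 };

// A highlighted time range where the anomaly score exceeded the threshold:
const anomalyHighlight: RectCoordinate = { x0: 1530523500000, x: 1530524400000 };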