mirror of
https://github.com/elastic/kibana.git
synced 2025-06-27 18:51:07 -04:00
Fix problem with validation (#224961)
## Summary Recently, an alarm was fired in the Kibana Serverless Slack Channel due to a problem with Data Usage. Issue explanation: > Kibana's data_usage plugin allows collecting misc stats about Kibana usage. > The browser side performs requests to /internal/api/data_usage/*, providing stats related to the user interaction with the UI. > Recently, in an internal customer project, on production, one of these requests (POST /internal/api/data_usage/metrics) contained a payload that was deemed invalid by the server-side validation logic. > > The handler on that endpoint logged an error message. > This error message was spotted by a Rule. > Consequently, the rule fired an alert in our Slack channel. > > We shouldn't have invalid payloads coming from browser side, so unless someone intentionally tampered with the HTTP request, this indicates a bug in our browser-side logic. > Customer Impact: This was an isolated error on an internal project, but other folks within Elastic have spotted the same error message in their projects. Due to that error, we might be missing a few data_usage metrics. The issue was caused because the plugin validation was expecting an array for the data property, but the payload from the API was returning `null`. The validation was incorrect on the Kibana side, since only [name](https://github.com/elastic/autoops-services/blob/master/monitoring/service/specs/serverless_project_metrics_api.yaml#L189) is mandatory. --------- Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent
e566fec14b
commit
ff10d70e0f
3 changed files with 114 additions and 5 deletions
|
@ -123,7 +123,9 @@ export const UsageMetricsAutoOpsResponseSchema = {
|
|||
schema.object({
|
||||
name: schema.string(),
|
||||
error: schema.nullable(schema.string()),
|
||||
data: schema.arrayOf(schema.arrayOf(schema.number(), { minSize: 2, maxSize: 2 })),
|
||||
data: schema.nullable(
|
||||
schema.arrayOf(schema.arrayOf(schema.number(), { minSize: 2, maxSize: 2 }))
|
||||
),
|
||||
})
|
||||
)
|
||||
),
|
||||
|
|
|
@ -190,6 +190,111 @@ describe('registerUsageMetricsRoute', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('when metric type data is null', () => {
|
||||
beforeEach(() => {
|
||||
jest.spyOn(DataUsageService.prototype, 'getMetrics').mockResolvedValue({
|
||||
ingest_rate: [
|
||||
{
|
||||
name: '.ds-1',
|
||||
error: null,
|
||||
data: null,
|
||||
},
|
||||
{
|
||||
name: '.ds-2',
|
||||
error: null,
|
||||
data: [
|
||||
[1726858530000, 12894623],
|
||||
[1726862130000, 14436905],
|
||||
],
|
||||
},
|
||||
],
|
||||
storage_retained: [
|
||||
{
|
||||
name: '.ds-1',
|
||||
error: null,
|
||||
data: [
|
||||
[1726858530000, 12576413],
|
||||
[1726862130000, 13956423],
|
||||
],
|
||||
},
|
||||
{
|
||||
name: '.ds-2',
|
||||
error: null,
|
||||
data: null,
|
||||
},
|
||||
],
|
||||
search_vcu: [],
|
||||
ingest_vcu: [],
|
||||
ml_vcu: [],
|
||||
index_latency: [],
|
||||
index_rate: [],
|
||||
search_latency: [],
|
||||
search_rate: [],
|
||||
});
|
||||
});
|
||||
it('should correctly transform response when metric type data is null', async () => {
|
||||
(await context.core).elasticsearch.client.asCurrentUser.indices.getDataStream = jest
|
||||
.fn()
|
||||
.mockResolvedValue({
|
||||
data_streams: [{ name: '.ds-1' }, { name: '.ds-2' }],
|
||||
});
|
||||
|
||||
registerUsageMetricsRoute(router, mockedDataUsageContext);
|
||||
|
||||
const mockRequest = httpServerMock.createKibanaRequest({
|
||||
body: {
|
||||
from: utcTimeRange.start,
|
||||
to: utcTimeRange.end,
|
||||
metricTypes: ['ingest_rate', 'storage_retained'],
|
||||
dataStreams: ['.ds-1', '.ds-2'],
|
||||
},
|
||||
});
|
||||
const mockResponse = httpServerMock.createResponseFactory();
|
||||
const mockRouter = mockCore.http.createRouter.mock.results[0].value;
|
||||
const [[, handler]] = mockRouter.versioned.post.mock.results[0].value.addVersion.mock.calls;
|
||||
await handler(context, mockRequest, mockResponse);
|
||||
|
||||
expect(mockResponse.ok).toHaveBeenCalledTimes(1);
|
||||
expect(mockResponse.ok.mock.calls[0][0]).toEqual({
|
||||
body: {
|
||||
ingest_rate: [
|
||||
{
|
||||
name: '.ds-1',
|
||||
data: [],
|
||||
},
|
||||
{
|
||||
name: '.ds-2',
|
||||
data: [
|
||||
{ x: 1726858530000, y: 12894623 },
|
||||
{ x: 1726862130000, y: 14436905 },
|
||||
],
|
||||
},
|
||||
],
|
||||
storage_retained: [
|
||||
{
|
||||
name: '.ds-1',
|
||||
data: [
|
||||
{ x: 1726858530000, y: 12576413 },
|
||||
{ x: 1726862130000, y: 13956423 },
|
||||
],
|
||||
},
|
||||
{
|
||||
name: '.ds-2',
|
||||
data: [],
|
||||
},
|
||||
],
|
||||
search_vcu: [],
|
||||
ingest_vcu: [],
|
||||
ml_vcu: [],
|
||||
index_latency: [],
|
||||
index_rate: [],
|
||||
search_latency: [],
|
||||
search_rate: [],
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// TODO: fix this test
|
||||
it.skip('should throw error if error on requesting auto ops service', async () => {
|
||||
(await context.core).elasticsearch.client.asCurrentUser.indices.getDataStream = jest
|
||||
|
|
|
@ -112,10 +112,12 @@ export function transformMetricsData(
|
|||
metricType,
|
||||
series.map((metricSeries) => ({
|
||||
name: metricSeries.name,
|
||||
data: (metricSeries.data as Array<[number, number]>).map(([timestamp, value]) => ({
|
||||
x: timestamp,
|
||||
y: value,
|
||||
})),
|
||||
data: Array.isArray(metricSeries.data)
|
||||
? (metricSeries.data as Array<[number, number]>).map(([timestamp, value]) => ({
|
||||
x: timestamp,
|
||||
y: value,
|
||||
}))
|
||||
: [],
|
||||
})),
|
||||
])
|
||||
) as UsageMetricsResponseSchemaBody;
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue