Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 01:38:56 -04:00)
[Custom threshold] Improve the custom threshold rule request/response logging (#192443)
Related to #187271

## Summary

As mentioned in this [PR](https://github.com/elastic/kibana/pull/187225), when logging a JSON-stringified string for debugging or tracing, we need to wrap it in a function. This PR does that for the custom threshold rule and removes the extra request/response logging, relying on the alerting framework logging for that purpose.

Before, we were able to have a similar log using the `plugins.observability` and `plugins.alerting.observability.rules.custom_threshold` configs (as shown above), but now we only have `plugins.alerting.observability.rules.custom_threshold`, which can be enabled by either of the following configs:

```
logging:
  loggers:
    - name: plugins.alerting
      level: debug
    - name: plugins.alerting.observability.rules.custom_threshold
      level: trace
```

Thanks @dgieselaar for bringing this to our attention!

### How to test

- Enable the trace logging config as mentioned above.
- Create a custom threshold rule and check the server logs; you should be able to see the logs for the request and response of the rule execution.
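For reference, the lazy-logging pattern referred to above looks roughly like this. This is a minimal sketch, not code from the PR: it assumes the `Logger` interface from `@kbn/logging`, whose `trace` accepts either a string or a message-producing function; the helper name and `request` parameter are illustrative only.

```ts
import type { Logger } from '@kbn/logging';

// Illustrative helper (not part of this PR) showing eager vs. lazy trace logging.
function traceSearchRequest(logger: Logger, request: unknown) {
  // Eager: the (potentially large) JSON string is built even when trace logging is disabled.
  // logger.trace(`Request: ${JSON.stringify(request)}`);

  // Lazy: the message callback only runs when the trace level is actually enabled.
  logger.trace(() => `Request: ${JSON.stringify(request)}`);
}
```

With the eager form, `JSON.stringify` runs on every rule execution regardless of the configured log level; the lazy form defers that cost until the message is actually emitted.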
Parent 240c4ff15b · commit 4d5c25305b · 4 changed files with 29 additions and 9 deletions
```diff
@@ -75,6 +75,9 @@ describe('wrapScopedClusterClient', () => {
     expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
       `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {} and 5000ms requestTimeout`
     );
+    expect(loggingSystemMock.collect(logger).trace[0][0]).toEqual(
+      `result of executing query for rule .test-rule-type:abcdefg in space my-space: {\"body\":{},\"statusCode\":200,\"headers\":{\"x-elastic-product\":\"Elasticsearch\"},\"warnings\":[],\"meta\":{}}`
+    );
     expect(logger.warn).not.toHaveBeenCalled();
   });

@@ -101,6 +104,9 @@ describe('wrapScopedClusterClient', () => {
     expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
       `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {} and 5000ms requestTimeout`
     );
+    expect(loggingSystemMock.collect(logger).trace[0][0]).toEqual(
+      `result of executing query for rule .test-rule-type:abcdefg in space my-space: {\"body\":{},\"statusCode\":200,\"headers\":{\"x-elastic-product\":\"Elasticsearch\"},\"warnings\":[],\"meta\":{}}`
+    );
     expect(logger.warn).not.toHaveBeenCalled();
   });

@@ -132,6 +138,9 @@ describe('wrapScopedClusterClient', () => {
     expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
       `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {\"ignore\":[404],\"requestTimeout\":10000} and 5000ms requestTimeout`
     );
+    expect(loggingSystemMock.collect(logger).trace[0][0]).toEqual(
+      `result of executing query for rule .test-rule-type:abcdefg in space my-space: {\"body\":{},\"statusCode\":200,\"headers\":{\"x-elastic-product\":\"Elasticsearch\"},\"warnings\":[],\"meta\":{}}`
+    );
     expect(logger.warn).not.toHaveBeenCalled();
   });

@@ -154,6 +163,7 @@ describe('wrapScopedClusterClient', () => {
     expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
       `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {}`
     );
+    expect(logger.trace).not.toHaveBeenCalled();
     expect(logger.warn).toHaveBeenCalledWith(
       `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {}`
     );

@@ -187,6 +197,9 @@ describe('wrapScopedClusterClient', () => {
     expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
       `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {}`
     );
+    expect(loggingSystemMock.collect(logger).trace[0][0]).toEqual(
+      `result of executing query for rule .test-rule-type:abcdefg in space my-space: {}`
+    );
     expect(logger.warn).not.toHaveBeenCalled();
   });

@@ -219,6 +232,9 @@ describe('wrapScopedClusterClient', () => {
     expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
       `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {}`
     );
+    expect(loggingSystemMock.collect(logger).trace[0][0]).toEqual(
+      `result of executing query for rule .test-rule-type:abcdefg in space my-space: {\"took\":333}`
+    );
     expect(logger.warn).not.toHaveBeenCalled();
   });

@@ -244,6 +260,7 @@ describe('wrapScopedClusterClient', () => {
     expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
       `executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {}`
     );
+    expect(logger.trace).not.toHaveBeenCalled();
     expect(logger.warn).not.toHaveBeenCalled();
   });
 });
```
```diff
@@ -359,17 +359,22 @@ function getWrappedSearchFn(opts: WrapEsClientOpts) {
       const end = Date.now();
       const durationMs = end - start;

-      let took = 0;
+      let body: SearchResponse<TDocument, TAggregations>;
       if (searchOptions.meta) {
         // when meta: true, response is TransportResult<SearchResponse<TDocument, TAggregations>, unknown>
-        took = (result as TransportResult<SearchResponse<TDocument, TAggregations>, unknown>).body
-          .took;
+        body = (result as TransportResult<SearchResponse<TDocument, TAggregations>, unknown>).body;
       } else {
         // when meta: false, response is SearchResponse<TDocument, TAggregations>
-        took = (result as SearchResponse<TDocument, TAggregations>).took;
+        body = result as SearchResponse<TDocument, TAggregations>;
       }

-      opts.logMetricsFn({ esSearchDuration: took ?? 0, totalSearchDuration: durationMs });
+      opts.logMetricsFn({ esSearchDuration: body?.took ?? 0, totalSearchDuration: durationMs });
+      opts.logger.trace(
+        () =>
+          `result of executing query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
+            opts.rule.spaceId
+          }: ${JSON.stringify(body)}`
+      );
       return result;
     } catch (e) {
       if (opts.abortController.signal.aborted) {
```
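As the inline comments in the hunk above note, the shape of the search result depends on the transport `meta` option: with `meta: true` the Elasticsearch client resolves to a `TransportResult` whose `body` carries the `SearchResponse` (plus status code and headers), while with `meta: false` it resolves to the `SearchResponse` directly. A minimal sketch of that difference, assuming the `@elastic/elasticsearch` 8.x client; the node URL, index name, and function name are placeholders:

```ts
import { Client } from '@elastic/elasticsearch';

const client = new Client({ node: 'http://localhost:9200' }); // placeholder connection

async function compareMetaShapes() {
  // meta: false (the default): the resolved value is the SearchResponse body itself.
  const body = await client.search({ index: 'my-index' });
  console.log(body.took);

  // meta: true: the resolved value is a TransportResult wrapping body, statusCode, headers, etc.
  const withMeta = await client.search({ index: 'my-index' }, { meta: true });
  console.log(withMeta.statusCode, withMeta.body.took);
}
```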
```diff
@@ -69,9 +69,9 @@ export const checkMissingGroups = async (
       ];
     });

-  logger.trace(`Request: ${JSON.stringify({ searches })}`);
+  logger.trace(() => `Request: ${JSON.stringify({ searches })}`);
   const response = await esClient.msearch({ searches });
-  logger.trace(`Response: ${JSON.stringify(response)}`);
+  logger.trace(() => `Response: ${JSON.stringify(response)}`);

   const verifiedMissingGroups = response.responses
     .map((resp, index) => {
```
```diff
@@ -215,10 +215,8 @@ export const getData = async (
       fieldsExisted
     ),
   };
-  logger.trace(`Request: ${JSON.stringify(request)}`);
   const body = await esClient.search<undefined, ResponseAggregations>(request);
   const { aggregations, _shards } = body;
-  logger.trace(`Response: ${JSON.stringify(body)}`);
   if (aggregations) {
     return handleResponse(aggregations, previousResults, _shards.successful);
   } else if (_shards.successful) {
```