[Kibana logging system] Add conditional evaluation based on level for logging APIs (#187225)

## Summary

*(Yeah, the title is pretty bad I apologize, I couldn't find something
sexy. OTOH, "sexy" and "logging" are usually antonyms, like "sports car"
and "Fiat Panda", or "server language" and "javascript")*

### 1. Provide a more developer-friendly alternative to
`Logger.isLevelEnabled`.


**With `isLevelEnabled`**
```ts
if(logger.isLevelEnabled('info')) {
  const message = someExpensiveMessageProbablyBasedOnJsonStringifyOrSomething(); 
  logger.info(message);
}
```

**With this PR:**
```ts
logger.info(() => someExpensiveMessageProbablyBasedOnJsonStringifyOrSomething());
``` 

### 2. Adapt (arguably) costly calls to `log.debug` to use this syntax

Aka any call relying on `JSON.stringify` or function calls.

I used the new syntax for those, except when the tests were too
complicated to fix or when the code did not allow it (e.g. untyped `let`
variables inferred from assignment return values don't play well with
closures).
This commit is contained in:
Pierre Gayvallet 2024-07-08 15:53:02 +02:00 committed by GitHub
parent a711efa1b2
commit b6fcfac9c1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
116 changed files with 812 additions and 483 deletions

View file

@ -130,7 +130,7 @@ export class FullStoryShipper implements IShipper {
)
.subscribe((pageVars) => {
this.initContext.logger.debug(
`Calling FS.setVars with context ${JSON.stringify(pageVars)}`
() => `Calling FS.setVars with context ${JSON.stringify(pageVars)}`
);
this.fullStoryApi.setVars('page', {
...formatPayload(pageVars),
@ -145,7 +145,7 @@ export class FullStoryShipper implements IShipper {
* @param newContext The full new context to set {@link EventContext}
*/
public extendContext(newContext: EventContext): void {
this.initContext.logger.debug(`Received context ${JSON.stringify(newContext)}`);
this.initContext.logger.debug(() => `Received context ${JSON.stringify(newContext)}`);
// FullStory requires different APIs for different type of contexts:
// User-level context.
@ -226,7 +226,9 @@ export class FullStoryShipper implements IShipper {
cloudIsElasticStaffOwned,
cloudTrialEndDate,
};
this.initContext.logger.debug(`Calling FS.setUserVars with ${JSON.stringify(userVars)}`);
this.initContext.logger.debug(
() => `Calling FS.setUserVars with ${JSON.stringify(userVars)}`
);
this.fullStoryApi.setUserVars(formatPayload(userVars));
}
}

View file

@ -186,7 +186,7 @@ describe('AbstractLogger', () => {
});
describe('log level', () => {
it('does not calls appenders for records with unsupported levels', () => {
it('does not call appender for records with unsupported levels', () => {
logger = new TestLogger(context, LogLevel.Warn, appenderMocks, factory);
logger.trace('some trace message');
@ -215,19 +215,49 @@ describe('AbstractLogger', () => {
);
}
});
it('does not call appender for records with unsupported levels for closure syntax', () => {
  logger = new TestLogger(context, LogLevel.Warn, appenderMocks, factory);

  // Emit one record per level using the lazy (closure) message syntax.
  logger.trace(() => 'some trace message');
  logger.debug(() => 'some debug message');
  logger.info(() => 'some info message');
  logger.warn(() => 'some warn message');
  logger.error(() => 'some error message');
  logger.fatal(() => 'some fatal message');

  // Only warn/error/fatal meet the Warn threshold, so each appender
  // must receive exactly those three records and nothing else.
  const expectedLevels = [LogLevel.Warn, LogLevel.Error, LogLevel.Fatal];
  for (const appenderMock of appenderMocks) {
    expect(appenderMock.append).toHaveBeenCalledTimes(expectedLevels.length);
    for (const level of expectedLevels) {
      expect(appenderMock.append).toHaveBeenCalledWith(expect.objectContaining({ level }));
    }
  }
});
});
describe('isLevelEnabled', () => {
const orderedLogLevels = [
LogLevel.Fatal,
LogLevel.Error,
LogLevel.Warn,
LogLevel.Info,
LogLevel.Debug,
LogLevel.Trace,
LogLevel.All,
];
const orderedLogLevels = [
LogLevel.Fatal,
LogLevel.Error,
LogLevel.Warn,
LogLevel.Info,
LogLevel.Debug,
LogLevel.Trace,
LogLevel.All,
];
describe('isLevelEnabled', () => {
for (const logLevel of orderedLogLevels) {
it(`returns the correct value for a '${logLevel.id}' level logger`, () => {
const levelLogger = new TestLogger(context, logLevel, appenderMocks, factory);
@ -238,4 +268,22 @@ describe('AbstractLogger', () => {
});
}
});
describe('closure syntax', () => {
for (const logLevel of orderedLogLevels) {
it(`evaluates the log function for '${logLevel.id}' level if enabled`, () => {
logger = new TestLogger(context, LogLevel.All, appenderMocks, factory);
const logFn = jest.fn(() => 'some message');
logger.trace(logFn);
expect(logFn).toHaveBeenCalledTimes(1);
});
it(`does not evaluate the log function for '${logLevel.id}' level if not enabled`, () => {
logger = new TestLogger(context, LogLevel.Off, appenderMocks, factory);
const logFn = jest.fn(() => 'some message');
logger.trace(logFn);
expect(logFn).not.toHaveBeenCalled();
});
}
});
});

View file

@ -13,6 +13,7 @@ import {
LoggerFactory,
LogMeta,
Logger,
LogMessageSource,
LogLevelId,
} from '@kbn/logging';
@ -43,28 +44,73 @@ export abstract class AbstractLogger implements Logger {
meta?: Meta
): LogRecord;
public trace<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta): void {
this.log(this.createLogRecord<Meta>(LogLevel.Trace, message, meta));
public trace<Meta extends LogMeta = LogMeta>(message: LogMessageSource, meta?: Meta): void {
if (!this.level.supports(LogLevel.Trace)) {
return;
}
if (typeof message === 'function') {
message = message();
}
this.performLog(this.createLogRecord<Meta>(LogLevel.Trace, message, meta));
}
public debug<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta): void {
this.log(this.createLogRecord<Meta>(LogLevel.Debug, message, meta));
public debug<Meta extends LogMeta = LogMeta>(message: LogMessageSource, meta?: Meta): void {
if (!this.level.supports(LogLevel.Debug)) {
return;
}
if (typeof message === 'function') {
message = message();
}
this.performLog(this.createLogRecord<Meta>(LogLevel.Debug, message, meta));
}
public info<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta): void {
this.log(this.createLogRecord<Meta>(LogLevel.Info, message, meta));
public info<Meta extends LogMeta = LogMeta>(message: LogMessageSource, meta?: Meta): void {
if (!this.level.supports(LogLevel.Info)) {
return;
}
if (typeof message === 'function') {
message = message();
}
this.performLog(this.createLogRecord<Meta>(LogLevel.Info, message, meta));
}
public warn<Meta extends LogMeta = LogMeta>(errorOrMessage: string | Error, meta?: Meta): void {
this.log(this.createLogRecord<Meta>(LogLevel.Warn, errorOrMessage, meta));
public warn<Meta extends LogMeta = LogMeta>(
errorOrMessage: LogMessageSource | Error,
meta?: Meta
): void {
if (!this.level.supports(LogLevel.Warn)) {
return;
}
if (typeof errorOrMessage === 'function') {
errorOrMessage = errorOrMessage();
}
this.performLog(this.createLogRecord<Meta>(LogLevel.Warn, errorOrMessage, meta));
}
public error<Meta extends LogMeta = LogMeta>(errorOrMessage: string | Error, meta?: Meta): void {
this.log(this.createLogRecord<Meta>(LogLevel.Error, errorOrMessage, meta));
public error<Meta extends LogMeta = LogMeta>(
errorOrMessage: LogMessageSource | Error,
meta?: Meta
): void {
if (!this.level.supports(LogLevel.Error)) {
return;
}
if (typeof errorOrMessage === 'function') {
errorOrMessage = errorOrMessage();
}
this.performLog(this.createLogRecord<Meta>(LogLevel.Error, errorOrMessage, meta));
}
public fatal<Meta extends LogMeta = LogMeta>(errorOrMessage: string | Error, meta?: Meta): void {
this.log(this.createLogRecord<Meta>(LogLevel.Fatal, errorOrMessage, meta));
public fatal<Meta extends LogMeta = LogMeta>(
errorOrMessage: LogMessageSource | Error,
meta?: Meta
): void {
if (!this.level.supports(LogLevel.Fatal)) {
return;
}
if (typeof errorOrMessage === 'function') {
errorOrMessage = errorOrMessage();
}
this.performLog(this.createLogRecord<Meta>(LogLevel.Fatal, errorOrMessage, meta));
}
public isLevelEnabled(levelId: LogLevelId): boolean {
@ -75,12 +121,16 @@ export abstract class AbstractLogger implements Logger {
if (!this.level.supports(record.level)) {
return;
}
for (const appender of this.appenders) {
appender.append(record);
}
this.performLog(record);
}
public get(...childContextPaths: string[]): Logger {
return this.factory.get(...[this.context, ...childContextPaths]);
}
private performLog(record: LogRecord) {
for (const appender of this.appenders) {
appender.append(record);
}
}
}

View file

@ -10,8 +10,12 @@ import { Logger } from '@kbn/logging';
import { Log } from './log';
export const convertToLogger = (cliLog: Log): Logger => {
const getErrorMessage = (msgOrError: string | Error): string => {
return typeof msgOrError === 'string' ? msgOrError : msgOrError.message;
// Extracts a printable message from a log input that may be a plain string,
// a lazily-evaluated message factory, or an Error instance.
const getErrorMessage = (msgOrError: string | (() => string) | Error): string => {
  if (typeof msgOrError === 'function') {
    return msgOrError();
  }
  if (typeof msgOrError === 'string') {
    return msgOrError;
  }
  return msgOrError.message;
};
const adapter: Logger = {

View file

@ -57,7 +57,7 @@ export class StatusHandler {
const body = await this.poll();
const code = StatusHandler.STATUS_CODE[body.status];
this.logger.debug(`Returning ${code} response with body: ${JSON.stringify(body)}`);
this.logger.debug(() => `Returning ${code} response with body: ${JSON.stringify(body)}`);
return toolkit.response(body).type('application/json').code(code);
}

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
import type { Logger } from '@kbn/logging';
import type { Logger, LogMeta, LogMessageSource } from '@kbn/logging';
export type MockedLogger = jest.Mocked<Logger> & { context: string[] };
@ -43,15 +43,39 @@ const clearLoggerMock = (logger: MockedLogger) => {
logger.log.mockClear();
};
/**
 * Normalizes a recorded `(message, meta?)` call tuple: lazy message factories
 * are evaluated to their string value, and the meta argument is only kept
 * when it was actually provided.
 */
const convertMessageSource = (
  value: [message: LogMessageSource, meta?: LogMeta | undefined]
): [string] | [string, LogMeta | undefined] => {
  const [rawMessage, meta] = value;
  const message = typeof rawMessage === 'function' ? rawMessage() : rawMessage;
  return meta ? [message, meta] : [message];
};
/**
 * Normalizes a recorded `(errorOrMessage, meta?)` call tuple: lazy message
 * factories are evaluated to their string value, Error instances are passed
 * through untouched, and the meta argument is only kept when it was provided.
 */
const convertMessageSourceOrError = (
  value: [message: LogMessageSource | Error, meta?: LogMeta | undefined]
): [string | Error] | [string | Error, LogMeta | undefined] => {
  const [rawMessage, meta] = value;
  const message = typeof rawMessage === 'function' ? rawMessage() : rawMessage;
  return meta ? [message, meta] : [message];
};
const collectLoggerMock = (logger: MockedLogger) => {
return {
debug: logger.debug.mock.calls,
error: logger.error.mock.calls,
fatal: logger.fatal.mock.calls,
info: logger.info.mock.calls,
debug: logger.debug.mock.calls.map(convertMessageSource),
error: logger.error.mock.calls.map(convertMessageSourceOrError),
fatal: logger.fatal.mock.calls.map(convertMessageSourceOrError),
info: logger.info.mock.calls.map(convertMessageSource),
log: logger.log.mock.calls,
trace: logger.trace.mock.calls,
warn: logger.warn.mock.calls,
trace: logger.trace.mock.calls.map(convertMessageSource),
warn: logger.warn.mock.calls.map(convertMessageSourceOrError),
};
};

View file

@ -9,7 +9,7 @@
export type { LogLevelId } from './src/log_level';
export { LogLevel } from './src/log_level';
export type { LogRecord } from './src/log_record';
export type { Logger } from './src/logger';
export type { Logger, LogMessageSource } from './src/logger';
export type { LogMeta } from './src/log_meta';
export type { LoggerFactory } from './src/logger_factory';
export type { Layout } from './src/layout';

View file

@ -10,6 +10,11 @@ import type { LogMeta } from './log_meta';
import type { LogRecord } from './log_record';
import type { LogLevelId } from './log_level';
/**
* @public
*/
export type LogMessageSource = string | (() => string);
/**
* Logger exposes all the necessary methods to log any type of information and
* this is the interface used by the logging consumers including plugins.
@ -20,47 +25,74 @@ export interface Logger {
/**
* Log messages at the most detailed log level
*
* @param message - The log message
* @param meta -
* @param message - The log message, or a function returning the log message
* @param meta - The ECS meta to attach to the log entry
*
* @remarks If a function is provided for the message, it will only be evaluated if the logger's level is high enough for this level.
* This can be used as an alternative to {@link Logger.isLevelEnabled} to wrap expensive logging operations in a conditional block.
*/
trace<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta): void;
trace<Meta extends LogMeta = LogMeta>(message: LogMessageSource, meta?: Meta): void;
/**
* Log messages useful for debugging and interactive investigation
* @param message - The log message
* @param meta -
*
* @param message - The log message, or a function returning the log message
* @param meta - The ECS meta to attach to the log entry
*
* @remarks If a function is provided for the message, it will only be evaluated if the logger's level is high enough for this level.
* This can be used as an alternative to {@link Logger.isLevelEnabled} to wrap expensive logging operations in a conditional block.
*/
debug<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta): void;
debug<Meta extends LogMeta = LogMeta>(message: LogMessageSource, meta?: Meta): void;
/**
* Logs messages related to general application flow
* @param message - The log message
* @param meta -
*
* @param message - The log message, or a function returning the log message
* @param meta - The ECS meta to attach to the log entry
*
* @remarks If a function is provided for the message, it will only be evaluated if the logger's level is high enough for this level.
* This can be used as an alternative to {@link Logger.isLevelEnabled} to wrap expensive logging operations in a conditional block.
*/
info<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta): void;
info<Meta extends LogMeta = LogMeta>(message: LogMessageSource, meta?: Meta): void;
/**
* Logs abnormal or unexpected errors or messages
* @param errorOrMessage - An Error object or message string to log
* @param meta -
*
* @param errorOrMessage - An Error object, message string, or function returning the log message
* @param meta - The ECS meta to attach to the log entry
*
* @remarks If a function is provided for the message, it will only be evaluated if the logger's level is high enough for this level.
* This can be used as an alternative to {@link Logger.isLevelEnabled} to wrap expensive logging operations in a conditional block.
*/
warn<Meta extends LogMeta = LogMeta>(errorOrMessage: string | Error, meta?: Meta): void;
warn<Meta extends LogMeta = LogMeta>(errorOrMessage: LogMessageSource | Error, meta?: Meta): void;
/**
* Logs abnormal or unexpected errors or messages that caused a failure in the application flow
*
* @param errorOrMessage - An Error object or message string to log
* @param meta -
* @param errorOrMessage - An Error object, message string, or function returning the log message
* @param meta - The ECS meta to attach to the log entry
*
* @remarks If a function is provided for the message, it will only be evaluated if the logger's level is high enough for this level.
* This can be used as an alternative to {@link Logger.isLevelEnabled} to wrap expensive logging operations in a conditional block.
*/
error<Meta extends LogMeta = LogMeta>(errorOrMessage: string | Error, meta?: Meta): void;
error<Meta extends LogMeta = LogMeta>(
errorOrMessage: LogMessageSource | Error,
meta?: Meta
): void;
/**
* Logs abnormal or unexpected errors or messages that caused an unrecoverable failure
*
* @param errorOrMessage - An Error object or message string to log
* @param meta -
* @param errorOrMessage - An Error object, message string, or function returning the log message
* @param meta - The ECS meta to attach to the log entry
*
* @remarks If a function is provided for the message, it will only be evaluated if the logger's level is high enough for this level.
* This can be used as an alternative to {@link Logger.isLevelEnabled} to wrap expensive logging operations in a conditional block.
*/
fatal<Meta extends LogMeta = LogMeta>(errorOrMessage: string | Error, meta?: Meta): void;
fatal<Meta extends LogMeta = LogMeta>(
errorOrMessage: LogMessageSource | Error,
meta?: Meta
): void;
/** @internal */
log(record: LogRecord): void;

View file

@ -44,7 +44,7 @@ export const eqlSearchStrategyProvider = (
},
search: ({ id, ...request }, options: IAsyncSearchOptions, { esClient, uiSettingsClient }) => {
logger.debug(`_eql/search ${JSON.stringify(request.params) || id}`);
logger.debug(() => `_eql/search ${JSON.stringify(request.params) || id}`);
const client = esClient.asCurrentUser.eql;

View file

@ -204,7 +204,7 @@ export const enhancedEsSearchStrategyProvider = (
* @throws `KbnSearchError`
*/
search: (request, options: IAsyncSearchOptions, deps) => {
logger.debug(`search ${JSON.stringify(request.params) || request.id}`);
logger.debug(() => `search ${JSON.stringify(request.params) || request.id}`);
if (request.indexType === DataViewType.ROLLUP && deps.rollupsEnabled) {
return from(rollupSearch(request, options, deps));

View file

@ -134,7 +134,7 @@ export const esqlAsyncSearchStrategyProvider = (
* @throws `KbnSearchError`
*/
search: (request, options: IAsyncSearchOptions, deps) => {
logger.debug(`search ${JSON.stringify(request) || request.id}`);
logger.debug(() => `search ${JSON.stringify(request) || request.id}`);
return asyncSearch(request, options, deps);
},

View file

@ -120,7 +120,7 @@ export const sqlSearchStrategyProvider = (
* @throws `KbnSearchError`
*/
search: (request, options: IAsyncSearchOptions, deps) => {
logger.debug(`sql search: search request=${JSON.stringify(request)}`);
logger.debug(() => `sql search: search request=${JSON.stringify(request)}`);
return asyncSearch(request, options, deps);
},

View file

@ -138,9 +138,10 @@ export class ActionsClientChatOpenAI extends ChatOpenAI {
return this.caller.call(async () => {
const requestBody = this.formatRequestForActionsClient(completionRequest);
this.#logger.debug(
`${LLM_TYPE}#completionWithRetry ${this.#traceId} assistantMessage:\n${JSON.stringify(
requestBody.params.subActionParams
)} `
() =>
`${LLM_TYPE}#completionWithRetry ${this.#traceId} assistantMessage:\n${JSON.stringify(
requestBody.params.subActionParams
)} `
);
const actionResult = await this.#actionsClient.execute(requestBody);

View file

@ -84,9 +84,10 @@ export class ActionsClientLlm extends LLM {
// convert the Langchain prompt to an assistant message:
const assistantMessage = getMessageContentAndRole(prompt);
this.#logger.debug(
`ActionsClientLlm#_call\ntraceId: ${this.#traceId}\nassistantMessage:\n${JSON.stringify(
assistantMessage
)} `
() =>
`ActionsClientLlm#_call\ntraceId: ${this.#traceId}\nassistantMessage:\n${JSON.stringify(
assistantMessage
)} `
);
// create a new connector request body with the assistant message:
const requestBody = {

View file

@ -102,9 +102,10 @@ export class ActionsClientSimpleChatModel extends SimpleChatModel {
formattedMessages.push(getMessageContentAndRole(message.content, message._getType()));
});
this.#logger.debug(
`ActionsClientSimpleChatModel#_call\ntraceId: ${
this.#traceId
}\nassistantMessage:\n${JSON.stringify(formattedMessages)} `
() =>
`ActionsClientSimpleChatModel#_call\ntraceId: ${
this.#traceId
}\nassistantMessage:\n${JSON.stringify(formattedMessages)} `
);
// create a new connector request body with the assistant message:
const requestBody = {

View file

@ -8,6 +8,7 @@
import { schema } from '@kbn/config-schema';
import moment from 'moment';
import { ByteSizeValue } from '@kbn/config-schema';
import { MockedLogger, loggerMock } from '@kbn/logging-mocks';
import {
DEFAULT_MICROSOFT_EXCHANGE_URL,
DEFAULT_MICROSOFT_GRAPH_API_SCOPE,
@ -47,7 +48,7 @@ import { actionsAuthorizationMock } from '../authorization/actions_authorization
import { trackLegacyRBACExemption } from '../lib/track_legacy_rbac_exemption';
import { ConnectorTokenClient } from '../lib/connector_token_client';
import { encryptedSavedObjectsMock } from '@kbn/encrypted-saved-objects-plugin/server/mocks';
import { Logger, SavedObject } from '@kbn/core/server';
import { SavedObject } from '@kbn/core/server';
import { connectorTokenClientMock } from '../lib/connector_token_client.mock';
import { inMemoryMetricsMock } from '../monitoring/in_memory_metrics.mock';
import { getOAuthJwtAccessToken } from '../lib/get_oauth_jwt_access_token';
@ -108,7 +109,6 @@ const request = httpServerMock.createKibanaRequest();
const auditLogger = auditLoggerMock.create();
const mockUsageCountersSetup = usageCountersServiceMock.createSetupContract();
const mockUsageCounter = mockUsageCountersSetup.createUsageCounter('test');
const logger = loggingSystemMock.create().get() as jest.Mocked<Logger>;
const mockTaskManager = taskManagerMock.createSetup();
const configurationUtilities = actionsConfigMock.create();
const eventLogClient = eventLogClientMock.create();
@ -133,8 +133,11 @@ const actionTypeIdFromSavedObjectMock = (actionTypeId: string = 'my-action-type'
} as SavedObject;
};
let logger: MockedLogger;
beforeEach(() => {
jest.resetAllMocks();
logger = loggerMock.create();
mockedLicenseState = licenseStateMock.create();
actionTypeRegistryParams = {
licensing: licensingMock.createSetup(),
@ -1853,9 +1856,13 @@ describe('getOAuthAccessToken()', () => {
tokenUrl: 'https://testurl.service-now.com/oauth_token.do',
});
expect(getOAuthClientCredentialsAccessToken).not.toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
`Successfully retrieved access token using JWT OAuth with tokenUrl https://testurl.service-now.com/oauth_token.do and config {\"clientId\":\"abc\",\"jwtKeyId\":\"def\",\"userIdentifierValue\":\"userA\"}`
);
expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
Array [
Array [
"Successfully retrieved access token using JWT OAuth with tokenUrl https://testurl.service-now.com/oauth_token.do and config {\\"clientId\\":\\"abc\\",\\"jwtKeyId\\":\\"def\\",\\"userIdentifierValue\\":\\"userA\\"}",
],
]
`);
});
test('calls getOAuthClientCredentialsAccessToken when type="client"', async () => {
@ -1892,9 +1899,13 @@ describe('getOAuthAccessToken()', () => {
oAuthScope: 'https://graph.microsoft.com/.default',
});
expect(getOAuthJwtAccessToken).not.toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
`Successfully retrieved access token using Client Credentials OAuth with tokenUrl https://login.microsoftonline.com/98765/oauth2/v2.0/token, scope https://graph.microsoft.com/.default and config {\"clientId\":\"abc\",\"tenantId\":\"def\"}`
);
expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
Array [
Array [
"Successfully retrieved access token using Client Credentials OAuth with tokenUrl https://login.microsoftonline.com/98765/oauth2/v2.0/token, scope https://graph.microsoft.com/.default and config {\\"clientId\\":\\"abc\\",\\"tenantId\\":\\"def\\"}",
],
]
`);
});
test('throws when getOAuthJwtAccessToken throws error', async () => {
@ -1919,9 +1930,13 @@ describe('getOAuthAccessToken()', () => {
).rejects.toMatchInlineSnapshot(`[Error: Failed to retrieve access token]`);
expect(getOAuthJwtAccessToken as jest.Mock).toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
`Failed to retrieve access token using JWT OAuth with tokenUrl https://testurl.service-now.com/oauth_token.do and config {\"clientId\":\"abc\",\"jwtKeyId\":\"def\",\"userIdentifierValue\":\"userA\"} - Something went wrong!`
);
expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
Array [
Array [
"Failed to retrieve access token using JWT OAuth with tokenUrl https://testurl.service-now.com/oauth_token.do and config {\\"clientId\\":\\"abc\\",\\"jwtKeyId\\":\\"def\\",\\"userIdentifierValue\\":\\"userA\\"} - Something went wrong!",
],
]
`);
});
test('throws when getOAuthClientCredentialsAccessToken throws error', async () => {
@ -1947,9 +1962,13 @@ describe('getOAuthAccessToken()', () => {
).rejects.toMatchInlineSnapshot(`[Error: Failed to retrieve access token]`);
expect(getOAuthClientCredentialsAccessToken as jest.Mock).toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
`Failed to retrieved access token using Client Credentials OAuth with tokenUrl https://login.microsoftonline.com/98765/oauth2/v2.0/token, scope https://graph.microsoft.com/.default and config {\"clientId\":\"abc\",\"tenantId\":\"def\"} - Something went wrong!`
);
expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
Array [
Array [
"Failed to retrieved access token using Client Credentials OAuth with tokenUrl https://login.microsoftonline.com/98765/oauth2/v2.0/token, scope https://graph.microsoft.com/.default and config {\\"clientId\\":\\"abc\\",\\"tenantId\\":\\"def\\"} - Something went wrong!",
],
]
`);
});
});

View file

@ -557,15 +557,17 @@ export class ActionsClient {
});
this.context.logger.debug(
`Successfully retrieved access token using JWT OAuth with tokenUrl ${
tokenOpts.tokenUrl
} and config ${JSON.stringify(tokenOpts.config)}`
() =>
`Successfully retrieved access token using JWT OAuth with tokenUrl ${
tokenOpts.tokenUrl
} and config ${JSON.stringify(tokenOpts.config)}`
);
} catch (err) {
this.context.logger.debug(
`Failed to retrieve access token using JWT OAuth with tokenUrl ${
tokenOpts.tokenUrl
} and config ${JSON.stringify(tokenOpts.config)} - ${err.message}`
() =>
`Failed to retrieve access token using JWT OAuth with tokenUrl ${
tokenOpts.tokenUrl
} and config ${JSON.stringify(tokenOpts.config)} - ${err.message}`
);
throw Boom.badRequest(`Failed to retrieve access token`);
}
@ -584,17 +586,19 @@ export class ActionsClient {
});
this.context.logger.debug(
`Successfully retrieved access token using Client Credentials OAuth with tokenUrl ${
tokenOpts.tokenUrl
}, scope ${tokenOpts.scope} and config ${JSON.stringify(tokenOpts.config)}`
() =>
`Successfully retrieved access token using Client Credentials OAuth with tokenUrl ${
tokenOpts.tokenUrl
}, scope ${tokenOpts.scope} and config ${JSON.stringify(tokenOpts.config)}`
);
} catch (err) {
this.context.logger.debug(
`Failed to retrieved access token using Client Credentials OAuth with tokenUrl ${
tokenOpts.tokenUrl
}, scope ${tokenOpts.scope} and config ${JSON.stringify(tokenOpts.config)} - ${
err.message
}`
() =>
`Failed to retrieved access token using Client Credentials OAuth with tokenUrl ${
tokenOpts.tokenUrl
}, scope ${tokenOpts.scope} and config ${JSON.stringify(tokenOpts.config)} - ${
err.message
}`
);
throw Boom.badRequest(`Failed to retrieve access token`);
}

View file

@ -107,7 +107,7 @@ export abstract class SubActionConnector<Config, Secrets> {
responseSchema.validate(data);
} catch (resValidationError) {
const err = new Error(`Response validation failed (${resValidationError})`);
this.logger.debug(`${err.message}:\n${inspect(data, { depth: 10 })}`);
this.logger.debug(() => `${err.message}:\n${inspect(data, { depth: 10 })}`);
throw err;
}
}

View file

@ -520,9 +520,10 @@ export class AlertsClient<
);
} else {
this.options.logger.debug(
`Could not find alert document to update for recovered alert with id ${id} and uuid ${currentRecoveredAlerts[
id
].getUuid()}`
() =>
`Could not find alert document to update for recovered alert with id ${id} and uuid ${currentRecoveredAlerts[
id
].getUuid()}`
);
}
}

View file

@ -152,9 +152,10 @@ async function createAliasStream(opts: CreateConcreteWriteIndexOpts): Promise<vo
);
logger.debug(
`Found ${concreteIndices.length} concrete indices for ${
indexPatterns.name
} - ${JSON.stringify(concreteIndices)}`
() =>
`Found ${concreteIndices.length} concrete indices for ${
indexPatterns.name
} - ${JSON.stringify(concreteIndices)}`
);
} catch (error) {
// 404 is expected if no concrete write indices have been created

View file

@ -25,7 +25,7 @@ const esqlQueryRequest = {
},
};
const logger = loggingSystemMock.create().get();
let logger = loggingSystemMock.create().get();
const rule = {
name: 'test-rule',
@ -43,6 +43,10 @@ describe('wrapScopedClusterClient', () => {
jest.useRealTimers();
});
beforeEach(() => {
logger = loggingSystemMock.create().get();
});
afterEach(() => {
jest.resetAllMocks();
});
@ -68,7 +72,7 @@ describe('wrapScopedClusterClient', () => {
});
expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
`executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {} and 5000ms requestTimeout`
);
});
@ -93,7 +97,7 @@ describe('wrapScopedClusterClient', () => {
});
expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
`executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {} and 5000ms requestTimeout`
);
});
@ -193,7 +197,7 @@ describe('wrapScopedClusterClient', () => {
expect(stats.numSearches).toEqual(3);
expect(stats.esSearchDurationMs).toEqual(999);
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
`executing query for rule .test-rule-type:abcdefg in space my-space - {\"body\":{\"query\":{\"bool\":{\"filter\":{\"range\":{\"@timestamp\":{\"gte\":0}}}}}}} - with options {}`
);
});
@ -241,7 +245,7 @@ describe('wrapScopedClusterClient', () => {
});
expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
'executing eql query for rule .test-rule-type:abcdefg in space my-space - {"index":"foo","query":"process where process.name == \\"regsvr32.exe\\""} - with options {} and 5000ms requestTimeout'
);
});
@ -266,7 +270,7 @@ describe('wrapScopedClusterClient', () => {
});
expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
'executing eql query for rule .test-rule-type:abcdefg in space my-space - {"index":"foo","query":"process where process.name == \\"regsvr32.exe\\""} - with options {} and 5000ms requestTimeout'
);
});
@ -340,7 +344,7 @@ describe('wrapScopedClusterClient', () => {
expect(stats.numSearches).toEqual(3);
expect(stats.esSearchDurationMs).toEqual(999);
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
`executing eql query for rule .test-rule-type:abcdefg in space my-space - {\"index\":\"foo\",\"query\":\"process where process.name == \\\"regsvr32.exe\\\"\"} - with options {}`
);
});
@ -391,7 +395,7 @@ describe('wrapScopedClusterClient', () => {
});
expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
'executing ES|QL query for rule .test-rule-type:abcdefg in space my-space - {"method":"POST","path":"/_query","body":{"query":"from .kibana_task_manager"}} - with options {} and 5000ms requestTimeout'
);
});
@ -416,7 +420,7 @@ describe('wrapScopedClusterClient', () => {
});
expect(scopedClusterClient.asInternalUser.search).not.toHaveBeenCalled();
expect(scopedClusterClient.asCurrentUser.search).not.toHaveBeenCalled();
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
'executing ES|QL query for rule .test-rule-type:abcdefg in space my-space - {"method":"POST","path":"/_query","body":{"query":"from .kibana_task_manager"}} - with options {} and 5000ms requestTimeout'
);
});
@ -490,7 +494,7 @@ describe('wrapScopedClusterClient', () => {
expect(stats.numSearches).toEqual(3);
expect(stats.totalSearchDurationMs).toBeGreaterThan(-1);
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug[0][0]).toEqual(
`executing ES|QL query for rule .test-rule-type:abcdefg in space my-space - {\"method\":\"POST\",\"path\":\"/_query\",\"body\":{\"query\":\"from .kibana_task_manager\"}} - with options {}`
);
});

View file

@ -167,11 +167,12 @@ function getWrappedTransportRequestFn(opts: WrapEsClientOpts) {
const requestOptions = options ?? {};
const start = Date.now();
opts.logger.debug(
`executing ES|QL query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
opts.rule.spaceId
} - ${JSON.stringify(params)} - with options ${JSON.stringify(requestOptions)}${
requestTimeout ? ` and ${requestTimeout}ms requestTimeout` : ''
}`
() =>
`executing ES|QL query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
opts.rule.spaceId
} - ${JSON.stringify(params)} - with options ${JSON.stringify(requestOptions)}${
requestTimeout ? ` and ${requestTimeout}ms requestTimeout` : ''
}`
);
const result = (await originalRequestFn.call(opts.esClient.transport, params, {
...requestOptions,
@ -235,11 +236,12 @@ function getWrappedEqlSearchFn(opts: WrapEsClientOpts) {
const searchOptions = options ?? {};
const start = Date.now();
opts.logger.debug(
`executing eql query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
opts.rule.spaceId
} - ${JSON.stringify(params)} - with options ${JSON.stringify(searchOptions)}${
requestTimeout ? ` and ${requestTimeout}ms requestTimeout` : ''
}`
() =>
`executing eql query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
opts.rule.spaceId
} - ${JSON.stringify(params)} - with options ${JSON.stringify(searchOptions)}${
requestTimeout ? ` and ${requestTimeout}ms requestTimeout` : ''
}`
);
const result = (await originalEqlSearch.call(opts.esClient, params, {
...searchOptions,
@ -316,11 +318,12 @@ function getWrappedSearchFn(opts: WrapEsClientOpts) {
const searchOptions = options ?? {};
const start = Date.now();
opts.logger.debug(
`executing query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
opts.rule.spaceId
} - ${JSON.stringify(params)} - with options ${JSON.stringify(searchOptions)}${
requestTimeout ? ` and ${requestTimeout}ms requestTimeout` : ''
}`
() =>
`executing query for rule ${opts.rule.alertTypeId}:${opts.rule.id} in space ${
opts.rule.spaceId
} - ${JSON.stringify(params)} - with options ${JSON.stringify(searchOptions)}${
requestTimeout ? ` and ${requestTimeout}ms requestTimeout` : ''
}`
);
const result = (await originalSearch.call(opts.esClient, params, {
...searchOptions,

View file

@ -11,7 +11,7 @@ import { createSearchSourceMock } from '@kbn/data-plugin/common/search/search_so
import { of, throwError } from 'rxjs';
import { wrapSearchSourceClient } from './wrap_search_source_client';
const logger = loggingSystemMock.create().get();
let logger: ReturnType<typeof loggingSystemMock.createLogger>;
const rule = {
name: 'test-rule',
@ -38,6 +38,10 @@ describe('wrapSearchSourceClient', () => {
jest.useFakeTimers({ legacyFakeTimers: true });
});
beforeEach(() => {
logger = loggingSystemMock.createLogger();
});
afterAll(() => {
jest.useRealTimers();
});
@ -84,7 +88,7 @@ describe('wrapSearchSourceClient', () => {
requestTimeout: 5000,
},
});
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug.map((params) => params[0])).toContain(
`executing query for rule .test-rule-type:abcdefg in space my-space - with options {} and 5000ms requestTimeout`
);
});
@ -136,7 +140,7 @@ describe('wrapSearchSourceClient', () => {
expect(stats.numSearches).toEqual(3);
expect(stats.esSearchDurationMs).toEqual(999);
expect(logger.debug).toHaveBeenCalledWith(
expect(loggingSystemMock.collect(logger).debug.map((params) => params[0])).toContain(
`executing query for rule .test-rule-type:abcdefg in space my-space - with options {}`
);
});

View file

@ -153,11 +153,12 @@ function wrapFetch$({
const start = Date.now();
logger.debug(
`executing query for rule ${rule.alertTypeId}:${rule.id} in space ${
rule.spaceId
} - with options ${JSON.stringify(searchOptions)}${
requestTimeout ? ` and ${requestTimeout}ms requestTimeout` : ''
}`
() =>
`executing query for rule ${rule.alertTypeId}:${rule.id} in space ${
rule.spaceId
} - with options ${JSON.stringify(searchOptions)}${
requestTimeout ? ` and ${requestTimeout}ms requestTimeout` : ''
}`
);
return pureSearchSource

View file

@ -366,6 +366,7 @@ describe('Ad Hoc Task Runner', () => {
triggeredActionsStatus: 'complete',
});
(RuleRunMetricsStore as jest.Mock).mockImplementation(() => ruleRunMetricsStore);
logger.isLevelEnabled.mockReturnValue(true);
logger.get.mockImplementation(() => logger);
taskRunnerFactoryInitializerParams.executionContext.withContext.mockImplementation((ctx, fn) =>
fn()

View file

@ -6,12 +6,12 @@
*/
import { processRunResults } from './process_run_result';
import { loggingSystemMock } from '@kbn/core/server/mocks';
import { loggerMock } from '@kbn/logging-mocks';
import { ruleResultServiceMock } from '../../monitoring/rule_result_service.mock';
import { asErr, asOk } from '../../lib/result_type';
import { ActionsCompletion } from '@kbn/alerting-state-types';
const logger = loggingSystemMock.create().get();
const logger = loggerMock.create();
const ruleResultService = ruleResultServiceMock.create();
const executionMetrics = {
@ -32,6 +32,7 @@ const executionMetrics = {
describe('processRunResults', () => {
beforeEach(() => {
jest.resetAllMocks();
logger.isLevelEnabled.mockReturnValue(true);
});
test('should process results as expected when results are successful', () => {

View file

@ -69,7 +69,7 @@ export function processRunResults({
(err: ElasticsearchError) => lastRunFromError(err)
);
if (logger) {
if (logger && logger.isLevelEnabled('debug')) {
logger.debug(`deprecated ruleRunStatus for ${logPrefix}: ${JSON.stringify(executionStatus)}`);
logger.debug(`ruleRunStatus for ${logPrefix}: ${JSON.stringify(lastRun)}`);
if (executionMetrics) {

View file

@ -27,6 +27,7 @@ describe('logAlerts', () => {
beforeEach(() => {
jest.resetAllMocks();
logger.isLevelEnabled.mockReturnValue(true);
ruleRunMetricsStore = new RuleRunMetricsStore();
});

View file

@ -58,7 +58,7 @@ export function logAlerts<
});
}
if (activeAlertIds.length > 0) {
if (activeAlertIds.length > 0 && logger.isLevelEnabled('debug')) {
logger.debug(
`rule ${ruleLogPrefix} has ${activeAlertIds.length} active alerts: ${JSON.stringify(
activeAlertIds.map((alertId) => ({
@ -68,7 +68,7 @@ export function logAlerts<
)}`
);
}
if (recoveredAlertIds.length > 0) {
if (recoveredAlertIds.length > 0 && logger.isLevelEnabled('debug')) {
logger.debug(
`rule ${ruleLogPrefix} has ${recoveredAlertIds.length} recovered alerts: ${JSON.stringify(
recoveredAlertIds

View file

@ -220,6 +220,7 @@ describe('Task Runner', () => {
beforeEach(() => {
jest.resetAllMocks();
logger.isLevelEnabled.mockReturnValue(true);
jest
.requireMock('../lib/wrap_scoped_cluster_client')
.createWrappedScopedClusterClientFactory.mockReturnValue({

View file

@ -630,11 +630,13 @@ export class TaskRunner<
if (outcome === 'failure') {
this.inMemoryMetrics.increment(IN_MEMORY_METRICS.RULE_FAILURES);
}
this.logger.debug(
`Updating rule task for ${this.ruleType.id} rule with id ${ruleId} - ${JSON.stringify(
executionStatus
)} - ${JSON.stringify(lastRun)}`
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`Updating rule task for ${this.ruleType.id} rule with id ${ruleId} - ${JSON.stringify(
executionStatus
)} - ${JSON.stringify(lastRun)}`
);
}
await this.updateRuleSavedObjectPostRun(ruleId, namespace, {
executionStatus: ruleExecutionStatusToRaw(executionStatus),
nextRun,

View file

@ -198,6 +198,7 @@ describe('Task Runner Cancel', () => {
alertingEventLogger.getStartAndDuration.mockImplementation(() => ({ start: new Date() }));
(AlertingEventLogger as jest.Mock).mockImplementation(() => alertingEventLogger);
logger.get.mockImplementation(() => logger);
logger.isLevelEnabled.mockReturnValue(true);
actionsClient.bulkEnqueueExecution.mockResolvedValue({ errors: false, items: [] });
});

View file

@ -156,10 +156,10 @@ export async function getExecutionsPerDayCount({
},
};
logger.debug(`query for getExecutionsPerDayCount - ${JSON.stringify(query)}`);
logger.debug(() => `query for getExecutionsPerDayCount - ${JSON.stringify(query)}`);
const results = await esClient.search(query);
logger.debug(`results for getExecutionsPerDayCount query - ${JSON.stringify(results)}`);
logger.debug(() => `results for getExecutionsPerDayCount query - ${JSON.stringify(results)}`);
const totalRuleExecutions =
typeof results.hits.total === 'number' ? results.hits.total : results.hits.total?.value;
@ -242,10 +242,12 @@ export async function getExecutionTimeoutsPerDayCount({
},
};
logger.debug(`query for getExecutionTimeoutsPerDayCount - ${JSON.stringify(query)}`);
logger.debug(() => `query for getExecutionTimeoutsPerDayCount - ${JSON.stringify(query)}`);
const results = await esClient.search(query);
logger.debug(`results for getExecutionTimeoutsPerDayCount query - ${JSON.stringify(results)}`);
logger.debug(
() => `results for getExecutionTimeoutsPerDayCount query - ${JSON.stringify(results)}`
);
const aggregations = results.aggregations as {
by_rule_type_id: AggregationsTermsAggregateBase<AggregationsStringTermsBucketKeys>;

View file

@ -272,10 +272,10 @@ export async function getTotalCountAggregations({
},
};
logger.debug(`query for getTotalCountAggregations - ${JSON.stringify(query)}`);
logger.debug(() => `query for getTotalCountAggregations - ${JSON.stringify(query)}`);
const results = await esClient.search(query);
logger.debug(`results for getTotalCountAggregations query - ${JSON.stringify(results)}`);
logger.debug(() => `results for getTotalCountAggregations query - ${JSON.stringify(results)}`);
const aggregations = results.aggregations as {
by_rule_type_id: AggregationsTermsAggregateBase<AggregationsStringTermsBucketKeys>;
@ -445,10 +445,10 @@ export async function getTotalCountInUse({
},
};
logger.debug(`query for getTotalCountInUse - ${JSON.stringify(query)}`);
logger.debug(() => `query for getTotalCountInUse - ${JSON.stringify(query)}`);
const results = await esClient.search(query);
logger.debug(`results for getTotalCountInUse query - ${JSON.stringify(results)}`);
logger.debug(() => `results for getTotalCountInUse query - ${JSON.stringify(results)}`);
const aggregations = results.aggregations as {
by_rule_type_id: AggregationsTermsAggregateBase<AggregationsStringTermsBucketKeys>;

View file

@ -99,11 +99,11 @@ export async function getFailedAndUnrecognizedTasksPerDay({
},
};
logger.debug(`query for getFailedAndUnrecognizedTasksPerDay - ${JSON.stringify(query)}`);
logger.debug(() => `query for getFailedAndUnrecognizedTasksPerDay - ${JSON.stringify(query)}`);
const results = await esClient.search(query);
logger.debug(
`results for getFailedAndUnrecognizedTasksPerDay query - ${JSON.stringify(results)}`
() => `results for getFailedAndUnrecognizedTasksPerDay query - ${JSON.stringify(results)}`
);
const aggregations = results.aggregations as {

View file

@ -173,10 +173,15 @@ export class CasesConnectorExecutor {
}: Pick<CasesConnectorRunParams, 'alerts' | 'groupingBy'> & {
params: CasesConnectorRunParams;
}): GroupedAlerts[] {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][groupAlerts] Grouping ${alerts.length} alerts`,
this.getLogMetadata(params, { labels: { groupingBy }, tags: ['case-connector:groupAlerts'] })
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][groupAlerts] Grouping ${alerts.length} alerts`,
this.getLogMetadata(params, {
labels: { groupingBy },
tags: ['case-connector:groupAlerts'],
})
);
}
const uniqueGroupingByFields = Array.from(new Set<string>(groupingBy));
const groupingMap = new Map<string, GroupedAlerts>();
@ -190,19 +195,23 @@ export class CasesConnectorExecutor {
uniqueGroupingByFields.every((groupingByField) => Boolean(get(alert, groupingByField, null)))
);
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][groupAlerts] Total alerts to be grouped: ${alertsWithAllGroupingFields.length} out of ${alerts.length}`,
this.getLogMetadata(params, { tags: ['case-connector:groupAlerts'] })
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][groupAlerts] Total alerts to be grouped: ${alertsWithAllGroupingFields.length} out of ${alerts.length}`,
this.getLogMetadata(params, { tags: ['case-connector:groupAlerts'] })
);
}
for (const alert of alertsWithAllGroupingFields) {
const alertWithOnlyTheGroupingFields = pick(alert, uniqueGroupingByFields);
const groupingKey = stringify(alertWithOnlyTheGroupingFields);
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][groupAlerts] Alert ${alert._id} got grouped into bucket with ID ${groupingKey}`,
this.getLogMetadata(params, { tags: ['case-connector:groupAlerts', groupingKey] })
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][groupAlerts] Alert ${alert._id} got grouped into bucket with ID ${groupingKey}`,
this.getLogMetadata(params, { tags: ['case-connector:groupAlerts', groupingKey] })
);
}
if (groupingMap.has(groupingKey)) {
groupingMap.get(groupingKey)?.alerts.push(alert);
@ -261,10 +270,12 @@ export class CasesConnectorExecutor {
params: CasesConnectorRunParams,
groupedAlerts: GroupedAlerts[]
): Map<string, GroupedAlertsWithOracleKey> {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][generateOracleKeys] Generating ${groupedAlerts.length} oracle keys`,
this.getLogMetadata(params, { tags: ['case-connector:generateOracleKeys'] })
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][generateOracleKeys] Generating ${groupedAlerts.length} oracle keys`,
this.getLogMetadata(params, { tags: ['case-connector:generateOracleKeys'] })
);
}
const { rule, owner } = params;
@ -280,21 +291,25 @@ export class CasesConnectorExecutor {
const oracleKey = this.casesOracleService.getRecordId(getRecordIdParams);
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][generateOracleKeys] Oracle key ${oracleKey} generated`,
this.getLogMetadata(params, {
labels: { params: getRecordIdParams },
tags: ['case-connector:generateOracleKeys', oracleKey],
})
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][generateOracleKeys] Oracle key ${oracleKey} generated`,
this.getLogMetadata(params, {
labels: { params: getRecordIdParams },
tags: ['case-connector:generateOracleKeys', oracleKey],
})
);
}
oracleMap.set(oracleKey, { oracleKey, grouping, alerts });
}
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][generateOracleKeys] Total of oracles keys generated ${oracleMap.size}`,
this.getLogMetadata(params, { tags: ['case-connector:generateOracleKeys'] })
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][generateOracleKeys] Total of oracles keys generated ${oracleMap.size}`,
this.getLogMetadata(params, { tags: ['case-connector:generateOracleKeys'] })
);
}
return oracleMap;
}
@ -303,11 +318,12 @@ export class CasesConnectorExecutor {
params: CasesConnectorRunParams,
groupedAlertsWithOracleKey: Map<string, GroupedAlertsWithOracleKey>
): Promise<Map<string, GroupedAlertsWithOracleRecords>> {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][upsertOracleRecords] Upserting ${groupedAlertsWithOracleKey.size} oracle records`,
this.getLogMetadata(params, { tags: ['case-connector:upsertOracleRecords'] })
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][upsertOracleRecords] Upserting ${groupedAlertsWithOracleKey.size} oracle records`,
this.getLogMetadata(params, { tags: ['case-connector:upsertOracleRecords'] })
);
}
const bulkCreateReq: BulkCreateOracleRecordRequest = [];
const oracleRecordMap = new Map<string, GroupedAlertsWithOracleRecords>();
@ -322,25 +338,29 @@ export class CasesConnectorExecutor {
const ids = Array.from(groupedAlertsWithOracleKey.values()).map(({ oracleKey }) => oracleKey);
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][upsertOracleRecords] Getting oracle records with ids ${ids}`,
this.getLogMetadata(params, { tags: ['case-connector:upsertOracleRecords', ...ids] })
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][upsertOracleRecords] Getting oracle records with ids ${ids}`,
this.getLogMetadata(params, { tags: ['case-connector:upsertOracleRecords', ...ids] })
);
}
const bulkGetRes = await this.casesOracleService.bulkGetRecords(ids);
const [bulkGetValidRecords, bulkGetRecordsErrors] = partitionRecordsByError(bulkGetRes);
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][upsertOracleRecords] The total number of valid oracle records is ${bulkGetValidRecords.length} and the total number of errors while getting the records is ${bulkGetRecordsErrors.length}`,
this.getLogMetadata(params, {
labels: {
total: ids.length,
success: bulkGetValidRecords.length,
errors: bulkGetRecordsErrors.length,
},
tags: ['case-connector:upsertOracleRecords'],
})
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][upsertOracleRecords] The total number of valid oracle records is ${bulkGetValidRecords.length} and the total number of errors while getting the records is ${bulkGetRecordsErrors.length}`,
this.getLogMetadata(params, {
labels: {
total: ids.length,
success: bulkGetValidRecords.length,
errors: bulkGetRecordsErrors.length,
},
tags: ['case-connector:upsertOracleRecords'],
})
);
}
addRecordToMap(bulkGetValidRecords);
@ -350,16 +370,18 @@ export class CasesConnectorExecutor {
const [nonFoundErrors, restOfErrors] = partitionByNonFoundErrors(bulkGetRecordsErrors);
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][upsertOracleRecords] The total number of non found oracle records is ${nonFoundErrors.length} and the total number of the rest of errors while getting the records is ${restOfErrors.length}`,
this.getLogMetadata(params, {
labels: {
nonFoundErrors: nonFoundErrors.length,
restOfErrors: restOfErrors.length,
},
tags: ['case-connector:upsertOracleRecords'],
})
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][upsertOracleRecords] The total number of non found oracle records is ${nonFoundErrors.length} and the total number of the rest of errors while getting the records is ${restOfErrors.length}`,
this.getLogMetadata(params, {
labels: {
nonFoundErrors: nonFoundErrors.length,
restOfErrors: restOfErrors.length,
},
tags: ['case-connector:upsertOracleRecords'],
})
);
}
this.handleAndThrowErrors(restOfErrors);
@ -1073,17 +1095,19 @@ export class CasesConnectorExecutor {
* attachments.bulkCreate throws an error on errors
*/
async (req: BulkCreateAlertsReq) => {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][attachAlertsToCases] Attaching ${req.attachments.length} alerts to case with ID ${req.caseId}`,
this.getLogMetadata(params, {
labels: { caseId: req.caseId },
tags: [
'case-connector:attachAlertsToCases',
req.caseId,
...(req.attachments as Array<{ alertId: string }>).map(({ alertId }) => alertId),
],
})
);
if (this.logger.isLevelEnabled('debug')) {
this.logger.debug(
`[CasesConnector][CasesConnectorExecutor][attachAlertsToCases] Attaching ${req.attachments.length} alerts to case with ID ${req.caseId}`,
this.getLogMetadata(params, {
labels: { caseId: req.caseId },
tags: [
'case-connector:attachAlertsToCases',
req.caseId,
...(req.attachments as Array<{ alertId: string }>).map(({ alertId }) => alertId),
],
})
);
}
await this.casesClient.attachments.bulkCreate(req);
},

View file

@ -274,7 +274,7 @@ export class CasesService {
options?: SavedObjectsBulkDeleteOptions;
}) {
try {
this.log.debug(`Attempting to bulk delete case entities ${JSON.stringify(entities)}`);
this.log.debug(() => `Attempting to bulk delete case entities ${JSON.stringify(entities)}`);
await this.unsecuredSavedObjectsClient.bulkDelete(entities, options);
} catch (error) {
this.log.error(`Error bulk deleting case entities ${JSON.stringify(entities)}: ${error}`);

View file

@ -260,7 +260,7 @@ export class AIAssistantKnowledgeBaseDataClient extends AIAssistantDataClient {
})
: undefined;
this.options.logger.debug(`created: ${created?.data.hits.hits.length ?? '0'}`);
this.options.logger.debug(`errors: ${JSON.stringify(errors, null, 2)}`);
this.options.logger.debug(() => `errors: ${JSON.stringify(errors, null, 2)}`);
return created?.data ? transformESSearchToKnowledgeBaseEntry(created?.data) : [];
};
@ -314,12 +314,14 @@ export class AIAssistantKnowledgeBaseDataClient extends AIAssistantDataClient {
);
this.options.logger.debug(
`getKnowledgeBaseDocuments() - Similarity Search Query:\n ${JSON.stringify(
vectorSearchQuery
)}`
() =>
`getKnowledgeBaseDocuments() - Similarity Search Query:\n ${JSON.stringify(
vectorSearchQuery
)}`
);
this.options.logger.debug(
`getKnowledgeBaseDocuments() - Similarity Search Results:\n ${JSON.stringify(results)}`
() =>
`getKnowledgeBaseDocuments() - Similarity Search Results:\n ${JSON.stringify(results)}`
);
return results;
@ -347,7 +349,7 @@ export class AIAssistantKnowledgeBaseDataClient extends AIAssistantDataClient {
}
this.options.logger.debug(
`Creating Knowledge Base Entry:\n ${JSON.stringify(knowledgeBaseEntry, null, 2)}`
() => `Creating Knowledge Base Entry:\n ${JSON.stringify(knowledgeBaseEntry, null, 2)}`
);
this.options.logger.debug(`kbIndex: ${this.indexTemplateAndPattern.alias}`);
const esClient = await this.options.elasticsearchClientPromise;

View file

@ -127,7 +127,7 @@ export class ElasticsearchStore extends VectorStore {
try {
const response = await this.esClient.bulk({ refresh: true, operations });
this.logger.debug(`Add Documents Response:\n ${JSON.stringify(response)}`);
this.logger.debug(() => `Add Documents Response:\n ${JSON.stringify(response)}`);
const errorIds = response.items.filter((i) => i.index?.error != null);
operations.forEach((op, i) => {
@ -268,11 +268,12 @@ export class ElasticsearchStore extends VectorStore {
});
this.logger.debug(
`Similarity search metadata source:\n${JSON.stringify(
results.map((r) => r?.metadata?.source ?? '(missing metadata.source)'),
null,
2
)}`
() =>
`Similarity search metadata source:\n${JSON.stringify(
results.map((r) => r?.metadata?.source ?? '(missing metadata.source)'),
null,
2
)}`
);
return results;

View file

@ -119,7 +119,9 @@ export const callAgentExecutor: AgentExecutor<true | false> = async ({
(tool) => tool.getTool(assistantToolParams) ?? []
);
logger.debug(`applicable tools: ${JSON.stringify(tools.map((t) => t.name).join(', '), null, 2)}`);
logger.debug(
() => `applicable tools: ${JSON.stringify(tools.map((t) => t.name).join(', '), null, 2)}`
);
const executorArgs = {
memory,

View file

@ -30,7 +30,7 @@ export const TOOLS_NODE = 'tools';
* @param tools - The tools available to execute
*/
export const executeTools = async ({ config, logger, state, tools }: ExecuteToolsParams) => {
logger.debug(`Node state:\n${JSON.stringify(state, null, 2)}`);
logger.debug(() => `Node state:\n${JSON.stringify(state, null, 2)}`);
const toolExecutor = new ToolExecutor({ tools });
const agentAction = state.agentOutcome;

View file

@ -35,7 +35,7 @@ export const generateChatTitle = async ({
model,
state,
}: GenerateChatTitleParams) => {
logger.debug(`Node state:\n ${JSON.stringify(state, null, 2)}`);
logger.debug(() => `Node state:\n ${JSON.stringify(state, null, 2)}`);
if (state.messages.length !== 0) {
logger.debug('No need to generate chat title, messages already exist');

View file

@ -38,7 +38,7 @@ export const runAgent = async ({
logger,
state,
}: RunAgentParams) => {
logger.debug(`Node state:\n${JSON.stringify(state, null, 2)}`);
logger.debug(() => `Node state:\n${JSON.stringify(state, null, 2)}`);
const knowledgeHistory = await dataClients?.kbDataClient?.getKnowledgeBaseDocuments({
kbResource: 'user',

View file

@ -19,7 +19,7 @@ export interface ShouldContinueParams extends NodeParamsBase {
* @param state - The current state of the graph
*/
export const shouldContinue = ({ logger, state }: ShouldContinueParams) => {
logger.debug(`Node state:\n${JSON.stringify(state, null, 2)}`);
logger.debug(() => `Node state:\n${JSON.stringify(state, null, 2)}`);
if (state.agentOutcome && 'returnValues' in state.agentOutcome) {
return 'end';

View file

@ -77,12 +77,12 @@ export class APMTracer extends BaseTracer implements LangChainTracerFields {
}
async onRetrieverStart(run: Run): Promise<void> {
this.logger.debug(`onRetrieverStart: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onRetrieverStart: run:\n${JSON.stringify(run, null, 2)}`);
this.createAndAddSpanFromRun(run, this.retrieverSpans);
}
async onRetrieverEnd(run: Run): Promise<void> {
this.logger.debug(`onRetrieverEnd: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onRetrieverEnd: run:\n${JSON.stringify(run, null, 2)}`);
const span = this.retrieverSpans.pop();
if (span != null) {
span.addLabels(this._getLabelsFromRun(run));
@ -91,16 +91,16 @@ export class APMTracer extends BaseTracer implements LangChainTracerFields {
}
async onRetrieverError(run: Run): Promise<void> {
this.logger.debug(`onRetrieverError: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onRetrieverError: run:\n${JSON.stringify(run, null, 2)}`);
}
async onLLMStart(run: Run): Promise<void> {
this.logger.debug(`onLLMStart: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onLLMStart: run:\n${JSON.stringify(run, null, 2)}`);
this.createAndAddSpanFromRun(run, this.llmSpans);
}
async onLLMEnd(run: Run): Promise<void> {
this.logger.debug(`onLLMEnd: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onLLMEnd: run:\n${JSON.stringify(run, null, 2)}`);
const span = this.llmSpans.pop();
if (span != null) {
span.addLabels(this._getLabelsFromRun(run));
@ -109,16 +109,16 @@ export class APMTracer extends BaseTracer implements LangChainTracerFields {
}
async onLLMError(run: Run): Promise<void> {
this.logger.debug(`onLLMError: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onLLMError: run:\n${JSON.stringify(run, null, 2)}`);
}
async onChainStart(run: Run): Promise<void> {
this.logger.debug(`onChainStart: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onChainStart: run:\n${JSON.stringify(run, null, 2)}`);
this.createAndAddSpanFromRun(run, this.chainSpans);
}
async onChainEnd(run: Run): Promise<void> {
this.logger.debug(`onChainEnd: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onChainEnd: run:\n${JSON.stringify(run, null, 2)}`);
const span = this.chainSpans.pop();
if (span != null) {
span.addLabels(this._getLabelsFromRun(run));
@ -127,16 +127,16 @@ export class APMTracer extends BaseTracer implements LangChainTracerFields {
}
async onChainError(run: Run): Promise<void> {
this.logger.debug(`onChainError: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onChainError: run:\n${JSON.stringify(run, null, 2)}`);
}
async onToolStart(run: Run): Promise<void> {
this.logger.debug(`onToolStart: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onToolStart: run:\n${JSON.stringify(run, null, 2)}`);
this.createAndAddSpanFromRun(run, this.toolSpans);
}
async onToolEnd(run: Run): Promise<void> {
this.logger.debug(`onToolEnd: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onToolEnd: run:\n${JSON.stringify(run, null, 2)}`);
const span = this.toolSpans.pop();
if (span != null) {
span.addLabels(this._getLabelsFromRun(run));
@ -145,6 +145,6 @@ export class APMTracer extends BaseTracer implements LangChainTracerFields {
}
async onToolError(run: Run): Promise<void> {
this.logger.debug(`onToolError: run:\n${JSON.stringify(run, null, 2)}`);
this.logger.debug(() => `onToolError: run:\n${JSON.stringify(run, null, 2)}`);
}
}

View file

@ -144,7 +144,8 @@ export const bulkActionKnowledgeBaseEntriesRoute = (router: ElasticAssistantPlug
}
logger.debug(
`Performing bulk action on Knowledge Base Entries:\n${JSON.stringify(request.body)}`
() =>
`Performing bulk action on Knowledge Base Entries:\n${JSON.stringify(request.body)}`
);
const { body } = request;

View file

@ -60,7 +60,7 @@ export const createKnowledgeBaseEntryRoute = (router: ElasticAssistantPluginRout
return checkResponse;
}
logger.debug(`Creating KB Entry:\n${JSON.stringify(request.body)}`);
logger.debug(() => `Creating KB Entry:\n${JSON.stringify(request.body)}`);
const documents: Array<Document<Metadata>> = [
{
metadata: request.body.metadata,

View file

@ -80,9 +80,10 @@ class AppContextService {
this.logger?.debug('AppContextService:registerFeatures');
this.logger?.debug(`pluginName: ${pluginName}`);
this.logger?.debug(
`features: ${Object.entries(features)
.map(([feature, enabled]) => `${feature}:${enabled}`)
.join(', ')}`
() =>
`features: ${Object.entries(features)
.map(([feature, enabled]) => `${feature}:${enabled}`)
.join(', ')}`
);
if (!this.registeredFeatures.has(pluginName)) {
@ -107,9 +108,10 @@ class AppContextService {
this.logger?.debug('AppContextService:getRegisteredFeatures');
this.logger?.debug(`pluginName: ${pluginName}`);
this.logger?.debug(
`features: ${Object.entries(features)
.map(([feature, enabled]) => `${feature}:${enabled}`)
.join(', ')}`
() =>
`features: ${Object.entries(features)
.map(([feature, enabled]) => `${feature}:${enabled}`)
.join(', ')}`
);
return features;

View file

@ -308,11 +308,12 @@ export class EncryptedSavedObjectsService {
const encryptedAttributesKeys = Object.keys(encryptedAttributes);
if (encryptedAttributesKeys.length !== typeDefinition.attributesToEncrypt.size) {
this.options.logger.debug(
`The following attributes of saved object "${descriptorToArray(
descriptor
)}" should have been encrypted: ${Array.from(
typeDefinition.attributesToEncrypt
)}, but found only: ${encryptedAttributesKeys}`
() =>
`The following attributes of saved object "${descriptorToArray(
descriptor
)}" should have been encrypted: ${Array.from(
typeDefinition.attributesToEncrypt
)}, but found only: ${encryptedAttributesKeys}`
);
}
@ -569,11 +570,12 @@ export class EncryptedSavedObjectsService {
const decryptedAttributesKeys = Object.keys(decryptedAttributes);
if (decryptedAttributesKeys.length !== typeDefinition.attributesToEncrypt.size) {
this.options.logger.debug(
`The following attributes of saved object "${descriptorToArray(
descriptor
)}" should have been decrypted: ${Array.from(
typeDefinition.attributesToEncrypt
)}, but found only: ${decryptedAttributesKeys}`
() =>
`The following attributes of saved object "${descriptorToArray(
descriptor
)}" should have been decrypted: ${Array.from(
typeDefinition.attributesToEncrypt
)}, but found only: ${decryptedAttributesKeys}`
);
}
@ -605,9 +607,10 @@ export class EncryptedSavedObjectsService {
if (Object.keys(attributesAAD).length === 0) {
this.options.logger.debug(
`The AAD for saved object "${descriptorToArray(
descriptor
)}" does not include any attributes.`
() =>
`The AAD for saved object "${descriptorToArray(
descriptor
)}" does not include any attributes.`
);
}

View file

@ -529,10 +529,11 @@ export function getQueryBodyWithAuthFilter(
dslFilterQuery = queryFilter ? toElasticsearchQuery(queryFilter) : undefined;
} catch (err) {
logger.debug(
`esContext: Invalid kuery syntax for the filter (${filter}) error: ${JSON.stringify({
message: err.message,
statusCode: err.statusCode,
})}`
() =>
`esContext: Invalid kuery syntax for the filter (${filter}) error: ${JSON.stringify({
message: err.message,
statusCode: err.statusCode,
})}`
);
throw err;
}
@ -691,10 +692,11 @@ export function getQueryBody(
dslFilterQuery = filterKueryNode ? toElasticsearchQuery(filterKueryNode) : undefined;
} catch (err) {
logger.debug(
`esContext: Invalid kuery syntax for the filter (${filter}) error: ${JSON.stringify({
message: err.message,
statusCode: err.statusCode,
})}`
() =>
`esContext: Invalid kuery syntax for the filter (${filter}) error: ${JSON.stringify({
message: err.message,
statusCode: err.statusCode,
})}`
);
throw err;
}

View file

@ -131,7 +131,9 @@ export async function updateTagsBatch(
);
}
appContextService.getLogger().debug(JSON.stringify(res).slice(0, 1000));
if (appContextService.getLogger().isLevelEnabled('debug')) {
appContextService.getLogger().debug(JSON.stringify(res).slice(0, 1000));
}
// creating unique ids to use as agentId, as we don't have all agent ids in case of action by kuery
const getUuidArray = (count: number) => Array.from({ length: count }, () => uuidv4());

View file

@ -146,9 +146,10 @@ export const bulkCreateArtifacts = async (
for (let batchN = 0; batchN < batches.length; batchN++) {
logger.debug(
`Creating artifacts for batch ${batchN + 1} with ${batches[batchN].length / 2} artifacts`
() =>
`Creating artifacts for batch ${batchN + 1} with ${batches[batchN].length / 2} artifacts`
);
logger.debug(`Artifacts in current batch: ${JSON.stringify(batches[batchN])}`);
logger.debug(() => `Artifacts in current batch: ${JSON.stringify(batches[batchN])}`);
// Generate a bulk create for the current batch of artifacts
const res = await withPackageSpan(`Bulk create fleet artifacts batch [${batchN}]`, () =>
esClient.bulk({

View file

@ -648,9 +648,10 @@ export const installTransforms = async ({
);
if (previousInstalledTransformEsAssets.length > 0) {
logger.debug(
`Found previous transform references:\n ${JSON.stringify(
previousInstalledTransformEsAssets
)}`
() =>
`Found previous transform references:\n ${JSON.stringify(
previousInstalledTransformEsAssets
)}`
);
}
}

View file

@ -387,9 +387,10 @@ async function retryImportOnConflictError(
const retryDelayMs = 1000 + Math.floor(Math.random() * 3000); // 1s + 0-3s of jitter
logger?.debug(
`Retrying import operation after [${
retryDelayMs * 1000
}s] due to conflict errors: ${JSON.stringify(errors)}`
() =>
`Retrying import operation after [${
retryDelayMs * 1000
}s] due to conflict errors: ${JSON.stringify(errors)}`
);
await setTimeout(retryDelayMs);
@ -458,9 +459,10 @@ export async function installKibanaSavedObjects({
the integrations team. */
if (referenceErrors.length) {
logger.debug(
`Resolving ${
referenceErrors.length
} reference errors creating saved objects: ${formatImportErrorsForLog(referenceErrors)}`
() =>
`Resolving ${
referenceErrors.length
} reference errors creating saved objects: ${formatImportErrorsForLog(referenceErrors)}`
);
const retries = toBeSavedObjects.map(({ id, type }) => {

View file

@ -32,7 +32,9 @@ export function registerFleetUsageLogger(
try {
const usageData = await fetchUsage();
if (appContextService.getLogger().isLevelEnabled('debug')) {
appContextService.getLogger().debug(`Fleet Usage: ${JSON.stringify(usageData)}`);
appContextService
.getLogger()
.debug(() => `Fleet Usage: ${JSON.stringify(usageData)}`);
} else {
appContextService.getLogger().info(`Fleet Usage: ${JSON.stringify(usageData)}`);
}

View file

@ -1678,7 +1678,7 @@ class PackagePolicyClientImpl implements PackagePolicyClient {
.info(
`Package policy upgrade dry run ${hasErrors ? 'resulted in errors' : 'ran successfully'}`
);
appContextService.getLogger().debug(JSON.stringify(upgradeTelemetry));
appContextService.getLogger().debug(() => JSON.stringify(upgradeTelemetry));
}
}
@ -2716,9 +2716,10 @@ export function _validateRestrictedFieldsNotModifiedOrThrow(opts: {
appContextService
.getLogger()
.debug(
`Rejecting package policy update due to dataset change, old val '${
oldStream?.vars[DATASET_VAR_NAME]?.value
}, new val '${JSON.stringify(stream?.vars?.[DATASET_VAR_NAME]?.value)}'`
() =>
`Rejecting package policy update due to dataset change, old val '${
oldStream.vars![DATASET_VAR_NAME].value
}, new val '${JSON.stringify(stream?.vars?.[DATASET_VAR_NAME]?.value)}'`
);
throw new PackagePolicyValidationError(
i18n.translate('xpack.fleet.updatePackagePolicy.datasetCannotBeModified', {
@ -2781,7 +2782,7 @@ export function sendUpdatePackagePolicyTelemetryEvent(
upgradeTelemetry
);
appContextService.getLogger().info(`Package policy upgraded successfully`);
appContextService.getLogger().debug(JSON.stringify(upgradeTelemetry));
appContextService.getLogger().debug(() => JSON.stringify(upgradeTelemetry));
}
}
});

View file

@ -296,12 +296,13 @@ export async function ensurePreconfiguredPackagesAndPolicies(
);
});
logger.debug(
`Adding preconfigured package policies ${JSON.stringify(
packagePoliciesToAdd.map((pol) => ({
name: pol.packagePolicy.name,
package: pol.installedPackage.name,
}))
)}`
() =>
`Adding preconfigured package policies ${JSON.stringify(
packagePoliciesToAdd.map((pol) => ({
name: pol.packagePolicy.name,
package: pol.installedPackage.name,
}))
)}`
);
const s = apm.startSpan('Add preconfigured package policies', 'preconfiguration');
await addPreconfiguredPolicyPackages(

View file

@ -133,7 +133,9 @@ async function createLock(
},
{ id: FLEET_SETUP_LOCK_TYPE }
);
logger.debug(`Fleet setup lock created: ${JSON.stringify(created)}`);
if (logger.isLevelEnabled('debug')) {
logger.debug(`Fleet setup lock created: ${JSON.stringify(created)}`);
}
} catch (error) {
logger.info(`Could not create fleet setup lock, abort setup: ${error}`);
return { created: false, toReturn: { isInitialized: false, nonFatalErrors: [] } };

View file

@ -93,27 +93,27 @@ export class FleetUsageSender {
} = usageData;
appContextService
.getLogger()
.debug('Fleet usage telemetry: ' + JSON.stringify(fleetUsageData));
.debug(() => 'Fleet usage telemetry: ' + JSON.stringify(fleetUsageData));
core.analytics.reportEvent(FLEET_USAGES_EVENT_TYPE, fleetUsageData);
appContextService
.getLogger()
.debug('Agents per privileges telemetry: ' + JSON.stringify(agentsPerPrivileges));
.debug(() => 'Agents per privileges telemetry: ' + JSON.stringify(agentsPerPrivileges));
core.analytics.reportEvent(FLEET_AGENTS_EVENT_TYPE, {
agents_per_privileges: agentsPerPrivileges,
});
appContextService
.getLogger()
.debug('Agents per version telemetry: ' + JSON.stringify(agentsPerVersion));
.debug(() => 'Agents per version telemetry: ' + JSON.stringify(agentsPerVersion));
agentsPerVersion.forEach((byVersion) => {
core.analytics.reportEvent(FLEET_AGENTS_EVENT_TYPE, { agents_per_version: byVersion });
});
appContextService
.getLogger()
.debug('Agents per output type telemetry: ' + JSON.stringify(agentsPerOutputType));
.debug(() => 'Agents per output type telemetry: ' + JSON.stringify(agentsPerOutputType));
agentsPerOutputType.forEach((byOutputType) => {
core.analytics.reportEvent(FLEET_AGENTS_EVENT_TYPE, {
agents_per_output_type: byOutputType,
@ -122,7 +122,7 @@ export class FleetUsageSender {
appContextService
.getLogger()
.debug('Agents upgrade details telemetry: ' + JSON.stringify(upgradeDetails));
.debug(() => 'Agents upgrade details telemetry: ' + JSON.stringify(upgradeDetails));
upgradeDetails.forEach((upgradeDetailsObj) => {
core.analytics.reportEvent(FLEET_AGENTS_EVENT_TYPE, { upgrade_details: upgradeDetailsObj });
});

View file

@ -154,7 +154,7 @@ export class TelemetryEventsSender {
deployment_id: appContextService.getCloud()?.deploymentId,
}));
this.logger.debug(JSON.stringify(toSend));
this.logger.debug(() => JSON.stringify(toSend));
await this.send(
toSend,
@ -199,10 +199,12 @@ export class TelemetryEventsSender {
},
timeout: 5000,
});
this.logger.debug(`Events sent!. Response: ${resp.status} ${JSON.stringify(resp.data)}`);
this.logger.debug(
() => `Events sent!. Response: ${resp.status} ${JSON.stringify(resp.data)}`
);
} catch (err) {
this.logger.debug(
`Error sending events: ${err?.response?.status} ${JSON.stringify(err.response.data)}`
() => `Error sending events: ${err?.response?.status} ${JSON.stringify(err.response.data)}`
);
}
}

View file

@ -134,7 +134,8 @@ export class LicensingPlugin implements Plugin<LicensingPluginSetup, LicensingPl
this.loggingSubscription = license$.subscribe((license) =>
this.logger.debug(
'Imported license information from Elasticsearch:' +
() =>
'Imported license information from Elasticsearch:' +
[
`type: ${license.type}`,
`status: ${license.status}`,

View file

@ -264,7 +264,8 @@ export class BaseRule {
DefaultAlert
>): Promise<any> {
this.scopedLogger.debug(
`Executing alert with params: ${JSON.stringify(params)} and state: ${JSON.stringify(state)}`
() =>
`Executing alert with params: ${JSON.stringify(params)} and state: ${JSON.stringify(state)}`
);
const { alertsClient } = services;

View file

@ -55,7 +55,7 @@ export async function installEntityDefinition({
};
try {
logger.debug(`Installing definition ${JSON.stringify(definition)}`);
logger.debug(() => `Installing definition ${JSON.stringify(definition)}`);
validateDefinitionCanCreateValidTransformIds(definition);

View file

@ -34,7 +34,7 @@ export async function upsertTemplate({ esClient, template, logger }: TemplateMan
logger.info(
`Entity manager index template is up to date (use debug logging to see what was installed)`
);
logger.debug(`Entity manager index template: ${JSON.stringify(template)}`);
logger.debug(() => `Entity manager index template: ${JSON.stringify(template)}`);
}
export async function upsertComponent({ esClient, component, logger }: ComponentManagementOptions) {
@ -48,5 +48,5 @@ export async function upsertComponent({ esClient, component, logger }: Component
logger.info(
`Entity manager component template is up to date (use debug logging to see what was installed)`
);
logger.debug(`Entity manager component template: ${JSON.stringify(component)}`);
logger.debug(() => `Entity manager component template: ${JSON.stringify(component)}`);
}

View file

@ -58,7 +58,7 @@ export function parseInlineFunctionCalls({ logger }: { logger: Logger }) {
input?: unknown;
};
logger.debug('Parsed function call:\n ' + JSON.stringify(parsedFunctionCall));
logger.debug(() => 'Parsed function call:\n ' + JSON.stringify(parsedFunctionCall));
if (!parsedFunctionCall.name) {
throw createInternalServerError(`Missing name for tool use`);

View file

@ -437,18 +437,20 @@ export class ObservabilityAIAssistantClient {
if (this.dependencies.logger.isLevelEnabled('debug')) {
switch (event.type) {
case StreamingChatResponseEventType.MessageAdd:
this.dependencies.logger.debug(`Added message: ${JSON.stringify(event.message)}`);
this.dependencies.logger.debug(
() => `Added message: ${JSON.stringify(event.message)}`
);
break;
case StreamingChatResponseEventType.ConversationCreate:
this.dependencies.logger.debug(
`Created conversation: ${JSON.stringify(event.conversation)}`
() => `Created conversation: ${JSON.stringify(event.conversation)}`
);
break;
case StreamingChatResponseEventType.ConversationUpdate:
this.dependencies.logger.debug(
`Updated conversation: ${JSON.stringify(event.conversation)}`
() => `Updated conversation: ${JSON.stringify(event.conversation)}`
);
break;
}

View file

@ -107,7 +107,7 @@ export class KnowledgeBaseService {
});
this.dependencies.logger.debug(
'Model definition status:\n' + JSON.stringify(getResponse.trained_model_configs[0])
() => 'Model definition status:\n' + JSON.stringify(getResponse.trained_model_configs[0])
);
return Boolean(getResponse.trained_model_configs[0]?.fully_defined);
@ -160,7 +160,7 @@ export class KnowledgeBaseService {
}
this.dependencies.logger.debug('Model is not allocated yet');
this.dependencies.logger.debug(JSON.stringify(response));
this.dependencies.logger.debug(() => JSON.stringify(response));
throw gatewayTimeout();
}, retryOptions);
@ -368,7 +368,7 @@ export class KnowledgeBaseService {
entries: RecalledEntry[];
}> => {
this.dependencies.logger.debug(
`Recalling entries from KB for queries: "${JSON.stringify(queries)}"`
() => `Recalling entries from KB for queries: "${JSON.stringify(queries)}"`
);
const modelId = await this.dependencies.getModelId();

View file

@ -101,7 +101,7 @@ export async function scoreSuggestions({
properties: {
scores: {
description: `The document IDs and their scores, as CSV. Example:
my_id,7
my_other_id,3
my_third_id,4
@ -155,7 +155,7 @@ export async function scoreSuggestions({
relevantDocumentIds.includes(suggestion.id)
);
logger.debug(`Relevant documents: ${JSON.stringify(relevantDocuments, null, 2)}`);
logger.debug(() => `Relevant documents: ${JSON.stringify(relevantDocuments, null, 2)}`);
return {
relevantDocuments,

View file

@ -174,7 +174,7 @@ export function registerQueryFunction({ functions, resources }: FunctionRegistra
to classify the user's request in the user message before this ("${abbreviatedUserQuestion}...").
and get more information about specific functions and commands
you think are candidates for answering the question.
Examples for functions and commands:
Do you need to group data? Request \`STATS\`.
Extract data? Request \`DISSECT\` AND \`GROK\`.
@ -216,7 +216,7 @@ export function registerQueryFunction({ functions, resources }: FunctionRegistra
"I want a query that ..." => ${VisualizeESQLUserIntention.generateQueryOnly}
"... Just show me the query" => ${VisualizeESQLUserIntention.generateQueryOnly}
"Create a query that ..." => ${VisualizeESQLUserIntention.generateQueryOnly}
"Show me the avg of x" => ${VisualizeESQLUserIntention.executeAndReturnResults}
"Show me the results of y" => ${VisualizeESQLUserIntention.executeAndReturnResults}
"Display the sum of z" => ${VisualizeESQLUserIntention.executeAndReturnResults}
@ -276,9 +276,10 @@ export function registerQueryFunction({ functions, resources }: FunctionRegistra
if (!response.message.function_call.arguments) {
resources.logger.debug(
`LLM should have called "classify_esql", but instead responded with the following message: ${JSON.stringify(
response.message
)}`
() =>
`LLM should have called "classify_esql", but instead responded with the following message: ${JSON.stringify(
response.message
)}`
);
throw new Error(
'LLM did not call classify_esql function during query generation, execute the "query" function and try again'
@ -374,41 +375,41 @@ export function registerQueryFunction({ functions, resources }: FunctionRegistra
\`\`\`
Respond in plain text. Do not attempt to use a function.
You must use commands and functions for which you have requested documentation.
${
args.intention !== VisualizeESQLUserIntention.generateQueryOnly
? `DO NOT UNDER ANY CIRCUMSTANCES generate more than a single query.
If multiple queries are needed, do it as a follow-up step. Make this clear to the user. For example:
Human: plot both yesterday's and today's data.
Assistant: Here's how you can plot yesterday's data:
\`\`\`esql
<query>
\`\`\`
Let's see that first. We'll look at today's data next.
Human: <response from yesterday's data>
Assistant: Let's look at today's data:
\`\`\`esql
<query>
\`\`\`
`
: ''
}
${userIntentionMessage}
DO NOT UNDER ANY CIRCUMSTANCES use commands or functions that are not a capability of ES|QL
as mentioned in the system message and documentation. When converting queries from one language
to ES|QL, make sure that the functions are available and documented in ES|QL.
E.g., for SPL's LEN, use LENGTH. For IF, use CASE.
`,
},
},

View file

@ -23,7 +23,9 @@ export function createGetStatusService(params: RegisterServicesParams) {
try {
const { type, setupState } = await getSetupState({ ...params, esClient, soClient, spaceId });
params.logger.debug(`Set up state for: ${type}: ${JSON.stringify(setupState, null, 2)}`);
params.logger.debug(
() => `Set up state for: ${type}: ${JSON.stringify(setupState, null, 2)}`
);
return {
has_setup:

View file

@ -140,7 +140,7 @@ export class TelemetryEventsSender {
queue.clearEvents();
this.logger.debug(JSON.stringify(events));
this.logger.debug(() => JSON.stringify(events));
await this.send(events, telemetryUrl);
} catch (err) {
@ -186,10 +186,12 @@ export class TelemetryEventsSender {
},
timeout: 5000,
});
this.logger.debug(`Events sent!. Response: ${resp.status} ${JSON.stringify(resp.data)}`);
this.logger.debug(
() => `Events sent!. Response: ${resp.status} ${JSON.stringify(resp.data)}`
);
} catch (err) {
this.logger.debug(
`Error sending events: ${err.response.status} ${JSON.stringify(err.response.data)}`
() => `Error sending events: ${err.response.status} ${JSON.stringify(err.response.data)}`
);
}
}

View file

@ -17,13 +17,13 @@ import { install } from '../../install';
/* eslint-disable no-console */
const mockLogger = loggingSystemMock.create().get();
mockLogger.warn = jest.fn((message: string | Error) => {
mockLogger.warn = jest.fn((message: string | (() => string) | Error) => {
console.warn(message);
});
mockLogger.debug = jest.fn((message: string | Error) => {
mockLogger.debug = jest.fn((message: string | (() => string) | Error) => {
console.log(message);
});
mockLogger.error = jest.fn((message: string | Error) => {
mockLogger.error = jest.fn((message: string | (() => string) | Error) => {
console.error(message);
});

View file

@ -87,7 +87,7 @@ export class BasicAuthenticationProvider extends BaseAuthenticationProvider {
state: authHeaders,
});
} catch (err) {
this.logger.debug(`Failed to perform a login: ${getDetailedErrorMessage(err)}`);
this.logger.debug(() => `Failed to perform a login: ${getDetailedErrorMessage(err)}`);
return AuthenticationResult.failed(err);
}
}
@ -172,7 +172,7 @@ export class BasicAuthenticationProvider extends BaseAuthenticationProvider {
return AuthenticationResult.succeeded(user, { authHeaders });
} catch (err) {
this.logger.debug(
`Failed to authenticate request via state: ${getDetailedErrorMessage(err)}`
() => `Failed to authenticate request via state: ${getDetailedErrorMessage(err)}`
);
return AuthenticationResult.failed(err);
}

View file

@ -116,9 +116,12 @@ export class HTTPAuthenticationProvider extends BaseAuthenticationProvider {
return AuthenticationResult.succeeded(user);
} catch (err) {
this.logger.debug(
`Failed to authenticate request to ${request.url.pathname} via authorization header with "${
authorizationHeader.scheme
}" scheme: ${getDetailedErrorMessage(err)}`
() =>
`Failed to authenticate request to ${
request.url.pathname
} via authorization header with "${
authorizationHeader.scheme
}" scheme: ${getDetailedErrorMessage(err)}`
);
return AuthenticationResult.failed(err);
}

View file

@ -122,7 +122,7 @@ export class KerberosAuthenticationProvider extends BaseAuthenticationProvider {
await this.options.tokens.invalidate(state);
} catch (err) {
this.logger.debug(
`Failed invalidating access and/or refresh tokens: ${getDetailedErrorMessage(err)}`
() => `Failed invalidating access and/or refresh tokens: ${getDetailedErrorMessage(err)}`
);
return DeauthenticationResult.failed(err);
}

View file

@ -268,7 +268,7 @@ export class OIDCAuthenticationProvider extends BaseAuthenticationProvider {
})) as any;
} catch (err) {
this.logger.debug(
`Failed to authenticate request via OpenID Connect: ${getDetailedErrorMessage(err)}`
() => `Failed to authenticate request via OpenID Connect: ${getDetailedErrorMessage(err)}`
);
return AuthenticationResult.failed(err);
}
@ -319,7 +319,7 @@ export class OIDCAuthenticationProvider extends BaseAuthenticationProvider {
);
} catch (err) {
this.logger.debug(
`Failed to initiate OpenID Connect authentication: ${getDetailedErrorMessage(err)}`
() => `Failed to initiate OpenID Connect authentication: ${getDetailedErrorMessage(err)}`
);
return AuthenticationResult.failed(err);
}
@ -349,7 +349,7 @@ export class OIDCAuthenticationProvider extends BaseAuthenticationProvider {
return AuthenticationResult.succeeded(user, { authHeaders });
} catch (err) {
this.logger.debug(
`Failed to authenticate request via state: ${getDetailedErrorMessage(err)}`
() => `Failed to authenticate request via state: ${getDetailedErrorMessage(err)}`
);
return AuthenticationResult.failed(err);
}
@ -448,7 +448,7 @@ export class OIDCAuthenticationProvider extends BaseAuthenticationProvider {
return DeauthenticationResult.redirectTo(redirect);
}
} catch (err) {
this.logger.debug(`Failed to deauthenticate user: ${getDetailedErrorMessage(err)}`);
this.logger.debug(() => `Failed to deauthenticate user: ${getDetailedErrorMessage(err)}`);
return DeauthenticationResult.failed(err);
}
}

View file

@ -171,7 +171,9 @@ export class PKIAuthenticationProvider extends BaseAuthenticationProvider {
try {
await this.options.tokens.invalidate({ accessToken: state.accessToken });
} catch (err) {
this.logger.debug(`Failed invalidating access token: ${getDetailedErrorMessage(err)}`);
this.logger.debug(
() => `Failed invalidating access token: ${getDetailedErrorMessage(err)}`
);
return DeauthenticationResult.failed(err);
}
}
@ -241,7 +243,7 @@ export class PKIAuthenticationProvider extends BaseAuthenticationProvider {
return AuthenticationResult.succeeded(user, { authHeaders });
} catch (err) {
this.logger.debug(
`Failed to authenticate request via state: ${getDetailedErrorMessage(err)}`
() => `Failed to authenticate request via state: ${getDetailedErrorMessage(err)}`
);
return AuthenticationResult.failed(err);
}
@ -291,9 +293,10 @@ export class PKIAuthenticationProvider extends BaseAuthenticationProvider {
})) as any;
} catch (err) {
this.logger.debug(
`Failed to exchange peer certificate chain to an access token: ${getDetailedErrorMessage(
err
)}`
() =>
`Failed to exchange peer certificate chain to an access token: ${getDetailedErrorMessage(
err
)}`
);
return AuthenticationResult.failed(err);
}

View file

@ -187,9 +187,10 @@ export class SAMLAuthenticationProvider extends BaseAuthenticationProvider {
this.logger.debug('Login has been successfully performed.');
} else {
this.logger.debug(
`Failed to perform a login: ${
authenticationResult.error && getDetailedErrorMessage(authenticationResult.error)
}`
() =>
`Failed to perform a login: ${
authenticationResult.error && getDetailedErrorMessage(authenticationResult.error)
}`
);
}
@ -286,7 +287,7 @@ export class SAMLAuthenticationProvider extends BaseAuthenticationProvider {
return DeauthenticationResult.redirectTo(redirect);
}
} catch (err) {
this.logger.debug(`Failed to deauthenticate user: ${getDetailedErrorMessage(err)}`);
this.logger.debug(() => `Failed to deauthenticate user: ${getDetailedErrorMessage(err)}`);
return DeauthenticationResult.failed(err);
}
}
@ -467,7 +468,7 @@ export class SAMLAuthenticationProvider extends BaseAuthenticationProvider {
});
} catch (err) {
this.logger.debug(
`Failed to perform IdP initiated local logout: ${getDetailedErrorMessage(err)}`
() => `Failed to perform IdP initiated local logout: ${getDetailedErrorMessage(err)}`
);
return AuthenticationResult.failed(err);
}
@ -500,7 +501,7 @@ export class SAMLAuthenticationProvider extends BaseAuthenticationProvider {
return AuthenticationResult.succeeded(user, { authHeaders });
} catch (err) {
this.logger.debug(
`Failed to authenticate request via state: ${getDetailedErrorMessage(err)}`
() => `Failed to authenticate request via state: ${getDetailedErrorMessage(err)}`
);
return AuthenticationResult.failed(err);
}
@ -593,7 +594,7 @@ export class SAMLAuthenticationProvider extends BaseAuthenticationProvider {
state: { requestId, redirectURL, realm },
});
} catch (err) {
this.logger.debug(`Failed to initiate SAML handshake: ${getDetailedErrorMessage(err)}`);
this.logger.debug(() => `Failed to initiate SAML handshake: ${getDetailedErrorMessage(err)}`);
return AuthenticationResult.failed(err);
}
}

View file

@ -94,7 +94,7 @@ export class TokenAuthenticationProvider extends BaseAuthenticationProvider {
}
);
} catch (err) {
this.logger.debug(`Failed to perform a login: ${getDetailedErrorMessage(err)}`);
this.logger.debug(() => `Failed to perform a login: ${getDetailedErrorMessage(err)}`);
return AuthenticationResult.failed(err);
}
}

View file

@ -77,7 +77,7 @@ export class Tokens {
authenticationInfo: authenticationInfo as AuthenticationInfo,
};
} catch (err) {
this.logger.debug(`Failed to refresh access token: ${getDetailedErrorMessage(err)}`);
this.logger.debug(() => `Failed to refresh access token: ${getDetailedErrorMessage(err)}`);
// There are at least two common cases when refresh token request can fail:
// 1. Refresh token is valid only for 24 hours and if it hasn't been used it expires.
@ -123,7 +123,9 @@ export class Tokens {
})
).invalidated_tokens;
} catch (err) {
this.logger.debug(`Failed to invalidate refresh token: ${getDetailedErrorMessage(err)}`);
this.logger.debug(
() => `Failed to invalidate refresh token: ${getDetailedErrorMessage(err)}`
);
// When using already deleted refresh token, Elasticsearch responds with 404 and a body that
// shows that no tokens were invalidated.
@ -155,7 +157,9 @@ export class Tokens {
})
).invalidated_tokens;
} catch (err) {
this.logger.debug(`Failed to invalidate access token: ${getDetailedErrorMessage(err)}`);
this.logger.debug(
() => `Failed to invalidate access token: ${getDetailedErrorMessage(err)}`
);
// When using already deleted access token, Elasticsearch responds with 404 and a body that
// shows that no tokens were invalidated.

View file

@ -53,9 +53,10 @@ export function defineKibanaUserRoleDeprecationRoutes({ router, logger }: RouteD
logger.debug(`No users with "${KIBANA_USER_ROLE_NAME}" role found.`);
} else {
logger.debug(
`The following users with "${KIBANA_USER_ROLE_NAME}" role found and will be migrated to "${KIBANA_ADMIN_ROLE_NAME}" role: ${usersWithKibanaUserRole
.map((user) => user.username)
.join(', ')}.`
() =>
`The following users with "${KIBANA_USER_ROLE_NAME}" role found and will be migrated to "${KIBANA_ADMIN_ROLE_NAME}" role: ${usersWithKibanaUserRole
.map((user) => user.username)
.join(', ')}.`
);
}
@ -107,9 +108,10 @@ export function defineKibanaUserRoleDeprecationRoutes({ router, logger }: RouteD
logger.debug(`No role mappings with "${KIBANA_USER_ROLE_NAME}" role found.`);
} else {
logger.debug(
`The following role mappings with "${KIBANA_USER_ROLE_NAME}" role found and will be migrated to "${KIBANA_ADMIN_ROLE_NAME}" role: ${roleMappingsWithKibanaUserRole
.map(([mappingName]) => mappingName)
.join(', ')}.`
() =>
`The following role mappings with "${KIBANA_USER_ROLE_NAME}" role found and will be migrated to "${KIBANA_ADMIN_ROLE_NAME}" role: ${roleMappingsWithKibanaUserRole
.map(([mappingName]) => mappingName)
.join(', ')}.`
);
}

View file

@ -468,9 +468,10 @@ export class Session {
invalidateIndexValueFilter = filter;
} else {
sessionLogger.debug(
`Invalidating sessions that match query: ${JSON.stringify(
filter.query.username ? { ...filter.query, username: '[REDACTED]' } : filter.query
)}.`
() =>
`Invalidating sessions that match query: ${JSON.stringify(
filter.query.username ? { ...filter.query, username: '[REDACTED]' } : filter.query
)}.`
);
invalidateIndexValueFilter = filter.query.username
? {

View file

@ -41,7 +41,9 @@ export const KNOWLEDGE_BASE_RETRIEVAL_TOOL: AssistantTool = {
query: z.string().describe(`Summary of items/things to search for in the knowledge base`),
}),
func: async (input, _, cbManager) => {
logger.debug(`KnowledgeBaseRetrievalToolParams:input\n ${JSON.stringify(input, null, 2)}`);
logger.debug(
() => `KnowledgeBaseRetrievalToolParams:input\n ${JSON.stringify(input, null, 2)}`
);
const docs = await kbDataClient.getKnowledgeBaseDocuments({
query: input.query,

View file

@ -47,14 +47,16 @@ export const KNOWLEDGE_BASE_WRITE_TOOL: AssistantTool = {
),
}),
func: async (input, _, cbManager) => {
logger.debug(`KnowledgeBaseWriteToolParams:input\n ${JSON.stringify(input, null, 2)}`);
logger.debug(
() => `KnowledgeBaseWriteToolParams:input\n ${JSON.stringify(input, null, 2)}`
);
const knowledgeBaseEntry: KnowledgeBaseEntryCreateProps = {
metadata: { kbResource: 'user', source: 'conversation', required: input.required },
text: input.query,
};
logger.debug(`knowledgeBaseEntry\n ${JSON.stringify(knowledgeBaseEntry, null, 2)}`);
logger.debug(() => `knowledgeBaseEntry\n ${JSON.stringify(knowledgeBaseEntry, null, 2)}`);
const resp = await kbDataClient.createKnowledgeBaseEntry({ knowledgeBaseEntry });

View file

@ -185,9 +185,10 @@ export class ManifestTask {
const diff = newManifest.diff(oldManifest);
this.logger.debug(
`New -vs- old manifest diff counts: ${Object.entries(diff).map(
([diffType, diffItems]) => `${diffType}: ${diffItems.length}`
)}`
() =>
`New -vs- old manifest diff counts: ${Object.entries(diff).map(
([diffType, diffItems]) => `${diffType}: ${diffItems.length}`
)}`
);
const persistErrors = await manifestManager.pushArtifacts(

View file

@ -135,7 +135,7 @@ export const turnOffPolicyProtectionsIfNotSupported = async (
}
);
log.debug(`Bulk update response:\n${JSON.stringify(bulkUpdateResponse, null, 2)}`);
log.debug(() => `Bulk update response:\n${JSON.stringify(bulkUpdateResponse, null, 2)}`);
if (bulkUpdateResponse.failedPolicies.length > 0) {
log.error(

View file

@ -319,7 +319,7 @@ function responseActionRequestHandler<T extends EndpointActionDataParameterTypes
const logger = endpointContext.logFactory.get('responseActionsHandler');
return async (context, req, res) => {
logger.debug(`response action [${command}]:\n${stringify(req.body)}`);
logger.debug(() => `response action [${command}]:\n${stringify(req.body)}`);
// Note: because our API schemas are defined as module static variables (as opposed to a
// `getter` function), we need to include this additional validation here, since

View file

@ -98,7 +98,8 @@ export class CrowdstrikeActionsClient extends ResponseActionsClientImpl {
};
this.log.debug(
`calling connector actions 'execute()' for Crowdstrike with:\n${stringify(executeOptions)}`
() =>
`calling connector actions 'execute()' for Crowdstrike with:\n${stringify(executeOptions)}`
);
const actionSendResponse = await this.connectorActionsClient.execute(executeOptions);
@ -114,7 +115,7 @@ export class CrowdstrikeActionsClient extends ResponseActionsClientImpl {
actionSendResponse
);
} else {
this.log.debug(`Response:\n${stringify(actionSendResponse)}`);
this.log.debug(() => `Response:\n${stringify(actionSendResponse)}`);
}
return actionSendResponse;

View file

@ -242,7 +242,7 @@ export abstract class ResponseActionsClientImpl implements ResponseActionsClient
return;
}
this.log.debug(`Updating cases:\n${stringify(allCases)}`);
this.log.debug(() => `Updating cases:\n${stringify(allCases)}`);
const attachments: CaseAttachments = [
{
@ -283,7 +283,7 @@ export abstract class ResponseActionsClientImpl implements ResponseActionsClient
})
);
this.log.debug(`Update to cases done:\n${stringify(casesUpdateResponse)}`);
this.log.debug(() => `Update to cases done:\n${stringify(casesUpdateResponse)}`);
}
protected getMethodOptions<
@ -527,7 +527,7 @@ export abstract class ResponseActionsClientImpl implements ResponseActionsClient
): Promise<LogsEndpointActionResponse<TOutputContent>> {
const doc = this.buildActionResponseEsDoc(options);
this.log.debug(`Writing response action response:\n${stringify(doc)}`);
this.log.debug(() => `Writing response action response:\n${stringify(doc)}`);
await this.options.esClient
.index<LogsEndpointActionResponse<TOutputContent>>({

View file

@ -57,7 +57,7 @@ export class NormalizedExternalConnectorClient {
});
if (!connector) {
this.log.debug(stringify(connectorList));
this.log.debug(() => stringify(connectorList));
throw new ResponseActionsConnectorNotConfiguredError(connectorTypeId);
}

View file

@ -181,7 +181,8 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
};
this.log.debug(
`calling connector actions 'execute()' for SentinelOne with:\n${stringify(executeOptions)}`
() =>
`calling connector actions 'execute()' for SentinelOne with:\n${stringify(executeOptions)}`
);
const actionSendResponse = await this.connectorActionsClient.execute(executeOptions);
@ -198,7 +199,7 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
);
}
this.log.debug(`Response:\n${stringify(actionSendResponse)}`);
this.log.debug(() => `Response:\n${stringify(actionSendResponse)}`);
return actionSendResponse as ActionTypeExecutorResult<T>;
}
@ -227,7 +228,7 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
)) as ActionTypeExecutorResult<SentinelOneGetAgentsResponse>;
this.log.debug(
`Response for SentinelOne agent id [${agentUUID}] returned:\n${stringify(response)}`
() => `Response for SentinelOne agent id [${agentUUID}] returned:\n${stringify(response)}`
);
s1ApiResponse = response.data;
@ -439,9 +440,10 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
>(SUB_ACTION.GET_ACTIVITIES, activitySearchCriteria);
this.log.debug(
`Search of activity log with:\n${stringify(
activitySearchCriteria
)}\n returned:\n${stringify(activityLogSearchResponse.data)}`
() =>
`Search of activity log with:\n${stringify(
activitySearchCriteria
)}\n returned:\n${stringify(activityLogSearchResponse.data)}`
);
if (activityLogSearchResponse.data?.data.length) {
@ -771,10 +773,11 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
};
this.log.debug(
`searching for ${command} responses from [${SENTINEL_ONE_ACTIVITY_INDEX_PATTERN}] index with:\n${stringify(
searchRequestOptions,
15
)}`
() =>
`searching for ${command} responses from [${SENTINEL_ONE_ACTIVITY_INDEX_PATTERN}] index with:\n${stringify(
searchRequestOptions,
15
)}`
);
const searchResults = await this.options.esClient
@ -782,7 +785,10 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
.catch(catchAndWrapError);
this.log.debug(
`Search results for SentinelOne ${command} activity documents:\n${stringify(searchResults)}`
() =>
`Search results for SentinelOne ${command} activity documents:\n${stringify(
searchResults
)}`
);
for (const searchResultHit of searchResults.hits.hits) {
@ -831,9 +837,10 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
}
this.log.debug(
`${completedResponses.length} ${command} action responses generated:\n${stringify(
completedResponses
)}`
() =>
`${completedResponses.length} ${command} action responses generated:\n${stringify(
completedResponses
)}`
);
if (warnings.length > 0) {
@ -928,10 +935,11 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
if (Object.keys(actionsByAgentAndBatchId).length) {
this.log.debug(
`searching for get-file responses from [${SENTINEL_ONE_ACTIVITY_INDEX_PATTERN}] index with:\n${stringify(
searchRequestOptions,
15
)}`
() =>
`searching for get-file responses from [${SENTINEL_ONE_ACTIVITY_INDEX_PATTERN}] index with:\n${stringify(
searchRequestOptions,
15
)}`
);
const searchResults = await this.options.esClient
@ -939,7 +947,8 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
.catch(catchAndWrapError);
this.log.debug(
`Search results for SentinelOne get-file activity documents:\n${stringify(searchResults)}`
() =>
`Search results for SentinelOne get-file activity documents:\n${stringify(searchResults)}`
);
for (const s1Hit of searchResults.hits.hits) {
@ -1008,9 +1017,10 @@ export class SentinelOneActionsClient extends ResponseActionsClientImpl {
}
this.log.debug(
`${completedResponses.length} get-file action responses generated:\n${stringify(
completedResponses
)}`
() =>
`${completedResponses.length} get-file action responses generated:\n${stringify(
completedResponses
)}`
);
if (warnings.length > 0) {

View file

@ -91,7 +91,7 @@ export const setSignalsStatusRoute = (
DETECTION_ENGINE_SIGNALS_STATUS_URL,
status
);
logger.debug(`Sending Insights Payloads ${JSON.stringify(insightsPayloads)}`);
logger.debug(() => `Sending Insights Payloads ${JSON.stringify(insightsPayloads)}`);
await sender.sendOnDemand(INSIGHTS_CHANNEL, insightsPayloads);
}
}

View file

@ -246,11 +246,11 @@ describe('schedule_throttle_notification_actions', () => {
logger,
signals: [],
});
const debugMessages = loggingSystemMock.collect(logger).debug.map((values) => values[0]);
// We only test the first part since it has date math using math
expect(logger.debug.mock.calls[0][0]).toMatch(
/The notification throttle resultsLink created is/
);
expect(logger.debug.mock.calls[1][0]).toEqual(
expect(debugMessages[0]).toMatch(/The notification throttle resultsLink created is/);
expect(debugMessages[1]).toEqual(
'The notification throttle query result size before deconflicting duplicates is: 1. The notification throttle passed in signals size before deconflicting duplicates is: 0. The deconflicted size and size of the signals sent into throttle notification is: 1. The signals count from results size is: 1. The final signals count being sent to the notification is: 1.'
);
// error should not have been called in this case.

View file

@ -69,7 +69,7 @@ export const scheduleThrottledNotificationActions = async ({
kibanaSiemAppUrl,
});
logger.debug(
logger.debug(() =>
[
`The notification throttle resultsLink created is: ${resultsLink}.`,
' Notification throttle is querying the results using',
@ -117,7 +117,7 @@ export const scheduleThrottledNotificationActions = async ({
// Subtract any deconflicted differences from the total count.
const signalsCount = signalsCountFromResults + signals.length - deconflictedDiff;
logger.debug(
logger.debug(() =>
[
`The notification throttle query result size before deconflicting duplicates is: ${resultsFlattened.length}.`,
` The notification throttle passed in signals size before deconflicting duplicates is: ${signals.length}.`,

View file

@ -87,7 +87,7 @@ export const assetCriticalityPublicBulkUploadRoute = (
const tookMs = end.getTime() - start.getTime();
logger.debug(
`Asset criticality Bulk upload completed in ${tookMs}ms ${JSON.stringify(stats)}`
() => `Asset criticality Bulk upload completed in ${tookMs}ms ${JSON.stringify(stats)}`
);
const resBody: AssetCriticalityBulkUploadResponse = { errors, stats };

View file

@ -85,7 +85,9 @@ const handler: (
const end = new Date();
const tookMs = end.getTime() - start.getTime();
logger.debug(`Asset criticality CSV upload completed in ${tookMs}ms ${JSON.stringify(stats)}`);
logger.debug(
() => `Asset criticality CSV upload completed in ${tookMs}ms ${JSON.stringify(stats)}`
);
// type assignment here to ensure that the response body stays in sync with the API schema
const resBody: AssetCriticalityBulkUploadResponse = { errors, stats };

View file

@ -147,9 +147,10 @@ export const createDataStream = async ({
dataStreams = response.data_streams.map((dataStream) => dataStream.name);
logger.debug(
`Found ${dataStreams.length} concrete indices for ${indexPatterns.alias} - ${JSON.stringify(
dataStreams
)}`
() =>
`Found ${dataStreams.length} concrete indices for ${indexPatterns.alias} - ${JSON.stringify(
dataStreams
)}`
);
} catch (error) {
// 404 is expected if no datastream have been created

View file

@ -50,7 +50,7 @@ export class ProductFeatures<T extends string = string, S extends string = strin
Array.from(productFeatureConfig.values())
);
this.logger.debug(JSON.stringify(completeProductFeatureConfig));
this.logger.debug(() => JSON.stringify(completeProductFeatureConfig));
this.featuresSetup.registerKibanaFeature(completeProductFeatureConfig);
this.addRegisteredActions(completeProductFeatureConfig);
}

View file

@ -82,7 +82,7 @@ export const getAlerts = async ({
size: 0,
};
logger.debug(
`Getting alerts with point in time (PIT) query: ${JSON.stringify(ruleSearchOptions)}`
() => `Getting alerts with point in time (PIT) query: ${JSON.stringify(ruleSearchOptions)}`
);
const body = await esClient.search<unknown, AlertAggs>(ruleSearchOptions);
if (body.aggregations?.buckets?.buckets != null) {

View file

@ -34,7 +34,7 @@ export const getCaseComments = async ({
namespaces: ['*'],
filter: `${CASE_COMMENT_SAVED_OBJECT}.attributes.type: alert`,
};
logger.debug(`Getting cases with point in time (PIT) query:', ${JSON.stringify(query)}`);
logger.debug(() => `Getting cases with point in time (PIT) query:', ${JSON.stringify(query)}`);
const finder = savedObjectsClient.createPointInTimeFinder<AttachmentAttributes>(query);
let responses: Array<SavedObjectsFindResult<AttachmentAttributes>> = [];
for await (const response of finder.find()) {

Some files were not shown because too many files have changed in this diff Show more