Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)

[core.logging] Add RewriteAppender for filtering LogMeta. (#91492)

parent 3471eaa481
commit 0280d5a92b

20 changed files with 1031 additions and 13 deletions
@@ -8,5 +8,5 @@

<b>Signature:</b>

```typescript
export declare type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig | RollingFileAppenderConfig;
export declare type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig | RewriteAppenderConfig | RollingFileAppenderConfig;
```
@@ -15,6 +15,24 @@ import { LogRecord } from './log_record';
 */
export interface Appender {
  append(record: LogRecord): void;
  /**
   * Appenders can be "attached" to one another so that they are able to act
   * as a sort of middleware by calling `append` on a different appender.
   *
   * As appenders cannot be attached to each other until they are configured,
   * the `addAppender` method can be used to pass in a newly configured appender
   * to attach.
   */
  addAppender?(appenderRef: string, appender: Appender): void;
  /**
   * For appenders which implement `addAppender`, they should declare a list of
   * `appenderRefs`, which specify the names of the appenders that their configuration
   * depends on.
   *
   * Note that these are the appender key names that the user specifies in their
   * config, _not_ the names of the appender types themselves.
   */
  appenderRefs?: string[];
}

/**
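For illustration only (not part of this diff), a minimal appender honouring the `addAppender`/`appenderRefs` contract described above might look like the following sketch; `ForwardingAppender` is a hypothetical name:

```typescript
import { Appender, LogRecord } from '@kbn/logging';

// Hypothetical appender that simply forwards records to the appenders
// named in its `appenderRefs`, once the logging system attaches them.
class ForwardingAppender implements Appender {
  private readonly appenders = new Map<string, Appender>();

  constructor(public readonly appenderRefs: string[]) {}

  // Called by the logging system after the referenced appenders are configured.
  addAppender(appenderRef: string, appender: Appender) {
    this.appenders.set(appenderRef, appender);
  }

  append(record: LogRecord) {
    for (const ref of this.appenderRefs) {
      this.appenders.get(ref)?.append(record);
    }
  }
}
```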
@@ -251,7 +251,7 @@ describe('request logging', () => {
      expect(JSON.parse(meta).http.response.headers.bar).toBe('world');
    });

    it('filters sensitive request headers', async () => {
    it('filters sensitive request headers by default', async () => {
      const { http } = await root.setup();

      http.createRouter('/').post(
@@ -283,7 +283,139 @@ describe('request logging', () => {
      expect(JSON.parse(meta).http.request.headers.authorization).toBe('[REDACTED]');
    });

    it('filters sensitive response headers', async () => {
    it('filters sensitive request headers when RewriteAppender is configured', async () => {
      root = kbnTestServer.createRoot({
        logging: {
          silent: true,
          appenders: {
            'test-console': {
              type: 'console',
              layout: {
                type: 'pattern',
                pattern: '%level|%logger|%message|%meta',
              },
            },
            rewrite: {
              type: 'rewrite',
              appenders: ['test-console'],
              policy: {
                type: 'meta',
                mode: 'update',
                properties: [
                  { path: 'http.request.headers.authorization', value: '[REDACTED]' },
                ],
              },
            },
          },
          loggers: [
            {
              name: 'http.server.response',
              appenders: ['rewrite'],
              level: 'debug',
            },
          ],
        },
        plugins: {
          initialize: false,
        },
      });
      const { http } = await root.setup();

      http.createRouter('/').post(
        {
          path: '/ping',
          validate: {
            body: schema.object({ message: schema.string() }),
          },
          options: {
            authRequired: 'optional',
            body: {
              accepts: ['application/json'],
            },
            timeout: { payload: 100 },
          },
        },
        (context, req, res) => res.ok({ body: { message: req.body.message } })
      );
      await root.start();

      await kbnTestServer.request
        .post(root, '/ping')
        .set('content-type', 'application/json')
        .set('authorization', 'abc')
        .send({ message: 'hi' })
        .expect(200);
      expect(mockConsoleLog).toHaveBeenCalledTimes(1);
      const [, , , meta] = mockConsoleLog.mock.calls[0][0].split('|');
      expect(JSON.parse(meta).http.request.headers.authorization).toBe('[REDACTED]');
    });

    it('filters sensitive response headers by default', async () => {
      const { http } = await root.setup();

      http.createRouter('/').post(
        {
          path: '/ping',
          validate: {
            body: schema.object({ message: schema.string() }),
          },
          options: {
            authRequired: 'optional',
            body: {
              accepts: ['application/json'],
            },
            timeout: { payload: 100 },
          },
        },
        (context, req, res) =>
          res.ok({ headers: { 'set-cookie': ['123'] }, body: { message: req.body.message } })
      );
      await root.start();

      await kbnTestServer.request
        .post(root, '/ping')
        .set('Content-Type', 'application/json')
        .send({ message: 'hi' })
        .expect(200);
      expect(mockConsoleLog).toHaveBeenCalledTimes(1);
      const [, , , meta] = mockConsoleLog.mock.calls[0][0].split('|');
      expect(JSON.parse(meta).http.response.headers['set-cookie']).toBe('[REDACTED]');
    });

    it('filters sensitive response headers when RewriteAppender is configured', async () => {
      root = kbnTestServer.createRoot({
        logging: {
          silent: true,
          appenders: {
            'test-console': {
              type: 'console',
              layout: {
                type: 'pattern',
                pattern: '%level|%logger|%message|%meta',
              },
            },
            rewrite: {
              type: 'rewrite',
              appenders: ['test-console'],
              policy: {
                type: 'meta',
                mode: 'update',
                properties: [{ path: 'http.response.headers.set-cookie', value: '[REDACTED]' }],
              },
            },
          },
          loggers: [
            {
              name: 'http.server.response',
              appenders: ['rewrite'],
              level: 'debug',
            },
          ],
        },
        plugins: {
          initialize: false,
        },
      });
      const { http } = await root.setup();

      http.createRouter('/').post(
@@ -171,6 +171,53 @@ describe('getEcsResponseLog', () => {
  });

  test('does not mutate original headers', () => {
    const reqHeaders = { a: 'foo', b: ['hello', 'world'] };
    const resHeaders = { headers: { c: 'bar' } };
    const req = createMockHapiRequest({
      headers: reqHeaders,
      response: { headers: resHeaders },
    });

    const responseLog = getEcsResponseLog(req, logger);
    expect(reqHeaders).toMatchInlineSnapshot(`
      Object {
        "a": "foo",
        "b": Array [
          "hello",
          "world",
        ],
      }
    `);
    expect(resHeaders).toMatchInlineSnapshot(`
      Object {
        "headers": Object {
          "c": "bar",
        },
      }
    `);

    responseLog.http.request.headers.a = 'testA';
    responseLog.http.request.headers.b[1] = 'testB';
    responseLog.http.request.headers.c = 'testC';
    expect(reqHeaders).toMatchInlineSnapshot(`
      Object {
        "a": "foo",
        "b": Array [
          "hello",
          "world",
        ],
      }
    `);
    expect(resHeaders).toMatchInlineSnapshot(`
      Object {
        "headers": Object {
          "c": "bar",
        },
      }
    `);
  });

  test('does not mutate original headers when redacting sensitive data', () => {
    const reqHeaders = { authorization: 'a', cookie: 'b', 'user-agent': 'hi' };
    const resHeaders = { headers: { 'content-length': 123, 'set-cookie': 'c' } };
    const req = createMockHapiRequest({
@@ -18,14 +18,22 @@ const ECS_VERSION = '1.7.0';
const FORBIDDEN_HEADERS = ['authorization', 'cookie', 'set-cookie'];
const REDACTED_HEADER_TEXT = '[REDACTED]';

type HapiHeaders = Record<string, string | string[]>;

// We are excluding sensitive headers by default, until we have a log filtering mechanism.
function redactSensitiveHeaders(
  headers?: Record<string, string | string[]>
): Record<string, string | string[]> {
  const result = {} as Record<string, string | string[]>;
function redactSensitiveHeaders(key: string, value: string | string[]): string | string[] {
  return FORBIDDEN_HEADERS.includes(key) ? REDACTED_HEADER_TEXT : value;
}

// Shallow clone the headers so they are not mutated if filtered by a RewriteAppender.
function cloneAndFilterHeaders(headers?: HapiHeaders) {
  const result = {} as HapiHeaders;
  if (headers) {
    for (const key of Object.keys(headers)) {
      result[key] = FORBIDDEN_HEADERS.includes(key) ? REDACTED_HEADER_TEXT : headers[key];
      result[key] = redactSensitiveHeaders(
        key,
        Array.isArray(headers[key]) ? [...headers[key]] : headers[key]
      );
    }
  }
  return result;
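Because the hunk above interleaves the removed and the added implementations, here is how the two new helpers read once assembled (reconstructed from the lines above, as a sketch rather than an authoritative copy of the file):

```typescript
type HapiHeaders = Record<string, string | string[]>;

const FORBIDDEN_HEADERS = ['authorization', 'cookie', 'set-cookie'];
const REDACTED_HEADER_TEXT = '[REDACTED]';

// Replace the value of any sensitive header with the redaction marker.
function redactSensitiveHeaders(key: string, value: string | string[]): string | string[] {
  return FORBIDDEN_HEADERS.includes(key) ? REDACTED_HEADER_TEXT : value;
}

// Shallow clone the headers so they are not mutated if filtered by a RewriteAppender.
function cloneAndFilterHeaders(headers?: HapiHeaders) {
  const result = {} as HapiHeaders;
  if (headers) {
    for (const key of Object.keys(headers)) {
      result[key] = redactSensitiveHeaders(
        key,
        Array.isArray(headers[key]) ? [...headers[key]] : headers[key]
      );
    }
  }
  return result;
}
```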
@@ -45,7 +53,11 @@ export function getEcsResponseLog(request: Request, log: Logger): LogMeta {

  // eslint-disable-next-line @typescript-eslint/naming-convention
  const status_code = isBoom(response) ? response.output.statusCode : response.statusCode;
  const responseHeaders = isBoom(response) ? response.output.headers : response.headers;

  const requestHeaders = cloneAndFilterHeaders(request.headers);
  const responseHeaders = cloneAndFilterHeaders(
    isBoom(response) ? (response.output.headers as HapiHeaders) : response.headers
  );

  // borrowed from the hapi/good implementation
  const responseTime = (request.info.completed || request.info.responded) - request.info.received;
@@ -66,7 +78,7 @@ export function getEcsResponseLog(request: Request, log: Logger): LogMeta {
      mime_type: request.mime,
      referrer: request.info.referrer,
      // @ts-expect-error Headers are not yet part of ECS: https://github.com/elastic/ecs/issues/232.
      headers: redactSensitiveHeaders(request.headers),
      headers: requestHeaders,
    },
    response: {
      body: {
@@ -74,7 +86,7 @@ export function getEcsResponseLog(request: Request, log: Logger): LogMeta {
      },
      status_code,
      // @ts-expect-error Headers are not yet part of ECS: https://github.com/elastic/ecs/issues/232.
      headers: redactSensitiveHeaders(responseHeaders),
      headers: responseHeaders,
      // responseTime is a custom non-ECS field
      responseTime: !isNaN(responseTime) ? responseTime : undefined,
    },
@@ -278,6 +278,124 @@ The maximum number of files to keep. Once this number is reached, oldest files w

The default value is `7`

### Rewrite Appender

*This appender is currently considered experimental and is not intended
for public consumption. The API is subject to change at any time.*

Similar to log4j's `RewriteAppender`, this appender serves as a sort of middleware,
modifying the provided log events before passing them along to another
appender.

```yaml
logging:
  appenders:
    my-rewrite-appender:
      type: rewrite
      appenders: [console, file] # name of "destination" appender(s)
      policy:
        # ...
```

The most common use case for the `RewriteAppender` is when you want to
filter or censor sensitive data that may be contained in a log entry.
In fact, with a default configuration, Kibana will automatically redact
any `authorization`, `cookie`, or `set-cookie` headers when logging http
requests & responses.

To configure additional rewrite rules, you'll need to specify a `RewritePolicy`.

#### Rewrite Policies

Rewrite policies exist to indicate which parts of a log record can be
modified within the rewrite appender.

**Meta**

The `meta` rewrite policy can read and modify any data contained in the
`LogMeta` before passing it along to a destination appender.

Meta policies must specify one of two modes, which indicate which action
to perform on the configured properties:
- `update` updates an existing property at the provided `path`.
- `remove` removes an existing property at the provided `path`.

The `properties` are listed as a `path` and `value` pair, where `path` is
the dot-delimited path to the target property in the `LogMeta` object, and
`value` is the value to write to that target property. When using
the `remove` mode, a `value` is not necessary.

Here's an example of how you would replace any `cookie` header values with `[REDACTED]`:

```yaml
logging:
  appenders:
    my-rewrite-appender:
      type: rewrite
      appenders: [console]
      policy:
        type: meta # indicates that we want to rewrite the LogMeta
        mode: update # will update an existing property only
        properties:
          - path: "http.request.headers.cookie" # path to property
            value: "[REDACTED]" # value to replace at path
```
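To make the runtime effect of such a policy concrete, here is a small sketch using the `MetaRewritePolicy` added in this commit (the import path and record literal are illustrative only):

```typescript
import { LogLevel, LogRecord } from '@kbn/logging';
// Path is illustrative; the class lives under appenders/rewrite/policies/meta.
import { MetaRewritePolicy } from './appenders/rewrite/policies/meta/meta_policy';

const policy = new MetaRewritePolicy({
  type: 'meta',
  mode: 'update',
  properties: [{ path: 'http.request.headers.cookie', value: '[REDACTED]' }],
});

const record: LogRecord = {
  timestamp: new Date(),
  level: LogLevel.Info,
  context: 'http.server.response',
  message: 'GET /api/status 200',
  pid: 42,
  meta: { http: { request: { headers: { cookie: 'sid=abc123' } } } },
};

// meta.http.request.headers.cookie is 'sid=abc123' before the rewrite
// and '[REDACTED]' afterwards; every other field is left untouched.
const rewritten = policy.rewrite(record);
```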

Rewrite appenders can even be passed to other rewrite appenders to apply
multiple filter policies/modes, as long as this doesn't create a circular
reference. Each rewrite appender is applied sequentially (one after the other).
```yaml
logging:
  appenders:
    remove-request-headers:
      type: rewrite
      appenders: [censor-response-headers] # redirect to the next rewrite appender
      policy:
        type: meta
        mode: remove
        properties:
          - path: "http.request.headers" # remove all request headers
    censor-response-headers:
      type: rewrite
      appenders: [console] # output to console
      policy:
        type: meta
        mode: update
        properties:
          - path: "http.response.headers.set-cookie"
            value: "[REDACTED]"
```

#### Complete Example
```yaml
logging:
  appenders:
    console:
      type: console
      layout:
        type: pattern
        highlight: true
        pattern: "[%date][%level][%logger] %message %meta"
    file:
      type: file
      fileName: ./kibana.log
      layout:
        type: json
    censor:
      type: rewrite
      appenders: [console, file]
      policy:
        type: meta
        mode: update
        properties:
          - path: "http.request.headers.cookie"
            value: "[REDACTED]"
  loggers:
    - name: http.server.response
      appenders: [censor] # pass these logs to our rewrite appender
      level: debug
```

## Configuration

As any configuration in the platform, logging configuration is validated against the predefined schema and if there are
@@ -17,6 +17,7 @@ import {
import { Layouts } from '../layouts/layouts';
import { ConsoleAppender, ConsoleAppenderConfig } from './console/console_appender';
import { FileAppender, FileAppenderConfig } from './file/file_appender';
import { RewriteAppender, RewriteAppenderConfig } from './rewrite/rewrite_appender';
import {
  RollingFileAppender,
  RollingFileAppenderConfig,
@@ -32,6 +33,7 @@ export const appendersSchema = schema.oneOf([
  ConsoleAppender.configSchema,
  FileAppender.configSchema,
  LegacyAppender.configSchema,
  RewriteAppender.configSchema,
  RollingFileAppender.configSchema,
]);
@@ -40,6 +42,7 @@ export type AppenderConfigType =
  | ConsoleAppenderConfig
  | FileAppenderConfig
  | LegacyAppenderConfig
  | RewriteAppenderConfig
  | RollingFileAppenderConfig;

/** @internal */
@@ -57,6 +60,8 @@ export class Appenders {
        return new ConsoleAppender(Layouts.create(config.layout));
      case 'file':
        return new FileAppender(Layouts.create(config.layout), config.fileName);
      case 'rewrite':
        return new RewriteAppender(config);
      case 'rolling-file':
        return new RollingFileAppender(config);
      case 'legacy-appender':
src/core/server/logging/appenders/rewrite/mocks.ts (new file, 20 lines)

@@ -0,0 +1,20 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import type { RewritePolicy } from './policies/policy';

const createPolicyMock = () => {
  const mock: jest.Mocked<RewritePolicy> = {
    rewrite: jest.fn((x) => x),
  };
  return mock;
};

export const rewriteAppenderMocks = {
  createPolicy: createPolicyMock,
};
src/core/server/logging/appenders/rewrite/policies/index.ts (new file, 30 lines)

@@ -0,0 +1,30 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { assertNever } from '@kbn/std';
import { RewritePolicy } from './policy';
import { MetaRewritePolicy, MetaRewritePolicyConfig, metaRewritePolicyConfigSchema } from './meta';

export { RewritePolicy };

/**
 * Available rewrite policies which specify what part of a {@link LogRecord}
 * can be modified.
 */
export type RewritePolicyConfig = MetaRewritePolicyConfig;

export const rewritePolicyConfigSchema = metaRewritePolicyConfigSchema;

export const createRewritePolicy = (config: RewritePolicyConfig): RewritePolicy => {
  switch (config.type) {
    case 'meta':
      return new MetaRewritePolicy(config);
    default:
      return assertNever(config.type);
  }
};
@@ -0,0 +1,13 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

export {
  MetaRewritePolicy,
  MetaRewritePolicyConfig,
  metaRewritePolicyConfigSchema,
} from './meta_policy';
@@ -0,0 +1,154 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { LogRecord, LogLevel, LogMeta } from '@kbn/logging';
import { MetaRewritePolicy, MetaRewritePolicyConfig } from './meta_policy';

describe('MetaRewritePolicy', () => {
  const createPolicy = (
    mode: MetaRewritePolicyConfig['mode'],
    properties: MetaRewritePolicyConfig['properties']
  ) => new MetaRewritePolicy({ type: 'meta', mode, properties });

  const createLogRecord = (meta: LogMeta = {}): LogRecord => ({
    timestamp: new Date(Date.UTC(2012, 1, 1, 14, 30, 22, 11)),
    level: LogLevel.Info,
    context: 'context',
    message: 'just a log',
    pid: 42,
    meta,
  });

  describe('mode: update', () => {
    it('updates existing properties in LogMeta', () => {
      const log = createLogRecord({ a: 'before' });
      const policy = createPolicy('update', [{ path: 'a', value: 'after' }]);
      expect(policy.rewrite(log).meta!.a).toBe('after');
    });

    it('updates nested properties in LogMeta', () => {
      const log = createLogRecord({ a: 'before a', b: { c: 'before b.c' }, d: [0, 1] });
      const policy = createPolicy('update', [
        { path: 'a', value: 'after a' },
        { path: 'b.c', value: 'after b.c' },
        { path: 'd[1]', value: 2 },
      ]);
      expect(policy.rewrite(log).meta).toMatchInlineSnapshot(`
        Object {
          "a": "after a",
          "b": Object {
            "c": "after b.c",
          },
          "d": Array [
            0,
            2,
          ],
        }
      `);
    });

    it('handles string, number, boolean, null', () => {
      const policy = createPolicy('update', [
        { path: 'a', value: false },
        { path: 'b', value: null },
        { path: 'c', value: 123 },
        { path: 'd', value: 'hi' },
      ]);
      const log = createLogRecord({
        a: 'a',
        b: 'b',
        c: 'c',
        d: 'd',
      });
      expect(policy.rewrite(log).meta).toMatchInlineSnapshot(`
        Object {
          "a": false,
          "b": null,
          "c": 123,
          "d": "hi",
        }
      `);
    });

    it(`does not add properties which don't exist yet`, () => {
      const policy = createPolicy('update', [
        { path: 'a.b', value: 'foo' },
        { path: 'a.c', value: 'bar' },
      ]);
      const log = createLogRecord({ a: { b: 'existing meta' } });
      const { meta } = policy.rewrite(log);
      expect(meta!.a.b).toBe('foo');
      expect(meta!.a.c).toBeUndefined();
    });

    it('does not touch anything outside of LogMeta', () => {
      const policy = createPolicy('update', [{ path: 'a', value: 'bar' }]);
      const message = Symbol();
      expect(
        policy.rewrite(({ message, meta: { a: 'foo' } } as unknown) as LogRecord).message
      ).toBe(message);
      expect(policy.rewrite(({ message, meta: { a: 'foo' } } as unknown) as LogRecord))
        .toMatchInlineSnapshot(`
        Object {
          "message": Symbol(),
          "meta": Object {
            "a": "bar",
          },
        }
      `);
    });
  });

  describe('mode: remove', () => {
    it('removes existing properties in LogMeta', () => {
      const log = createLogRecord({ a: 'goodbye' });
      const policy = createPolicy('remove', [{ path: 'a' }]);
      expect(policy.rewrite(log).meta!.a).toBeUndefined();
    });

    it('removes nested properties in LogMeta', () => {
      const log = createLogRecord({ a: 'a', b: { c: 'b.c' }, d: [0, 1] });
      const policy = createPolicy('remove', [{ path: 'b.c' }, { path: 'd[1]' }]);
      expect(policy.rewrite(log).meta).toMatchInlineSnapshot(`
        Object {
          "a": "a",
          "b": Object {},
          "d": Array [
            0,
            undefined,
          ],
        }
      `);
    });

    it('has no effect if property does not exist', () => {
      const log = createLogRecord({ a: 'a' });
      const policy = createPolicy('remove', [{ path: 'b' }]);
      expect(policy.rewrite(log).meta).toMatchInlineSnapshot(`
        Object {
          "a": "a",
        }
      `);
    });

    it('does not touch anything outside of LogMeta', () => {
      const policy = createPolicy('remove', [{ path: 'message' }]);
      const message = Symbol();
      expect(
        policy.rewrite(({ message, meta: { message: 'foo' } } as unknown) as LogRecord).message
      ).toBe(message);
      expect(policy.rewrite(({ message, meta: { message: 'foo' } } as unknown) as LogRecord))
        .toMatchInlineSnapshot(`
        Object {
          "message": Symbol(),
          "meta": Object {},
        }
      `);
    });
  });
});
@@ -0,0 +1,90 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { schema } from '@kbn/config-schema';
import { LogRecord } from '@kbn/logging';
import { set } from '@elastic/safer-lodash-set';
import { has, unset } from 'lodash';
import { assertNever } from '@kbn/std';
import { RewritePolicy } from '../policy';

type MetaRewritePolicyConfigProperties = Array<{
  path: string;
  value?: string | number | boolean | null;
}>;

export interface MetaRewritePolicyConfig {
  type: 'meta';

  /**
   * The 'mode' specifies what action to perform on the specified properties.
   *  - 'update' updates an existing property at the provided 'path'.
   *  - 'remove' removes an existing property at the provided 'path'.
   */
  mode: 'remove' | 'update';

  /**
   * The properties to modify.
   *
   * @remarks
   * Each provided 'path' is relative to the record's {@link LogMeta}.
   * For the 'remove' mode, no 'value' is provided.
   */
  properties: MetaRewritePolicyConfigProperties;
}

export const metaRewritePolicyConfigSchema = schema.object({
  type: schema.literal('meta'),
  mode: schema.oneOf([schema.literal('update'), schema.literal('remove')], {
    defaultValue: 'update',
  }),
  properties: schema.arrayOf(
    schema.object({
      path: schema.string(),
      value: schema.maybe(
        schema.nullable(schema.oneOf([schema.string(), schema.number(), schema.boolean()]))
      ),
    })
  ),
});

/**
 * A rewrite policy which can add, remove, or update properties
 * from a record's {@link LogMeta}.
 */
export class MetaRewritePolicy implements RewritePolicy {
  constructor(private readonly config: MetaRewritePolicyConfig) {}

  rewrite(record: LogRecord): LogRecord {
    switch (this.config.mode) {
      case 'update':
        return this.update(record);
      case 'remove':
        return this.remove(record);
      default:
        return assertNever(this.config.mode);
    }
  }

  private update(record: LogRecord) {
    for (const { path, value } of this.config.properties) {
      if (!has(record, `meta.${path}`)) {
        continue; // don't add properties which don't already exist
      }
      set(record, `meta.${path}`, value);
    }
    return record;
  }

  private remove(record: LogRecord) {
    for (const { path } of this.config.properties) {
      unset(record, `meta.${path}`);
    }
    return record;
  }
}
src/core/server/logging/appenders/rewrite/policies/policy.ts (new file, 16 lines)

@@ -0,0 +1,16 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { LogRecord } from '@kbn/logging';

/**
 * Rewrites a {@link LogRecord} based on the policy's configuration.
 **/
export interface RewritePolicy {
  rewrite(record: LogRecord): LogRecord;
}
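For illustration (hypothetical, not part of this commit), any object with a matching `rewrite` method satisfies this interface; for example, a policy that drops `meta` entirely:

```typescript
import { LogRecord } from '@kbn/logging';
import { RewritePolicy } from './policy';

// Hypothetical policy that strips all meta from a record before it is appended.
class StripMetaRewritePolicy implements RewritePolicy {
  rewrite(record: LogRecord): LogRecord {
    return { ...record, meta: undefined };
  }
}
```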
@@ -0,0 +1,19 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { schema } from '@kbn/config-schema';

export const createRewritePolicyMock = jest.fn();
jest.doMock('./policies', () => ({
  rewritePolicyConfigSchema: schema.any(),
  createRewritePolicy: createRewritePolicyMock,
}));

export const resetAllMocks = () => {
  createRewritePolicyMock.mockReset();
};
@@ -0,0 +1,137 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import type { MockedKeys } from '@kbn/utility-types/jest';
import { createRewritePolicyMock, resetAllMocks } from './rewrite_appender.test.mocks';
import { rewriteAppenderMocks } from './mocks';
import { LogLevel, LogRecord, LogMeta, DisposableAppender } from '@kbn/logging';
import { RewriteAppender, RewriteAppenderConfig } from './rewrite_appender';

// Helper to ensure tuple is typed [A, B] instead of Array<A | B>
const toTuple = <A, B>(a: A, b: B): [A, B] => [a, b];

const createAppenderMock = (name: string) => {
  const appenderMock: MockedKeys<DisposableAppender> = {
    append: jest.fn(),
    dispose: jest.fn(),
  };

  return toTuple(name, appenderMock);
};

const createConfig = (appenderNames: string[]): RewriteAppenderConfig => ({
  type: 'rewrite',
  appenders: appenderNames,
  policy: {
    type: 'meta',
    mode: 'update',
    properties: [{ path: 'foo', value: 'bar' }],
  },
});

const createLogRecord = (meta: LogMeta = {}): LogRecord => ({
  timestamp: new Date(),
  level: LogLevel.Info,
  context: 'context',
  message: 'just a log',
  pid: 42,
  meta,
});

describe('RewriteAppender', () => {
  let policy: ReturnType<typeof rewriteAppenderMocks.createPolicy>;

  beforeEach(() => {
    policy = rewriteAppenderMocks.createPolicy();
    createRewritePolicyMock.mockReturnValue(policy);
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  afterAll(() => {
    resetAllMocks();
  });

  it('creates a rewrite policy with the provided config', () => {
    const config = createConfig([]);
    new RewriteAppender(config);
    expect(createRewritePolicyMock).toHaveBeenCalledTimes(1);
    expect(createRewritePolicyMock).toHaveBeenCalledWith(config.policy);
  });

  describe('#addAppender', () => {
    it('updates the map of available appenders', () => {
      const config = createConfig(['mock1']);
      const appender = new RewriteAppender(config);
      appender.addAppender(...createAppenderMock('mock1'));
      expect(() => {
        appender.append(createLogRecord());
      }).not.toThrowError();
    });
  });

  describe('#append', () => {
    it('calls the configured appenders with the provided LogRecord', () => {
      const config = createConfig(['mock1', 'mock2']);
      const appenderMocks = [createAppenderMock('mock1'), createAppenderMock('mock2')];

      const appender = new RewriteAppender(config);
      appenderMocks.forEach((mock) => appender.addAppender(...mock));

      const log1 = createLogRecord({ a: 'b' });
      const log2 = createLogRecord({ c: 'd' });

      appender.append(log1);

      expect(appenderMocks[0][1].append).toHaveBeenCalledTimes(1);
      expect(appenderMocks[1][1].append).toHaveBeenCalledTimes(1);
      expect(appenderMocks[0][1].append).toHaveBeenCalledWith(log1);
      expect(appenderMocks[1][1].append).toHaveBeenCalledWith(log1);

      appender.append(log2);

      expect(appenderMocks[0][1].append).toHaveBeenCalledTimes(2);
      expect(appenderMocks[1][1].append).toHaveBeenCalledTimes(2);
      expect(appenderMocks[0][1].append).toHaveBeenCalledWith(log2);
      expect(appenderMocks[1][1].append).toHaveBeenCalledWith(log2);
    });

    it('calls `rewrite` on the configured policy', () => {
      const config = createConfig(['mock1']);

      const appender = new RewriteAppender(config);
      appender.addAppender(...createAppenderMock('mock1'));

      const log1 = createLogRecord({ a: 'b' });
      const log2 = createLogRecord({ c: 'd' });

      appender.append(log1);

      expect(policy.rewrite).toHaveBeenCalledTimes(1);
      expect(policy.rewrite.mock.calls).toEqual([[log1]]);

      appender.append(log2);

      expect(policy.rewrite).toHaveBeenCalledTimes(2);
      expect(policy.rewrite.mock.calls).toEqual([[log1], [log2]]);
    });

    it('throws if an appender key cannot be found', () => {
      const config = createConfig(['oops']);
      const appender = new RewriteAppender(config);

      expect(() => {
        appender.append(createLogRecord());
      }).toThrowErrorMatchingInlineSnapshot(
        `"Rewrite Appender could not find appender key \\"oops\\". Be sure \`appender.addAppender()\` was called before \`appender.append()\`."`
      );
    });
  });
});
src/core/server/logging/appenders/rewrite/rewrite_appender.ts (new file, 100 lines)

@@ -0,0 +1,100 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { schema } from '@kbn/config-schema';
import { LogRecord, Appender, DisposableAppender } from '@kbn/logging';
import {
  createRewritePolicy,
  rewritePolicyConfigSchema,
  RewritePolicy,
  RewritePolicyConfig,
} from './policies';

export interface RewriteAppenderConfig {
  type: 'rewrite';
  /**
   * The {@link Appender | appender(s)} to pass the log event to after
   * implementing the specified rewrite policy.
   */
  appenders: string[];
  /**
   * The {@link RewritePolicy | policy} to use to manipulate the provided data.
   */
  policy: RewritePolicyConfig;
}

/**
 * Appender that can modify the `LogRecord` instances it receives before passing
 * them along to another {@link Appender}.
 * @internal
 */
export class RewriteAppender implements DisposableAppender {
  public static configSchema = schema.object({
    type: schema.literal('rewrite'),
    appenders: schema.arrayOf(schema.string(), { defaultValue: [] }),
    policy: rewritePolicyConfigSchema,
  });

  private appenders: Map<string, Appender> = new Map();
  private readonly policy: RewritePolicy;

  constructor(private readonly config: RewriteAppenderConfig) {
    this.policy = createRewritePolicy(config.policy);
  }

  /**
   * List of appenders that are dependencies of this appender.
   *
   * `addAppender` will throw an error when called with an appender
   * reference that isn't in this list.
   */
  public get appenderRefs() {
    return this.config.appenders;
  }

  /**
   * Appenders can be "attached" to this one so that the RewriteAppender
   * is able to act as a sort of middleware by calling `append` on other appenders.
   *
   * As appenders cannot be attached to each other until they are created,
   * the `addAppender` method is used to pass in a configured appender.
   */
  public addAppender(appenderRef: string, appender: Appender) {
    if (!this.appenderRefs.includes(appenderRef)) {
      throw new Error(
        `addAppender was called with an appender key that is missing from the appenderRefs: "${appenderRef}".`
      );
    }

    this.appenders.set(appenderRef, appender);
  }

  /**
   * Modifies the `record` and passes it to the specified appender.
   */
  public append(record: LogRecord) {
    const rewrittenRecord = this.policy.rewrite(record);
    for (const appenderRef of this.appenderRefs) {
      const appender = this.appenders.get(appenderRef);
      if (!appender) {
        throw new Error(
          `Rewrite Appender could not find appender key "${appenderRef}". ` +
            'Be sure `appender.addAppender()` was called before `appender.append()`.'
        );
      }
      appender.append(rewrittenRecord);
    }
  }

  /**
   * Disposes `RewriteAppender`.
   */
  public dispose() {
    this.appenders.clear();
  }
}
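As a usage sketch (mirroring what the LoggingSystem does when it wires appenders together; the inline collector appender and log record are illustrative):

```typescript
import { Appender, LogLevel, LogRecord } from '@kbn/logging';
import { RewriteAppender } from './rewrite_appender';

// Minimal destination appender for the sketch.
const collected: LogRecord[] = [];
const collector: Appender = {
  append: (record) => {
    collected.push(record);
  },
};

const rewrite = new RewriteAppender({
  type: 'rewrite',
  appenders: ['collector'],
  policy: { type: 'meta', mode: 'remove', properties: [{ path: 'http.request.headers' }] },
});

// Referenced appenders must be attached before `append` is called,
// otherwise the appender throws (see the error message above).
rewrite.addAppender('collector', collector);

rewrite.append({
  timestamp: new Date(),
  level: LogLevel.Info,
  context: 'http.server.response',
  message: 'GET /api/status 200',
  pid: 42,
  meta: { http: { request: { headers: { authorization: 'Bearer abc' } } } },
});
// The policy has removed `meta.http.request.headers` from the record
// that `collector` received.
```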
@@ -78,7 +78,6 @@ test('correctly fills in custom `appenders` config.', () => {
    type: 'console',
    layout: { type: 'pattern', highlight: true },
  });

  expect(configValue.appenders.get('console')).toEqual({
    type: 'console',
    layout: { type: 'pattern' },
@@ -134,6 +134,76 @@ test('uses `root` logger if context name is not specified.', async () => {
  expect(mockConsoleLog.mock.calls).toMatchSnapshot();
});

test('attaches appenders to appenders that declare refs', async () => {
  await system.upgrade(
    config.schema.validate({
      appenders: {
        console: {
          type: 'console',
          layout: { type: 'pattern', pattern: '[%logger] %message %meta' },
        },
        file: {
          type: 'file',
          layout: { type: 'pattern', pattern: '[%logger] %message %meta' },
          fileName: 'path',
        },
        rewrite: {
          type: 'rewrite',
          appenders: ['console', 'file'],
          policy: { type: 'meta', mode: 'remove', properties: [{ path: 'b' }] },
        },
      },
      loggers: [{ name: 'tests', level: 'warn', appenders: ['rewrite'] }],
    })
  );

  const testLogger = system.get('tests');
  testLogger.warn('This message goes to a test context.', { a: 'hi', b: 'remove me' });

  expect(mockConsoleLog).toHaveBeenCalledTimes(1);
  expect(mockConsoleLog.mock.calls[0][0]).toMatchInlineSnapshot(
    `"[tests] This message goes to a test context. {\\"a\\":\\"hi\\"}"`
  );

  expect(mockStreamWrite).toHaveBeenCalledTimes(1);
  expect(mockStreamWrite.mock.calls[0][0]).toMatchInlineSnapshot(`
    "[tests] This message goes to a test context. {\\"a\\":\\"hi\\"}
    "
  `);
});

test('throws if a circular appender reference is detected', async () => {
  expect(async () => {
    await system.upgrade(
      config.schema.validate({
        appenders: {
          console: { type: 'console', layout: { type: 'pattern' } },
          a: {
            type: 'rewrite',
            appenders: ['b'],
            policy: { type: 'meta', mode: 'remove', properties: [{ path: 'b' }] },
          },
          b: {
            type: 'rewrite',
            appenders: ['c'],
            policy: { type: 'meta', mode: 'remove', properties: [{ path: 'b' }] },
          },
          c: {
            type: 'rewrite',
            appenders: ['console', 'a'],
            policy: { type: 'meta', mode: 'remove', properties: [{ path: 'b' }] },
          },
        },
        loggers: [{ name: 'tests', level: 'warn', appenders: ['a'] }],
      })
    );
  }).rejects.toThrowErrorMatchingInlineSnapshot(
    `"Circular appender reference detected: [b -> c -> a -> b]"`
  );

  expect(mockConsoleLog).toHaveBeenCalledTimes(0);
});

test('`stop()` disposes all appenders.', async () => {
  await system.upgrade(
    config.schema.validate({
@@ -146,6 +146,26 @@ export class LoggingSystem implements LoggerFactory {
    return this.getLoggerConfigByContext(config, LoggingConfig.getParentLoggerContext(context));
  }

  /**
   * Retrieves an appender by the provided key, after first checking that no circular
   * dependencies exist between appender refs.
   */
  private getAppenderByRef(appenderRef: string) {
    const checkCircularRefs = (key: string, stack: string[]) => {
      if (stack.includes(key)) {
        throw new Error(`Circular appender reference detected: [${stack.join(' -> ')} -> ${key}]`);
      }
      stack.push(key);
      const appender = this.appenders.get(key);
      if (appender?.appenderRefs) {
        appender.appenderRefs.forEach((ref) => checkCircularRefs(ref, [...stack]));
      }
      return appender;
    };

    return checkCircularRefs(appenderRef, []);
  }

  private async applyBaseConfig(newBaseConfig: LoggingConfig) {
    const computedConfig = [...this.contextConfigs.values()].reduce(
      (baseConfig, contextConfig) => baseConfig.extend(contextConfig),
@@ -167,6 +187,23 @@ export class LoggingSystem implements LoggerFactory {
      this.appenders.set(appenderKey, Appenders.create(appenderConfig));
    }

    // Once all appenders have been created, check for any that have explicitly
    // declared `appenderRefs` dependencies, and look up those dependencies to
    // attach to the appender. This enables appenders to act as a sort of
    // middleware and call `append` on each other if needed.
    for (const [key, appender] of this.appenders) {
      if (!appender.addAppender || !appender.appenderRefs) {
        continue;
      }
      for (const ref of appender.appenderRefs) {
        const foundAppender = this.getAppenderByRef(ref);
        if (!foundAppender) {
          throw new Error(`Appender "${key}" config contains unknown appender key "${ref}".`);
        }
        appender.addAppender(ref, foundAppender);
      }
    }

    for (const [loggerKey, loggerAdapter] of this.loggers) {
      loggerAdapter.updateLogger(this.createLogger(loggerKey, computedConfig));
    }
@@ -180,10 +180,11 @@ export interface AppCategory {
// Warning: (ae-forgotten-export) The symbol "ConsoleAppenderConfig" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "FileAppenderConfig" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "LegacyAppenderConfig" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "RewriteAppenderConfig" needs to be exported by the entry point index.d.ts
// Warning: (ae-forgotten-export) The symbol "RollingFileAppenderConfig" needs to be exported by the entry point index.d.ts
//
// @public (undocumented)
export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig | RollingFileAppenderConfig;
export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig | RewriteAppenderConfig | RollingFileAppenderConfig;

// @public @deprecated (undocumented)
export interface AssistanceAPIResponse {