[Security Solution][Telemetry] Review logging (#225077)

## Summary

This PR reduces logging in the security solution plugin’s telemetry code
for production environments by reviewing existing logs and adjusting
their log levels.

- Deprecated a logging helper method and moved to standard
`logger.[debug|info|warn|error]` calls.
- Reviewed all the log messages to adjust their levels, remove the
non-useful ones, and improve their wording.

---------

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Co-authored-by: Alejandro Fernández Haro <afharo@gmail.com>
This commit is contained in:
Sebastián Zaffarano 2025-06-26 20:34:49 +02:00 committed by GitHub
parent 5220391abb
commit 5b4a65b98c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
19 changed files with 163 additions and 167 deletions

View file

@ -69,7 +69,7 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
telemetryUsageCounter?: IUsageCounter, telemetryUsageCounter?: IUsageCounter,
analytics?: AnalyticsServiceSetup analytics?: AnalyticsServiceSetup
): void { ): void {
this.logger.l(`Setting up ${AsyncTelemetryEventsSender.name}`); this.logger.debug('Setting up service');
this.ensureStatus(ServiceStatus.CREATED); this.ensureStatus(ServiceStatus.CREATED);
@ -86,7 +86,7 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
} }
public start(telemetryStart?: TelemetryPluginStart): void { public start(telemetryStart?: TelemetryPluginStart): void {
this.logger.l(`Starting ${AsyncTelemetryEventsSender.name}`); this.logger.debug('Starting service');
this.ensureStatus(ServiceStatus.CONFIGURED); this.ensureStatus(ServiceStatus.CONFIGURED);
@ -132,13 +132,11 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
); );
} }
}, },
error: (err) => { error: (error) => {
this.logger.warn('Unexpected error sending events to channel', { this.logger.warn('Unexpected error sending events to channel', { error });
error: JSON.stringify(err),
} as LogMeta);
}, },
complete: () => { complete: () => {
this.logger.l('Shutting down'); this.logger.debug('Shutting down');
this.finished$.next(); this.finished$.next();
}, },
}); });
@ -148,7 +146,7 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
} }
public async stop(): Promise<void> { public async stop(): Promise<void> {
this.logger.l(`Stopping ${AsyncTelemetryEventsSender.name}`); this.logger.debug('Stopping service');
this.ensureStatus(ServiceStatus.CONFIGURED, ServiceStatus.STARTED); this.ensureStatus(ServiceStatus.CONFIGURED, ServiceStatus.STARTED);
@ -231,9 +229,7 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
if (inflightEventsCounter < this.getConfigFor(channel).inflightEventsThreshold) { if (inflightEventsCounter < this.getConfigFor(channel).inflightEventsThreshold) {
return rx.of(event); return rx.of(event);
} }
this.logger.l( this.logger.debug('Dropping event', { event, channel, inflightEventsCounter } as LogMeta);
`>> Dropping event ${event} (channel: ${channel}, inflightEventsCounter: ${inflightEventsCounter})`
);
this.senderUtils?.incrementCounter(TelemetryCounter.DOCS_DROPPED, 1, channel); this.senderUtils?.incrementCounter(TelemetryCounter.DOCS_DROPPED, 1, channel);
return rx.EMPTY; return rx.EMPTY;
@ -370,23 +366,21 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
if (r.status < 400) { if (r.status < 400) {
return { events: events.length, channel }; return { events: events.length, channel };
} else { } else {
this.logger.l('Unexpected response', { this.logger.warn('Unexpected response', {
status: r.status, status: r.status,
} as LogMeta); } as LogMeta);
throw newFailure(`Got ${r.status}`, channel, events.length); throw newFailure(`Got ${r.status}`, channel, events.length);
} }
}) })
.catch((err) => { .catch((error) => {
this.senderUtils?.incrementCounter( this.senderUtils?.incrementCounter(
TelemetryCounter.RUNTIME_ERROR, TelemetryCounter.RUNTIME_ERROR,
events.length, events.length,
channel channel
); );
this.logger.warn('Runtime error', { this.logger.warn('Runtime error', { error });
error: err.message, throw newFailure(`Error posting events: ${error}`, channel, events.length);
} as LogMeta);
throw newFailure(`Error posting events: ${err}`, channel, events.length);
}); });
} catch (err: unknown) { } catch (err: unknown) {
if (isFailure(err)) { if (isFailure(err)) {

View file

@ -576,7 +576,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
} }
public async *fetchDiagnosticAlertsBatch(executeFrom: string, executeTo: string) { public async *fetchDiagnosticAlertsBatch(executeFrom: string, executeTo: string) {
this.logger.l('Searching diagnostic alerts', { this.logger.debug('Searching diagnostic alerts', {
from: executeFrom, from: executeFrom,
to: executeTo, to: executeTo,
} as LogMeta); } as LogMeta);
@ -620,7 +620,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
fetchMore = false; fetchMore = false;
} }
this.logger.l('Diagnostic alerts to return', { numOfHits } as LogMeta); this.logger.debug('Diagnostic alerts to return', { numOfHits } as LogMeta);
fetchMore = numOfHits > 0 && numOfHits < telemetryConfiguration.telemetry_max_buffer_size; fetchMore = numOfHits > 0 && numOfHits < telemetryConfiguration.telemetry_max_buffer_size;
} catch (e) { } catch (e) {
this.logger.warn('Error fetching alerts', { error_message: e.message } as LogMeta); this.logger.warn('Error fetching alerts', { error_message: e.message } as LogMeta);
@ -866,7 +866,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
executeFrom: string, executeFrom: string,
executeTo: string executeTo: string
) { ) {
this.logger.l('Searching prebuilt rule alerts from', { this.logger.debug('Searching prebuilt rule alerts from', {
executeFrom, executeFrom,
executeTo, executeTo,
} as LogMeta); } as LogMeta);
@ -1004,7 +1004,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
pitId = response?.pit_id; pitId = response?.pit_id;
} }
this.logger.l('Prebuilt rule alerts to return', { alerts: alerts.length } as LogMeta); this.logger.debug('Prebuilt rule alerts to return', { alerts: alerts.length } as LogMeta);
yield alerts; yield alerts;
} }
@ -1146,7 +1146,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
} as LogMeta); } as LogMeta);
} }
this.logger.l('Timeline alerts to return', { alerts: alertsToReturn.length }); this.logger.debug('Timeline alerts to return', { alerts: alertsToReturn.length } as LogMeta);
return alertsToReturn || []; return alertsToReturn || [];
} }
@ -1419,7 +1419,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
public async getIndices(): Promise<IndexSettings[]> { public async getIndices(): Promise<IndexSettings[]> {
const es = this.esClient(); const es = this.esClient();
this.logger.l('Fetching indices'); this.logger.debug('Fetching indices');
const request: IndicesGetRequest = { const request: IndicesGetRequest = {
index: '*', index: '*',
@ -1455,7 +1455,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
public async getDataStreams(): Promise<DataStream[]> { public async getDataStreams(): Promise<DataStream[]> {
const es = this.esClient(); const es = this.esClient();
this.logger.l('Fetching datstreams'); this.logger.debug('Fetching datstreams');
const request: IndicesGetDataStreamRequest = { const request: IndicesGetDataStreamRequest = {
name: '*', name: '*',
@ -1497,11 +1497,11 @@ export class TelemetryReceiver implements ITelemetryReceiver {
const es = this.esClient(); const es = this.esClient();
const safeChunkSize = Math.min(chunkSize, 3000); const safeChunkSize = Math.min(chunkSize, 3000);
this.logger.l('Fetching indices stats'); this.logger.debug('Fetching indices stats');
const groupedIndices = chunkStringsByMaxLength(indices, safeChunkSize); const groupedIndices = chunkStringsByMaxLength(indices, safeChunkSize);
this.logger.l('Splitted indices into groups', { this.logger.debug('Splitted indices into groups', {
groups: groupedIndices.length, groups: groupedIndices.length,
indices: indices.length, indices: indices.length,
} as LogMeta); } as LogMeta);
@ -1565,7 +1565,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
const groupedIndices = chunkStringsByMaxLength(indices, safeChunkSize); const groupedIndices = chunkStringsByMaxLength(indices, safeChunkSize);
this.logger.l('Splitted ilms into groups', { this.logger.debug('Splitted ilms into groups', {
groups: groupedIndices.length, groups: groupedIndices.length,
indices: indices.length, indices: indices.length,
} as LogMeta); } as LogMeta);
@ -1600,7 +1600,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
public async getIndexTemplatesStats(): Promise<IndexTemplateInfo[]> { public async getIndexTemplatesStats(): Promise<IndexTemplateInfo[]> {
const es = this.esClient(); const es = this.esClient();
this.logger.l('Fetching datstreams'); this.logger.debug('Fetching datstreams');
const request: IndicesGetIndexTemplateRequest = { const request: IndicesGetIndexTemplateRequest = {
name: '*', name: '*',
@ -1661,13 +1661,13 @@ export class TelemetryReceiver implements ITelemetryReceiver {
const groupedIlms = chunkStringsByMaxLength(ilms, safeChunkSize); const groupedIlms = chunkStringsByMaxLength(ilms, safeChunkSize);
this.logger.l('Splitted ilms into groups', { this.logger.debug('Splitted ilms into groups', {
groups: groupedIlms.length, groups: groupedIlms.length,
ilms: ilms.length, ilms: ilms.length,
} as LogMeta); } as LogMeta);
for (const group of groupedIlms) { for (const group of groupedIlms) {
this.logger.l('Fetching ilm policies'); this.logger.debug('Fetching ilm policies');
const request: IlmGetLifecycleRequest = { const request: IlmGetLifecycleRequest = {
name: group.join(','), name: group.join(','),
filter_path: [ filter_path: [
@ -1707,7 +1707,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
public async getIngestPipelinesStats(timeout: Duration): Promise<NodeIngestPipelinesStats[]> { public async getIngestPipelinesStats(timeout: Duration): Promise<NodeIngestPipelinesStats[]> {
const es = this.esClient(); const es = this.esClient();
this.logger.l('Fetching ingest pipelines stats'); this.logger.debug('Fetching ingest pipelines stats');
const request: NodesStatsRequest = { const request: NodesStatsRequest = {
metric: 'ingest', metric: 'ingest',

View file

@ -10,7 +10,7 @@ import type { ExperimentalFeatures } from '../../../common';
import { TelemetryEventsSender } from './sender'; import { TelemetryEventsSender } from './sender';
import { loggingSystemMock } from '@kbn/core/server/mocks'; import { loggingSystemMock } from '@kbn/core/server/mocks';
import { usageCountersServiceMock } from '@kbn/usage-collection-plugin/server/usage_counters/usage_counters_service.mock'; import { usageCountersServiceMock } from '@kbn/usage-collection-plugin/server/usage_counters/usage_counters_service.mock';
import { Observable } from 'rxjs'; import { of } from 'rxjs';
import { URL } from 'url'; import { URL } from 'url';
describe('TelemetryEventsSender', () => { describe('TelemetryEventsSender', () => {
@ -485,7 +485,7 @@ describe('TelemetryEventsSender', () => {
const sender = new TelemetryEventsSender(logger, {} as ExperimentalFeatures); const sender = new TelemetryEventsSender(logger, {} as ExperimentalFeatures);
sender['telemetryStart'] = { sender['telemetryStart'] = {
getIsOptedIn: jest.fn(async () => true), getIsOptedIn: jest.fn(async () => true),
isOptedIn$: new Observable<boolean>(), isOptedIn$: of(true),
}; };
sender['telemetrySetup'] = { sender['telemetrySetup'] = {
getTelemetryUrl: jest.fn(async () => new URL('https://telemetry.elastic.co')), getTelemetryUrl: jest.fn(async () => new URL('https://telemetry.elastic.co')),
@ -519,7 +519,7 @@ describe('TelemetryEventsSender', () => {
sender['sendEvents'] = jest.fn(); sender['sendEvents'] = jest.fn();
const telemetryStart = { const telemetryStart = {
getIsOptedIn: jest.fn(async () => false), getIsOptedIn: jest.fn(async () => false),
isOptedIn$: new Observable<boolean>(), isOptedIn$: of(false),
}; };
sender['telemetryStart'] = telemetryStart; sender['telemetryStart'] = telemetryStart;
@ -536,7 +536,7 @@ describe('TelemetryEventsSender', () => {
sender['sendEvents'] = jest.fn(); sender['sendEvents'] = jest.fn();
const telemetryStart = { const telemetryStart = {
getIsOptedIn: jest.fn(async () => true), getIsOptedIn: jest.fn(async () => true),
isOptedIn$: new Observable<boolean>(), isOptedIn$: of(true),
}; };
sender['telemetryStart'] = telemetryStart; sender['telemetryStart'] = telemetryStart;
sender['isTelemetryServicesReachable'] = jest.fn(async () => false); sender['isTelemetryServicesReachable'] = jest.fn(async () => false);

View file

@ -167,11 +167,11 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
this.telemetryStart = telemetryStart; this.telemetryStart = telemetryStart;
this.receiver = receiver; this.receiver = receiver;
if (taskManager && this.telemetryTasks) { if (taskManager && this.telemetryTasks) {
this.logger.l('Starting security telemetry tasks'); this.logger.debug('Starting security telemetry tasks');
this.telemetryTasks.forEach((task) => task.start(taskManager)); this.telemetryTasks.forEach((task) => task.start(taskManager));
} }
this.logger.l('Starting local task'); this.logger.debug('Starting local task');
timer(this.initialCheckDelayMs, this.checkIntervalMs) timer(this.initialCheckDelayMs, this.checkIntervalMs)
.pipe( .pipe(
takeUntil(this.stop$), takeUntil(this.stop$),
@ -191,20 +191,20 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
events: events.length, events: events.length,
} as LogMeta); } as LogMeta);
if (events.length === 0) { if (events.length === 0) {
this.logger.l('No events to queue'); this.logger.debug('No events to queue');
return; return;
} }
if (qlength >= this.maxQueueSize) { if (qlength >= this.maxQueueSize) {
// we're full already // we're full already
this.logger.l('Queue length is greater than max queue size'); this.logger.debug('Queue length is greater than max queue size');
return; return;
} }
if (events.length > this.maxQueueSize - qlength) { if (events.length > this.maxQueueSize - qlength) {
this.logger.l('Events exceed remaining queue size', { this.logger.info('Events exceed remaining queue size', {
max_queue_size: this.maxQueueSize, max_queue_size: this.maxQueueSize,
queue_length: qlength, queue_length: qlength,
}); } as LogMeta);
this.telemetryUsageCounter?.incrementCounter({ this.telemetryUsageCounter?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix.concat(['queue_stats'])), counterName: createUsageCounterLabel(usageLabelPrefix.concat(['queue_stats'])),
counterType: 'docs_lost', counterType: 'docs_lost',
@ -283,10 +283,8 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
} }
return false; return false;
} catch (e) { } catch (error) {
this.logger.warn('Error pinging telemetry services', { this.logger.warn('Error pinging telemetry services', { error });
error: e.message,
} as LogMeta);
return false; return false;
} }
@ -306,7 +304,7 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
this.isOptedIn = await this.isTelemetryOptedIn(); this.isOptedIn = await this.isTelemetryOptedIn();
if (!this.isOptedIn) { if (!this.isOptedIn) {
this.logger.l('Telemetry is not opted-in.'); this.logger.debug('Telemetry is not opted-in.');
this.queue = []; this.queue = [];
this.isSending = false; this.isSending = false;
return; return;
@ -314,7 +312,7 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
this.isElasticTelemetryReachable = await this.isTelemetryServicesReachable(); this.isElasticTelemetryReachable = await this.isTelemetryServicesReachable();
if (!this.isElasticTelemetryReachable) { if (!this.isElasticTelemetryReachable) {
this.logger.l('Telemetry Services are not reachable.'); this.logger.debug('Telemetry Services are not reachable.');
this.queue = []; this.queue = [];
this.isSending = false; this.isSending = false;
return; return;
@ -382,9 +380,9 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
this.receiver?.fetchLicenseInfo(), this.receiver?.fetchLicenseInfo(),
]); ]);
this.logger.l('Telemetry URL', { this.logger.debug('Telemetry URL', {
url: telemetryUrl, url: telemetryUrl,
}); } as LogMeta);
await this.sendEvents( await this.sendEvents(
toSend, toSend,
@ -496,10 +494,10 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
counterType: 'docs_sent', counterType: 'docs_sent',
incrementBy: events.length, incrementBy: events.length,
}); });
this.logger.l('Events sent!. Response', { status: resp.status }); this.logger.debug('Events sent!. Response', { status: resp.status } as LogMeta);
} catch (err) { } catch (error) {
this.logger.l('Error sending events', { error: JSON.stringify(err) }); this.logger.warn('Error sending events', { error });
const errorStatus = err?.response?.status; const errorStatus = error?.response?.status;
if (errorStatus !== undefined && errorStatus !== null) { if (errorStatus !== undefined && errorStatus !== null) {
this.telemetryUsageCounter?.incrementCounter({ this.telemetryUsageCounter?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix.concat(['payloads', channel])), counterName: createUsageCounterLabel(usageLabelPrefix.concat(['payloads', channel])),

View file

@ -135,10 +135,8 @@ export class SecurityTelemetryTask {
state: emptyState, state: emptyState,
params: { version: this.config.version }, params: { version: this.config.version },
}); });
} catch (e) { } catch (error) {
this.logger.error('Error scheduling task', { this.logger.error('Error scheduling task', { error });
error: e.message,
} as LogMeta);
} }
}; };

View file

@ -4,7 +4,7 @@
* 2.0; you may not use this file except in compliance with the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import { newTelemetryLogger } from './helpers'; import { newTelemetryLogger } from './helpers';
import { type TelemetryLogger } from './telemetry_logger'; import { type TelemetryLogger } from './telemetry_logger';
import type { TaskMetric, ITaskMetricsService, Trace } from './task_metrics.types'; import type { TaskMetric, ITaskMetricsService, Trace } from './task_metrics.types';
@ -29,11 +29,11 @@ export class TaskMetricsService implements ITaskMetricsService {
public async end(trace: Trace, error?: Error): Promise<void> { public async end(trace: Trace, error?: Error): Promise<void> {
const event = this.createTaskMetric(trace, error); const event = this.createTaskMetric(trace, error);
this.logger.l('Task completed', { this.logger.debug('Task completed', {
task_name: event.name, task_name: event.name,
time_executed_in_ms: event.time_executed_in_ms, time_executed_in_ms: event.time_executed_in_ms,
error_message: event.error_message, error_message: event.error_message,
}); } as LogMeta);
if (telemetryConfiguration.use_async_sender) { if (telemetryConfiguration.use_async_sender) {
this.sender.sendAsync(TelemetryChannel.TASK_METRICS, [event]); this.sender.sendAsync(TelemetryChannel.TASK_METRICS, [event]);

View file

@ -5,7 +5,7 @@
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import type { ITelemetryEventsSender } from '../sender'; import type { ITelemetryEventsSender } from '../sender';
import { TelemetryChannel, type TelemetryConfiguration } from '../types'; import { TelemetryChannel, type TelemetryConfiguration } from '../types';
import type { ITelemetryReceiver } from '../receiver'; import type { ITelemetryReceiver } from '../receiver';
@ -36,7 +36,7 @@ export function createTelemetryConfigurationTaskConfig() {
const log = newTelemetryLogger(logger.get('configuration'), mdc); const log = newTelemetryLogger(logger.get('configuration'), mdc);
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task'); log.debug('Running telemetry task');
try { try {
const artifactName = 'telemetry-buffer-and-batch-sizes-v1'; const artifactName = 'telemetry-buffer-and-batch-sizes-v1';
@ -50,9 +50,9 @@ export function createTelemetryConfigurationTaskConfig() {
const configArtifact = manifest.data as unknown as TelemetryConfiguration; const configArtifact = manifest.data as unknown as TelemetryConfiguration;
log.l('Got telemetry configuration artifact', { log.debug('Got telemetry configuration artifact', {
artifact: configArtifact ?? '<null>', artifact: configArtifact ?? '<null>',
}); } as LogMeta);
telemetryConfiguration.max_detection_alerts_batch = telemetryConfiguration.max_detection_alerts_batch =
configArtifact.max_detection_alerts_batch; configArtifact.max_detection_alerts_batch;
@ -69,7 +69,7 @@ export function createTelemetryConfigurationTaskConfig() {
} }
if (configArtifact.sender_channels) { if (configArtifact.sender_channels) {
log.l('Updating sender channels configuration'); log.info('Updating sender channels configuration');
telemetryConfiguration.sender_channels = configArtifact.sender_channels; telemetryConfiguration.sender_channels = configArtifact.sender_channels;
const channelsDict = Object.values(TelemetryChannel).reduce( const channelsDict = Object.values(TelemetryChannel).reduce(
(acc, channel) => acc.set(channel as string, channel), (acc, channel) => acc.set(channel as string, channel),
@ -78,7 +78,7 @@ export function createTelemetryConfigurationTaskConfig() {
Object.entries(configArtifact.sender_channels).forEach(([channelName, config]) => { Object.entries(configArtifact.sender_channels).forEach(([channelName, config]) => {
if (channelName === 'default') { if (channelName === 'default') {
log.l('Updating default configuration'); log.debug('Updating default configuration');
sender.updateDefaultQueueConfig({ sender.updateDefaultQueueConfig({
bufferTimeSpanMillis: config.buffer_time_span_millis, bufferTimeSpanMillis: config.buffer_time_span_millis,
inflightEventsThreshold: config.inflight_events_threshold, inflightEventsThreshold: config.inflight_events_threshold,
@ -87,9 +87,11 @@ export function createTelemetryConfigurationTaskConfig() {
} else { } else {
const channel = channelsDict.get(channelName); const channel = channelsDict.get(channelName);
if (!channel) { if (!channel) {
log.l('Ignoring unknown channel', { channel: channelName }); log.info('Ignoring unknown channel', { channel: channelName } as LogMeta);
} else { } else {
log.l('Updating configuration for channel', { channel: channelName }); log.debug('Updating configuration for channel', {
channel: channelName,
} as LogMeta);
sender.updateQueueConfig(channel, { sender.updateQueueConfig(channel, {
bufferTimeSpanMillis: config.buffer_time_span_millis, bufferTimeSpanMillis: config.buffer_time_span_millis,
inflightEventsThreshold: config.inflight_events_threshold, inflightEventsThreshold: config.inflight_events_threshold,
@ -101,31 +103,31 @@ export function createTelemetryConfigurationTaskConfig() {
} }
if (configArtifact.pagination_config) { if (configArtifact.pagination_config) {
log.l('Updating pagination configuration'); log.debug('Updating pagination configuration');
telemetryConfiguration.pagination_config = configArtifact.pagination_config; telemetryConfiguration.pagination_config = configArtifact.pagination_config;
_receiver.setMaxPageSizeBytes(configArtifact.pagination_config.max_page_size_bytes); _receiver.setMaxPageSizeBytes(configArtifact.pagination_config.max_page_size_bytes);
_receiver.setNumDocsToSample(configArtifact.pagination_config.num_docs_to_sample); _receiver.setNumDocsToSample(configArtifact.pagination_config.num_docs_to_sample);
} }
if (configArtifact.indices_metadata_config) { if (configArtifact.indices_metadata_config) {
log.l('Updating indices metadata configuration'); log.debug('Updating indices metadata configuration');
telemetryConfiguration.indices_metadata_config = configArtifact.indices_metadata_config; telemetryConfiguration.indices_metadata_config = configArtifact.indices_metadata_config;
} }
if (configArtifact.ingest_pipelines_stats_config) { if (configArtifact.ingest_pipelines_stats_config) {
log.l('Updating ingest pipelines stats configuration'); log.debug('Updating ingest pipelines stats configuration');
telemetryConfiguration.ingest_pipelines_stats_config = telemetryConfiguration.ingest_pipelines_stats_config =
configArtifact.ingest_pipelines_stats_config; configArtifact.ingest_pipelines_stats_config;
} }
await taskMetricsService.end(trace); await taskMetricsService.end(trace);
log.l('Updated TelemetryConfiguration', { configuration: telemetryConfiguration }); log.debug('Updated TelemetryConfiguration');
return 0; return 0;
} catch (err) { } catch (error) {
log.l('Failed to set telemetry configuration', { error: err.message }); log.warn('Failed to set telemetry configuration', { error });
telemetryConfiguration.resetAllToDefault(); telemetryConfiguration.resetAllToDefault();
await taskMetricsService.end(trace, err); await taskMetricsService.end(trace, error);
return 0; return 0;
} }
}, },

View file

@ -6,7 +6,7 @@
*/ */
import { cloneDeep } from 'lodash'; import { cloneDeep } from 'lodash';
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import { import {
batchTelemetryRecords, batchTelemetryRecords,
responseActionsCustomRuleTelemetryData, responseActionsCustomRuleTelemetryData,
@ -55,7 +55,7 @@ export function createTelemetryCustomResponseActionRulesTaskConfig(maxTelemetryB
]; ];
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running response actions rules telemetry task'); log.debug('Running response actions rules telemetry task');
try { try {
const [clusterInfoPromise, licenseInfoPromise] = await Promise.allSettled([ const [clusterInfoPromise, licenseInfoPromise] = await Promise.allSettled([
@ -108,9 +108,9 @@ export function createTelemetryCustomResponseActionRulesTaskConfig(maxTelemetryB
licenseInfo licenseInfo
); );
log.l('Custom response actions rules data', { log.debug('Custom response actions rules data', {
data: JSON.stringify(responseActionsRulesTelemetryData), data: JSON.stringify(responseActionsRulesTelemetryData),
}); } as LogMeta);
usageCollector?.incrementCounter({ usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelEndpointPrefix), counterName: createUsageCounterLabel(usageLabelEndpointPrefix),
@ -127,7 +127,7 @@ export function createTelemetryCustomResponseActionRulesTaskConfig(maxTelemetryB
const documents = cloneDeep(Object.values(responseActionsRulesTelemetryData)); const documents = cloneDeep(Object.values(responseActionsRulesTelemetryData));
if (telemetryConfiguration.use_async_sender) { if (telemetryConfiguration.use_async_sender) {
await sender.sendAsync(TelemetryChannel.LISTS, documents); sender.sendAsync(TelemetryChannel.LISTS, documents);
} else { } else {
const batches = batchTelemetryRecords(documents, maxTelemetryBatch); const batches = batchTelemetryRecords(documents, maxTelemetryBatch);
for (const batch of batches) { for (const batch of batches) {
@ -141,9 +141,9 @@ export function createTelemetryCustomResponseActionRulesTaskConfig(maxTelemetryB
responseActionsRulesTelemetryData.response_actions_rules responseActionsRulesTelemetryData.response_actions_rules
).reduce((acc, count) => acc + count, 0); ).reduce((acc, count) => acc + count, 0);
log.l('Response actions rules telemetry task executed', { log.debug('Response actions rules telemetry task executed', {
totalCount, totalCount,
}); } as LogMeta);
return totalCount; return totalCount;
} catch (err) { } catch (err) {

View file

@ -6,7 +6,7 @@
*/ */
import { cloneDeep } from 'lodash'; import { cloneDeep } from 'lodash';
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import { LIST_DETECTION_RULE_EXCEPTION, TELEMETRY_CHANNEL_LISTS } from '../constants'; import { LIST_DETECTION_RULE_EXCEPTION, TELEMETRY_CHANNEL_LISTS } from '../constants';
import { import {
batchTelemetryRecords, batchTelemetryRecords,
@ -44,7 +44,7 @@ export function createTelemetryDetectionRuleListsTaskConfig(maxTelemetryBatch: n
const usageLabelPrefix: string[] = ['security_telemetry', 'detection-rules']; const usageLabelPrefix: string[] = ['security_telemetry', 'detection-rules'];
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task'); log.debug('Running telemetry task');
try { try {
const [clusterInfoPromise, licenseInfoPromise] = await Promise.allSettled([ const [clusterInfoPromise, licenseInfoPromise] = await Promise.allSettled([
@ -102,9 +102,9 @@ export function createTelemetryDetectionRuleListsTaskConfig(maxTelemetryBatch: n
licenseInfo, licenseInfo,
LIST_DETECTION_RULE_EXCEPTION LIST_DETECTION_RULE_EXCEPTION
); );
log.l('Detection rule exception json length', { log.debug('Detection rule exception json length', {
length: detectionRuleExceptionsJson.length, length: detectionRuleExceptionsJson.length,
}); } as LogMeta);
usageCollector?.incrementCounter({ usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix), counterName: createUsageCounterLabel(usageLabelPrefix),
@ -121,7 +121,7 @@ export function createTelemetryDetectionRuleListsTaskConfig(maxTelemetryBatch: n
} }
await taskMetricsService.end(trace); await taskMetricsService.end(trace);
log.l('Task executed', { length: detectionRuleExceptionsJson.length }); log.debug('Task executed', { length: detectionRuleExceptionsJson.length } as LogMeta);
return detectionRuleExceptionsJson.length; return detectionRuleExceptionsJson.length;
} catch (err) { } catch (err) {

View file

@ -5,7 +5,7 @@
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import { newTelemetryLogger, getPreviousDiagTaskTimestamp } from '../helpers'; import { newTelemetryLogger, getPreviousDiagTaskTimestamp } from '../helpers';
import type { ITelemetryEventsSender } from '../sender'; import type { ITelemetryEventsSender } from '../sender';
import { TelemetryChannel, type TelemetryEvent } from '../types'; import { TelemetryChannel, type TelemetryEvent } from '../types';
@ -36,7 +36,7 @@ export function createTelemetryDiagnosticsTaskConfig() {
const log = newTelemetryLogger(logger.get('diagnostic'), mdc); const log = newTelemetryLogger(logger.get('diagnostic'), mdc);
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task'); log.debug('Running telemetry task');
try { try {
if (!taskExecutionPeriod.last) { if (!taskExecutionPeriod.last) {
@ -61,9 +61,9 @@ export function createTelemetryDiagnosticsTaskConfig() {
} }
alertCount += alerts.length; alertCount += alerts.length;
log.l('Sending diagnostic alerts', { log.debug('Sending diagnostic alerts', {
alerts_count: alerts.length, alerts_count: alerts.length,
}); } as LogMeta);
sender.sendAsync(TelemetryChannel.ENDPOINT_ALERTS, processedAlerts); sender.sendAsync(TelemetryChannel.ENDPOINT_ALERTS, processedAlerts);
} }

View file

@ -5,7 +5,7 @@
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import { FLEET_ENDPOINT_PACKAGE } from '@kbn/fleet-plugin/common'; import { FLEET_ENDPOINT_PACKAGE } from '@kbn/fleet-plugin/common';
import type { ITelemetryEventsSender } from '../sender'; import type { ITelemetryEventsSender } from '../sender';
import { import {
@ -64,7 +64,7 @@ export function createTelemetryEndpointTaskConfig(maxTelemetryBatch: number) {
const log = newTelemetryLogger(logger.get('endpoint'), mdc); const log = newTelemetryLogger(logger.get('endpoint'), mdc);
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task'); log.debug('Running telemetry task');
try { try {
const processor = new EndpointMetadataProcessor(log, receiver); const processor = new EndpointMetadataProcessor(log, receiver);
@ -80,10 +80,10 @@ export function createTelemetryEndpointTaskConfig(maxTelemetryBatch: number) {
incrementBy: documents.length, incrementBy: documents.length,
}); });
log.l('Sending endpoint telemetry', { log.debug('Sending endpoint telemetry', {
num_docs: documents.length, num_docs: documents.length,
async_sender: telemetryConfiguration.use_async_sender, async_sender: telemetryConfiguration.use_async_sender,
}); } as LogMeta);
// STAGE 6 - Send the documents // STAGE 6 - Send the documents
if (telemetryConfiguration.use_async_sender) { if (telemetryConfiguration.use_async_sender) {
@ -97,11 +97,9 @@ export function createTelemetryEndpointTaskConfig(maxTelemetryBatch: number) {
await taskMetricsService.end(trace); await taskMetricsService.end(trace);
return documents.length; return documents.length;
} catch (err) { } catch (error) {
log.l(`Error running endpoint alert telemetry task`, { log.warn(`Error running endpoint alert telemetry task`, { error });
error: JSON.stringify(err), await taskMetricsService.end(trace, error);
});
await taskMetricsService.end(trace, err);
return 0; return 0;
} }
}, },
@ -127,7 +125,7 @@ class EndpointMetadataProcessor {
const endpointMetrics = await this.receiver.fetchEndpointMetricsAbstract(last, current); const endpointMetrics = await this.receiver.fetchEndpointMetricsAbstract(last, current);
// If no metrics exist, early (and successfull) exit // If no metrics exist, early (and successfull) exit
if (endpointMetrics.totalEndpoints === 0) { if (endpointMetrics.totalEndpoints === 0) {
this.logger.l('no endpoint metrics to report'); this.logger.debug('no endpoint metrics to report');
return []; return [];
} }
@ -143,10 +141,8 @@ class EndpointMetadataProcessor {
policies.delete(DefaultEndpointPolicyIdToIgnore); policies.delete(DefaultEndpointPolicyIdToIgnore);
return policies; return policies;
}) })
.catch((e) => { .catch((error) => {
this.logger.l('Error fetching fleet agents, using an empty value', { this.logger.warn('Error fetching fleet agents, using an empty value', { error });
error: JSON.stringify(e),
});
return new Map(); return new Map();
}); });
const endpointPolicyById = await this.endpointPolicies(policyIdByFleetAgentId.values()); const endpointPolicyById = await this.endpointPolicies(policyIdByFleetAgentId.values());
@ -158,14 +154,12 @@ class EndpointMetadataProcessor {
.fetchEndpointPolicyResponses(last, current) .fetchEndpointPolicyResponses(last, current)
.then((response) => { .then((response) => {
if (response.size === 0) { if (response.size === 0) {
this.logger.l('no endpoint policy responses to report'); this.logger.info('no endpoint policy responses to report');
} }
return response; return response;
}) })
.catch((e) => { .catch((error) => {
this.logger.l('Error fetching policy responses, using an empty value', { this.logger.warn('Error fetching policy responses, using an empty value', { error });
error: JSON.stringify(e),
});
return new Map(); return new Map();
}); });
@ -176,14 +170,12 @@ class EndpointMetadataProcessor {
.fetchEndpointMetadata(last, current) .fetchEndpointMetadata(last, current)
.then((response) => { .then((response) => {
if (response.size === 0) { if (response.size === 0) {
this.logger.l('no endpoint metadata to report'); this.logger.debug('no endpoint metadata to report');
} }
return response; return response;
}) })
.catch((e) => { .catch((error) => {
this.logger.l('Error fetching endpoint metadata, using an empty value', { this.logger.warn('Error fetching endpoint metadata, using an empty value', { error });
error: JSON.stringify(e),
});
return new Map(); return new Map();
}); });
@ -212,12 +204,12 @@ class EndpointMetadataProcessor {
); );
telemetryPayloads.push(...payloads); telemetryPayloads.push(...payloads);
} }
} catch (e) { } catch (error) {
// something happened in the middle of the pagination, log the error // something happened in the middle of the pagination, log the error
// and return what we collect so far instead of aborting the // and return what we collect so far instead of aborting the
// whole execution // whole execution
this.logger.l('Error fetching endpoint metrics by id', { this.logger.warn('Error fetching endpoint metrics by id', {
error: JSON.stringify(e), error,
}); });
} }
@ -244,7 +236,7 @@ class EndpointMetadataProcessor {
for (const policyId of policies) { for (const policyId of policies) {
if (!endpointPolicyCache.has(policyId)) { if (!endpointPolicyCache.has(policyId)) {
const agentPolicy = await this.receiver.fetchPolicyConfigs(policyId).catch((e) => { const agentPolicy = await this.receiver.fetchPolicyConfigs(policyId).catch((e) => {
this.logger.l(`error fetching policy config due to ${e?.message}`); this.logger.warn(`error fetching policy config due to ${e?.message}`);
return null; return null;
}); });

View file

@ -5,7 +5,7 @@
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import type { ITelemetryEventsSender } from '../sender'; import type { ITelemetryEventsSender } from '../sender';
import type { TelemetryFilterListArtifact } from '../types'; import type { TelemetryFilterListArtifact } from '../types';
import type { ITelemetryReceiver } from '../receiver'; import type { ITelemetryReceiver } from '../receiver';
@ -36,7 +36,7 @@ export function createTelemetryFilterListArtifactTaskConfig() {
const log = newTelemetryLogger(logger.get('filterlists'), mdc); const log = newTelemetryLogger(logger.get('filterlists'), mdc);
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task'); log.debug('Running telemetry task');
try { try {
const artifactName = 'telemetry-filterlists-v1'; const artifactName = 'telemetry-filterlists-v1';
@ -48,16 +48,16 @@ export function createTelemetryFilterListArtifactTaskConfig() {
} }
const artifact = manifest.data as unknown as TelemetryFilterListArtifact; const artifact = manifest.data as unknown as TelemetryFilterListArtifact;
log.l('New filterlist artifact', { artifact }); log.debug('New filterlist artifact', { artifact } as LogMeta);
filterList.endpointAlerts = artifact.endpoint_alerts; filterList.endpointAlerts = artifact.endpoint_alerts;
filterList.exceptionLists = artifact.exception_lists; filterList.exceptionLists = artifact.exception_lists;
filterList.prebuiltRulesAlerts = artifact.prebuilt_rules_alerts; filterList.prebuiltRulesAlerts = artifact.prebuilt_rules_alerts;
await taskMetricsService.end(trace); await taskMetricsService.end(trace);
return 0; return 0;
} catch (err) { } catch (error) {
log.l('Failed to set telemetry filterlist artifact', { error: err.message }); log.warn('Failed to set telemetry filterlist artifact', { error });
filterList.resetAllToDefault(); filterList.resetAllToDefault();
await taskMetricsService.end(trace, err); await taskMetricsService.end(trace, error);
return 0; return 0;
} }
}, },

View file

@ -181,8 +181,8 @@ export function createTelemetryIndicesMetadataTaskConfig() {
incrementCounter(TelemetryCounter.DOCS_SENT, 'indices-stats', count); incrementCounter(TelemetryCounter.DOCS_SENT, 'indices-stats', count);
return count; return count;
}) })
.catch((err) => { .catch((error) => {
log.warn(`Error getting indices stats`, { error: err.message } as LogMeta); log.warn(`Error getting indices stats`, { error });
incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'indices-stats', 1); incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'indices-stats', 1);
return 0; return 0;
}); });
@ -193,8 +193,8 @@ export function createTelemetryIndicesMetadataTaskConfig() {
incrementCounter(TelemetryCounter.DOCS_SENT, 'ilm-stats', names.size); incrementCounter(TelemetryCounter.DOCS_SENT, 'ilm-stats', names.size);
return names; return names;
}) })
.catch((err) => { .catch((error) => {
log.warn(`Error getting ILM stats`, { error: err.message } as LogMeta); log.warn(`Error getting ILM stats`, { error });
incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'ilm-stats', 1); incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'ilm-stats', 1);
return new Set<string>(); return new Set<string>();
}); });
@ -205,8 +205,8 @@ export function createTelemetryIndicesMetadataTaskConfig() {
incrementCounter(TelemetryCounter.DOCS_SENT, 'ilm-policies', count); incrementCounter(TelemetryCounter.DOCS_SENT, 'ilm-policies', count);
return count; return count;
}) })
.catch((err) => { .catch((error) => {
log.warn(`Error getting ILM policies`, { error: err.message } as LogMeta); log.warn(`Error getting ILM policies`, { error });
incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'ilm-policies', 1); incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'ilm-policies', 1);
return 0; return 0;
}); });
@ -219,8 +219,8 @@ export function createTelemetryIndicesMetadataTaskConfig() {
incrementCounter(TelemetryCounter.DOCS_SENT, 'index-templates', count); incrementCounter(TelemetryCounter.DOCS_SENT, 'index-templates', count);
return count; return count;
}) })
.catch((err) => { .catch((error) => {
log.warn(`Error getting index templates`, { error: err.message } as LogMeta); log.warn(`Error getting index templates`, { error });
incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'index-templates', 1); incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'index-templates', 1);
return 0; return 0;
}); });
@ -237,11 +237,9 @@ export function createTelemetryIndicesMetadataTaskConfig() {
await taskMetricsService.end(trace); await taskMetricsService.end(trace);
return indicesCount; return indicesCount;
} catch (err) { } catch (error) {
log.warn(`Error running indices metadata task`, { log.warn(`Error running indices metadata task`, { error });
error: err.message, await taskMetricsService.end(trace, error);
} as LogMeta);
await taskMetricsService.end(trace, err);
return 0; return 0;
} }
}, },

View file

@ -81,12 +81,12 @@ export function createIngestStatsTaskConfig() {
} as LogMeta); } as LogMeta);
return ingestStats.length; return ingestStats.length;
} catch (err) { } catch (error) {
log.warn(`Error running ingest stats task`, { log.warn(`Error running ingest stats task`, {
error: err.message, error,
elapsed: performance.now() - start, elapsed: performance.now() - start,
} as LogMeta); });
await taskMetricsService.end(trace, err); await taskMetricsService.end(trace, error);
return 0; return 0;
} }
}, },

View file

@ -5,7 +5,7 @@
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import type { ITelemetryEventsSender } from '../sender'; import type { ITelemetryEventsSender } from '../sender';
import type { ITelemetryReceiver } from '../receiver'; import type { ITelemetryReceiver } from '../receiver';
import type { ITaskMetricsService } from '../task_metrics.types'; import type { ITaskMetricsService } from '../task_metrics.types';
@ -44,7 +44,7 @@ export function createTelemetryPrebuiltRuleAlertsTaskConfig(maxTelemetryBatch: n
const log = newTelemetryLogger(logger.get('prebuilt_rule_alerts'), mdc); const log = newTelemetryLogger(logger.get('prebuilt_rule_alerts'), mdc);
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task'); log.debug('Running telemetry task');
try { try {
const [clusterInfoPromise, licenseInfoPromise, packageVersion] = await Promise.allSettled([ const [clusterInfoPromise, licenseInfoPromise, packageVersion] = await Promise.allSettled([
@ -96,7 +96,9 @@ export function createTelemetryPrebuiltRuleAlertsTaskConfig(maxTelemetryBatch: n
}) })
); );
log.l('sending elastic prebuilt alerts', { length: enrichedAlerts.length }); log.debug('sending elastic prebuilt alerts', {
length: enrichedAlerts.length,
} as LogMeta);
const batches = batchTelemetryRecords(enrichedAlerts, maxTelemetryBatch); const batches = batchTelemetryRecords(enrichedAlerts, maxTelemetryBatch);
const promises = batches.map(async (batch) => { const promises = batches.map(async (batch) => {
@ -108,9 +110,9 @@ export function createTelemetryPrebuiltRuleAlertsTaskConfig(maxTelemetryBatch: n
await taskMetricsService.end(trace); await taskMetricsService.end(trace);
return 0; return 0;
} catch (err) { } catch (error) {
logger.error('could not complete task', { error: err }); logger.error('could not complete task', { error });
await taskMetricsService.end(trace, err); await taskMetricsService.end(trace, error);
return 0; return 0;
} }
}, },

View file

@ -5,7 +5,7 @@
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import { ENDPOINT_LIST_ID, ENDPOINT_ARTIFACT_LISTS } from '@kbn/securitysolution-list-constants'; import { ENDPOINT_LIST_ID, ENDPOINT_ARTIFACT_LISTS } from '@kbn/securitysolution-list-constants';
import { import {
LIST_ENDPOINT_EXCEPTION, LIST_ENDPOINT_EXCEPTION,
@ -47,7 +47,7 @@ export function createTelemetrySecurityListTaskConfig(maxTelemetryBatch: number)
const log = newTelemetryLogger(logger.get('security_lists'), mdc); const log = newTelemetryLogger(logger.get('security_lists'), mdc);
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task'); log.debug('Running telemetry task');
const usageCollector = sender.getTelemetryUsageCluster(); const usageCollector = sender.getTelemetryUsageCluster();
const usageLabelPrefix: string[] = ['security_telemetry', 'lists']; const usageLabelPrefix: string[] = ['security_telemetry', 'lists'];
@ -81,7 +81,7 @@ export function createTelemetrySecurityListTaskConfig(maxTelemetryBatch: number)
LIST_TRUSTED_APPLICATION LIST_TRUSTED_APPLICATION
); );
trustedApplicationsCount = trustedAppsJson.length; trustedApplicationsCount = trustedAppsJson.length;
log.l('Trusted Apps', { trusted_apps_count: trustedApplicationsCount }); log.debug('Trusted Apps', { trusted_apps_count: trustedApplicationsCount } as LogMeta);
usageCollector?.incrementCounter({ usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix), counterName: createUsageCounterLabel(usageLabelPrefix),
@ -106,7 +106,7 @@ export function createTelemetrySecurityListTaskConfig(maxTelemetryBatch: number)
LIST_ENDPOINT_EXCEPTION LIST_ENDPOINT_EXCEPTION
); );
endpointExceptionsCount = epExceptionsJson.length; endpointExceptionsCount = epExceptionsJson.length;
log.l('EP Exceptions', { ep_exceptions_count: endpointExceptionsCount }); log.debug('EP Exceptions', { ep_exceptions_count: endpointExceptionsCount } as LogMeta);
usageCollector?.incrementCounter({ usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix), counterName: createUsageCounterLabel(usageLabelPrefix),
@ -131,7 +131,7 @@ export function createTelemetrySecurityListTaskConfig(maxTelemetryBatch: number)
LIST_ENDPOINT_EVENT_FILTER LIST_ENDPOINT_EVENT_FILTER
); );
endpointEventFiltersCount = epFiltersJson.length; endpointEventFiltersCount = epFiltersJson.length;
log.l('EP Event Filters', { ep_filters_count: endpointEventFiltersCount }); log.debug('EP Event Filters', { ep_filters_count: endpointEventFiltersCount } as LogMeta);
usageCollector?.incrementCounter({ usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix), counterName: createUsageCounterLabel(usageLabelPrefix),

View file

@ -5,7 +5,7 @@
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import type { ITelemetryEventsSender } from '../sender'; import type { ITelemetryEventsSender } from '../sender';
import type { ITelemetryReceiver } from '../receiver'; import type { ITelemetryReceiver } from '../receiver';
import type { TaskExecutionPeriod } from '../task'; import type { TaskExecutionPeriod } from '../task';
@ -35,7 +35,7 @@ export function createTelemetryTimelineTaskConfig() {
const fetcher = new TelemetryTimelineFetcher(receiver); const fetcher = new TelemetryTimelineFetcher(receiver);
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task'); log.debug('Running telemetry task');
try { try {
let counter = 0; let counter = 0;
@ -48,7 +48,7 @@ export function createTelemetryTimelineTaskConfig() {
} }
const alerts = await receiver.fetchTimelineAlerts(alertsIndex, rangeFrom, rangeTo); const alerts = await receiver.fetchTimelineAlerts(alertsIndex, rangeFrom, rangeTo);
log.l('found alerts to process', { length: alerts.length }); log.debug('found alerts to process', { length: alerts.length } as LogMeta);
for (const alert of alerts) { for (const alert of alerts) {
const result = await fetcher.fetchTimeline(alert); const result = await fetcher.fetchTimeline(alert);
@ -73,14 +73,14 @@ export function createTelemetryTimelineTaskConfig() {
} }
} }
log.l('Concluding timeline task.', { counter }); log.debug('Concluding timeline task.', { counter } as LogMeta);
await taskMetricsService.end(trace); await taskMetricsService.end(trace);
return counter; return counter;
} catch (err) { } catch (error) {
logger.error('could not complete task', { error: err }); logger.error('could not complete task', { error });
await taskMetricsService.end(trace, err); await taskMetricsService.end(trace, error);
return 0; return 0;
} }
}, },

View file

@ -5,7 +5,7 @@
* 2.0. * 2.0.
*/ */
import type { Logger } from '@kbn/core/server'; import type { LogMeta, Logger } from '@kbn/core/server';
import { DEFAULT_DIAGNOSTIC_INDEX_PATTERN } from '../../../../common/endpoint/constants'; import { DEFAULT_DIAGNOSTIC_INDEX_PATTERN } from '../../../../common/endpoint/constants';
import type { ITelemetryEventsSender } from '../sender'; import type { ITelemetryEventsSender } from '../sender';
import type { ITelemetryReceiver } from '../receiver'; import type { ITelemetryReceiver } from '../receiver';
@ -36,7 +36,7 @@ export function createTelemetryDiagnosticTimelineTaskConfig() {
const trace = taskMetricsService.start(taskType); const trace = taskMetricsService.start(taskType);
const fetcher = new TelemetryTimelineFetcher(receiver); const fetcher = new TelemetryTimelineFetcher(receiver);
log.l('Running telemetry task'); log.debug('Running telemetry task');
try { try {
let counter = 0; let counter = 0;
@ -49,7 +49,7 @@ export function createTelemetryDiagnosticTimelineTaskConfig() {
rangeTo rangeTo
); );
log.l('found alerts to process', { length: alerts.length }); log.debug('found alerts to process', { length: alerts.length } as LogMeta);
for (const alert of alerts) { for (const alert of alerts) {
const result = await fetcher.fetchTimeline(alert); const result = await fetcher.fetchTimeline(alert);
@ -74,14 +74,14 @@ export function createTelemetryDiagnosticTimelineTaskConfig() {
} }
} }
log.l('Concluding timeline task.', { counter }); log.debug('Concluding timeline task.', { counter } as LogMeta);
await taskMetricsService.end(trace); await taskMetricsService.end(trace);
return counter; return counter;
} catch (err) { } catch (error) {
logger.error('could not complete task', { error: err }); logger.error('could not complete task', { error });
await taskMetricsService.end(trace, err); await taskMetricsService.end(trace, error);
return 0; return 0;
} }
}, },

View file

@ -9,6 +9,18 @@ import type { LogLevelId, LogRecord } from '@kbn/logging';
import { clusterInfo, isElasticCloudDeployment } from './helpers'; import { clusterInfo, isElasticCloudDeployment } from './helpers';
export interface TelemetryLogger extends Logger { export interface TelemetryLogger extends Logger {
/**
* @deprecated This method is deprecated and should be avoided in new code.
* Instead, configure appropriate log levels directly in `kibana.yml`. For example:
*
* ```yaml
* # kibana.yml
* logging.loggers:
* - name: plugins.securitySolution
* level: info
* - name: plugins.securitySolution.telemetry_events.sender
* level: debug
*/
l<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta | object): void; l<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta | object): void;
} }