[Security Solution][Telemetry] Review logging (#225077)

## Summary

This PR reduces log noise from the Security Solution plugin’s telemetry code
in production by reviewing the existing log statements and adjusting their
log levels.

- Deprecated the `TelemetryLogger.l()` logging helper method and moved to
  standard `logger.[debug|info|warn|error]` calls.
- Reviewed all log statements to adjust their levels, remove unhelpful ones,
  and improve the messages.
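
As an illustration, a minimal sketch of the before/after pattern (the
`reportSendFailure` helper and its parameters are invented for the example;
`Logger` and `LogMeta` are the `@kbn/core/server` types used throughout the
diff):

```ts
import type { Logger, LogMeta } from '@kbn/core/server';

// Before: the deprecated helper funneled everything through one method and
// serialized errors by hand, e.g.
//   this.logger.l('Error sending events', { error: JSON.stringify(err) });

// After: standard level-specific calls with structured metadata, so the
// logger configuration in kibana.yml decides what reaches production logs.
function reportSendFailure(logger: Logger, channel: string, error: Error): void {
  logger.debug('Dropping event', { channel } as LogMeta);
  logger.warn('Error sending events', { error });
}
```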

---------

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Co-authored-by: Alejandro Fernández Haro <afharo@gmail.com>
Author: Sebastián Zaffarano, 2025-06-26 20:34:49 +02:00 (committed by GitHub)
Parent: 5220391abb
Commit: 5b4a65b98c
19 changed files with 163 additions and 167 deletions


@ -69,7 +69,7 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
telemetryUsageCounter?: IUsageCounter,
analytics?: AnalyticsServiceSetup
): void {
this.logger.l(`Setting up ${AsyncTelemetryEventsSender.name}`);
this.logger.debug('Setting up service');
this.ensureStatus(ServiceStatus.CREATED);
@ -86,7 +86,7 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
}
public start(telemetryStart?: TelemetryPluginStart): void {
this.logger.l(`Starting ${AsyncTelemetryEventsSender.name}`);
this.logger.debug('Starting service');
this.ensureStatus(ServiceStatus.CONFIGURED);
@ -132,13 +132,11 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
);
}
},
error: (err) => {
this.logger.warn('Unexpected error sending events to channel', {
error: JSON.stringify(err),
} as LogMeta);
error: (error) => {
this.logger.warn('Unexpected error sending events to channel', { error });
},
complete: () => {
this.logger.l('Shutting down');
this.logger.debug('Shutting down');
this.finished$.next();
},
});
@ -148,7 +146,7 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
}
public async stop(): Promise<void> {
this.logger.l(`Stopping ${AsyncTelemetryEventsSender.name}`);
this.logger.debug('Stopping service');
this.ensureStatus(ServiceStatus.CONFIGURED, ServiceStatus.STARTED);
@ -231,9 +229,7 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
if (inflightEventsCounter < this.getConfigFor(channel).inflightEventsThreshold) {
return rx.of(event);
}
this.logger.l(
`>> Dropping event ${event} (channel: ${channel}, inflightEventsCounter: ${inflightEventsCounter})`
);
this.logger.debug('Dropping event', { event, channel, inflightEventsCounter } as LogMeta);
this.senderUtils?.incrementCounter(TelemetryCounter.DOCS_DROPPED, 1, channel);
return rx.EMPTY;
@ -370,23 +366,21 @@ export class AsyncTelemetryEventsSender implements IAsyncTelemetryEventsSender {
if (r.status < 400) {
return { events: events.length, channel };
} else {
this.logger.l('Unexpected response', {
this.logger.warn('Unexpected response', {
status: r.status,
} as LogMeta);
throw newFailure(`Got ${r.status}`, channel, events.length);
}
})
.catch((err) => {
.catch((error) => {
this.senderUtils?.incrementCounter(
TelemetryCounter.RUNTIME_ERROR,
events.length,
channel
);
this.logger.warn('Runtime error', {
error: err.message,
} as LogMeta);
throw newFailure(`Error posting events: ${err}`, channel, events.length);
this.logger.warn('Runtime error', { error });
throw newFailure(`Error posting events: ${error}`, channel, events.length);
});
} catch (err: unknown) {
if (isFailure(err)) {


@ -576,7 +576,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
}
public async *fetchDiagnosticAlertsBatch(executeFrom: string, executeTo: string) {
this.logger.l('Searching diagnostic alerts', {
this.logger.debug('Searching diagnostic alerts', {
from: executeFrom,
to: executeTo,
} as LogMeta);
@ -620,7 +620,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
fetchMore = false;
}
this.logger.l('Diagnostic alerts to return', { numOfHits } as LogMeta);
this.logger.debug('Diagnostic alerts to return', { numOfHits } as LogMeta);
fetchMore = numOfHits > 0 && numOfHits < telemetryConfiguration.telemetry_max_buffer_size;
} catch (e) {
this.logger.warn('Error fetching alerts', { error_message: e.message } as LogMeta);
@ -866,7 +866,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
executeFrom: string,
executeTo: string
) {
this.logger.l('Searching prebuilt rule alerts from', {
this.logger.debug('Searching prebuilt rule alerts from', {
executeFrom,
executeTo,
} as LogMeta);
@ -1004,7 +1004,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
pitId = response?.pit_id;
}
this.logger.l('Prebuilt rule alerts to return', { alerts: alerts.length } as LogMeta);
this.logger.debug('Prebuilt rule alerts to return', { alerts: alerts.length } as LogMeta);
yield alerts;
}
@ -1146,7 +1146,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
} as LogMeta);
}
this.logger.l('Timeline alerts to return', { alerts: alertsToReturn.length });
this.logger.debug('Timeline alerts to return', { alerts: alertsToReturn.length } as LogMeta);
return alertsToReturn || [];
}
@ -1419,7 +1419,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
public async getIndices(): Promise<IndexSettings[]> {
const es = this.esClient();
this.logger.l('Fetching indices');
this.logger.debug('Fetching indices');
const request: IndicesGetRequest = {
index: '*',
@ -1455,7 +1455,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
public async getDataStreams(): Promise<DataStream[]> {
const es = this.esClient();
this.logger.l('Fetching datstreams');
this.logger.debug('Fetching datstreams');
const request: IndicesGetDataStreamRequest = {
name: '*',
@ -1497,11 +1497,11 @@ export class TelemetryReceiver implements ITelemetryReceiver {
const es = this.esClient();
const safeChunkSize = Math.min(chunkSize, 3000);
this.logger.l('Fetching indices stats');
this.logger.debug('Fetching indices stats');
const groupedIndices = chunkStringsByMaxLength(indices, safeChunkSize);
this.logger.l('Splitted indices into groups', {
this.logger.debug('Splitted indices into groups', {
groups: groupedIndices.length,
indices: indices.length,
} as LogMeta);
@ -1565,7 +1565,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
const groupedIndices = chunkStringsByMaxLength(indices, safeChunkSize);
this.logger.l('Splitted ilms into groups', {
this.logger.debug('Splitted ilms into groups', {
groups: groupedIndices.length,
indices: indices.length,
} as LogMeta);
@ -1600,7 +1600,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
public async getIndexTemplatesStats(): Promise<IndexTemplateInfo[]> {
const es = this.esClient();
this.logger.l('Fetching datstreams');
this.logger.debug('Fetching datstreams');
const request: IndicesGetIndexTemplateRequest = {
name: '*',
@ -1661,13 +1661,13 @@ export class TelemetryReceiver implements ITelemetryReceiver {
const groupedIlms = chunkStringsByMaxLength(ilms, safeChunkSize);
this.logger.l('Splitted ilms into groups', {
this.logger.debug('Splitted ilms into groups', {
groups: groupedIlms.length,
ilms: ilms.length,
} as LogMeta);
for (const group of groupedIlms) {
this.logger.l('Fetching ilm policies');
this.logger.debug('Fetching ilm policies');
const request: IlmGetLifecycleRequest = {
name: group.join(','),
filter_path: [
@ -1707,7 +1707,7 @@ export class TelemetryReceiver implements ITelemetryReceiver {
public async getIngestPipelinesStats(timeout: Duration): Promise<NodeIngestPipelinesStats[]> {
const es = this.esClient();
this.logger.l('Fetching ingest pipelines stats');
this.logger.debug('Fetching ingest pipelines stats');
const request: NodesStatsRequest = {
metric: 'ingest',


@ -10,7 +10,7 @@ import type { ExperimentalFeatures } from '../../../common';
import { TelemetryEventsSender } from './sender';
import { loggingSystemMock } from '@kbn/core/server/mocks';
import { usageCountersServiceMock } from '@kbn/usage-collection-plugin/server/usage_counters/usage_counters_service.mock';
import { Observable } from 'rxjs';
import { of } from 'rxjs';
import { URL } from 'url';
describe('TelemetryEventsSender', () => {
@ -485,7 +485,7 @@ describe('TelemetryEventsSender', () => {
const sender = new TelemetryEventsSender(logger, {} as ExperimentalFeatures);
sender['telemetryStart'] = {
getIsOptedIn: jest.fn(async () => true),
isOptedIn$: new Observable<boolean>(),
isOptedIn$: of(true),
};
sender['telemetrySetup'] = {
getTelemetryUrl: jest.fn(async () => new URL('https://telemetry.elastic.co')),
@ -519,7 +519,7 @@ describe('TelemetryEventsSender', () => {
sender['sendEvents'] = jest.fn();
const telemetryStart = {
getIsOptedIn: jest.fn(async () => false),
isOptedIn$: new Observable<boolean>(),
isOptedIn$: of(false),
};
sender['telemetryStart'] = telemetryStart;
@ -536,7 +536,7 @@ describe('TelemetryEventsSender', () => {
sender['sendEvents'] = jest.fn();
const telemetryStart = {
getIsOptedIn: jest.fn(async () => true),
isOptedIn$: new Observable<boolean>(),
isOptedIn$: of(true),
};
sender['telemetryStart'] = telemetryStart;
sender['isTelemetryServicesReachable'] = jest.fn(async () => false);


@ -167,11 +167,11 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
this.telemetryStart = telemetryStart;
this.receiver = receiver;
if (taskManager && this.telemetryTasks) {
this.logger.l('Starting security telemetry tasks');
this.logger.debug('Starting security telemetry tasks');
this.telemetryTasks.forEach((task) => task.start(taskManager));
}
this.logger.l('Starting local task');
this.logger.debug('Starting local task');
timer(this.initialCheckDelayMs, this.checkIntervalMs)
.pipe(
takeUntil(this.stop$),
@ -191,20 +191,20 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
events: events.length,
} as LogMeta);
if (events.length === 0) {
this.logger.l('No events to queue');
this.logger.debug('No events to queue');
return;
}
if (qlength >= this.maxQueueSize) {
// we're full already
this.logger.l('Queue length is greater than max queue size');
this.logger.debug('Queue length is greater than max queue size');
return;
}
if (events.length > this.maxQueueSize - qlength) {
this.logger.l('Events exceed remaining queue size', {
this.logger.info('Events exceed remaining queue size', {
max_queue_size: this.maxQueueSize,
queue_length: qlength,
});
} as LogMeta);
this.telemetryUsageCounter?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix.concat(['queue_stats'])),
counterType: 'docs_lost',
@ -283,10 +283,8 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
}
return false;
} catch (e) {
this.logger.warn('Error pinging telemetry services', {
error: e.message,
} as LogMeta);
} catch (error) {
this.logger.warn('Error pinging telemetry services', { error });
return false;
}
@ -306,7 +304,7 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
this.isOptedIn = await this.isTelemetryOptedIn();
if (!this.isOptedIn) {
this.logger.l('Telemetry is not opted-in.');
this.logger.debug('Telemetry is not opted-in.');
this.queue = [];
this.isSending = false;
return;
@ -314,7 +312,7 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
this.isElasticTelemetryReachable = await this.isTelemetryServicesReachable();
if (!this.isElasticTelemetryReachable) {
this.logger.l('Telemetry Services are not reachable.');
this.logger.debug('Telemetry Services are not reachable.');
this.queue = [];
this.isSending = false;
return;
@ -382,9 +380,9 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
this.receiver?.fetchLicenseInfo(),
]);
this.logger.l('Telemetry URL', {
this.logger.debug('Telemetry URL', {
url: telemetryUrl,
});
} as LogMeta);
await this.sendEvents(
toSend,
@ -496,10 +494,10 @@ export class TelemetryEventsSender implements ITelemetryEventsSender {
counterType: 'docs_sent',
incrementBy: events.length,
});
this.logger.l('Events sent!. Response', { status: resp.status });
} catch (err) {
this.logger.l('Error sending events', { error: JSON.stringify(err) });
const errorStatus = err?.response?.status;
this.logger.debug('Events sent!. Response', { status: resp.status } as LogMeta);
} catch (error) {
this.logger.warn('Error sending events', { error });
const errorStatus = error?.response?.status;
if (errorStatus !== undefined && errorStatus !== null) {
this.telemetryUsageCounter?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix.concat(['payloads', channel])),


@ -135,10 +135,8 @@ export class SecurityTelemetryTask {
state: emptyState,
params: { version: this.config.version },
});
} catch (e) {
this.logger.error('Error scheduling task', {
error: e.message,
} as LogMeta);
} catch (error) {
this.logger.error('Error scheduling task', { error });
}
};


@ -4,7 +4,7 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import { newTelemetryLogger } from './helpers';
import { type TelemetryLogger } from './telemetry_logger';
import type { TaskMetric, ITaskMetricsService, Trace } from './task_metrics.types';
@ -29,11 +29,11 @@ export class TaskMetricsService implements ITaskMetricsService {
public async end(trace: Trace, error?: Error): Promise<void> {
const event = this.createTaskMetric(trace, error);
this.logger.l('Task completed', {
this.logger.debug('Task completed', {
task_name: event.name,
time_executed_in_ms: event.time_executed_in_ms,
error_message: event.error_message,
});
} as LogMeta);
if (telemetryConfiguration.use_async_sender) {
this.sender.sendAsync(TelemetryChannel.TASK_METRICS, [event]);


@ -5,7 +5,7 @@
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import type { ITelemetryEventsSender } from '../sender';
import { TelemetryChannel, type TelemetryConfiguration } from '../types';
import type { ITelemetryReceiver } from '../receiver';
@ -36,7 +36,7 @@ export function createTelemetryConfigurationTaskConfig() {
const log = newTelemetryLogger(logger.get('configuration'), mdc);
const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task');
log.debug('Running telemetry task');
try {
const artifactName = 'telemetry-buffer-and-batch-sizes-v1';
@ -50,9 +50,9 @@ export function createTelemetryConfigurationTaskConfig() {
const configArtifact = manifest.data as unknown as TelemetryConfiguration;
log.l('Got telemetry configuration artifact', {
log.debug('Got telemetry configuration artifact', {
artifact: configArtifact ?? '<null>',
});
} as LogMeta);
telemetryConfiguration.max_detection_alerts_batch =
configArtifact.max_detection_alerts_batch;
@ -69,7 +69,7 @@ export function createTelemetryConfigurationTaskConfig() {
}
if (configArtifact.sender_channels) {
log.l('Updating sender channels configuration');
log.info('Updating sender channels configuration');
telemetryConfiguration.sender_channels = configArtifact.sender_channels;
const channelsDict = Object.values(TelemetryChannel).reduce(
(acc, channel) => acc.set(channel as string, channel),
@ -78,7 +78,7 @@ export function createTelemetryConfigurationTaskConfig() {
Object.entries(configArtifact.sender_channels).forEach(([channelName, config]) => {
if (channelName === 'default') {
log.l('Updating default configuration');
log.debug('Updating default configuration');
sender.updateDefaultQueueConfig({
bufferTimeSpanMillis: config.buffer_time_span_millis,
inflightEventsThreshold: config.inflight_events_threshold,
@ -87,9 +87,11 @@ export function createTelemetryConfigurationTaskConfig() {
} else {
const channel = channelsDict.get(channelName);
if (!channel) {
log.l('Ignoring unknown channel', { channel: channelName });
log.info('Ignoring unknown channel', { channel: channelName } as LogMeta);
} else {
log.l('Updating configuration for channel', { channel: channelName });
log.debug('Updating configuration for channel', {
channel: channelName,
} as LogMeta);
sender.updateQueueConfig(channel, {
bufferTimeSpanMillis: config.buffer_time_span_millis,
inflightEventsThreshold: config.inflight_events_threshold,
@ -101,31 +103,31 @@ export function createTelemetryConfigurationTaskConfig() {
}
if (configArtifact.pagination_config) {
log.l('Updating pagination configuration');
log.debug('Updating pagination configuration');
telemetryConfiguration.pagination_config = configArtifact.pagination_config;
_receiver.setMaxPageSizeBytes(configArtifact.pagination_config.max_page_size_bytes);
_receiver.setNumDocsToSample(configArtifact.pagination_config.num_docs_to_sample);
}
if (configArtifact.indices_metadata_config) {
log.l('Updating indices metadata configuration');
log.debug('Updating indices metadata configuration');
telemetryConfiguration.indices_metadata_config = configArtifact.indices_metadata_config;
}
if (configArtifact.ingest_pipelines_stats_config) {
log.l('Updating ingest pipelines stats configuration');
log.debug('Updating ingest pipelines stats configuration');
telemetryConfiguration.ingest_pipelines_stats_config =
configArtifact.ingest_pipelines_stats_config;
}
await taskMetricsService.end(trace);
log.l('Updated TelemetryConfiguration', { configuration: telemetryConfiguration });
log.debug('Updated TelemetryConfiguration');
return 0;
} catch (err) {
log.l('Failed to set telemetry configuration', { error: err.message });
} catch (error) {
log.warn('Failed to set telemetry configuration', { error });
telemetryConfiguration.resetAllToDefault();
await taskMetricsService.end(trace, err);
await taskMetricsService.end(trace, error);
return 0;
}
},


@ -6,7 +6,7 @@
*/
import { cloneDeep } from 'lodash';
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import {
batchTelemetryRecords,
responseActionsCustomRuleTelemetryData,
@ -55,7 +55,7 @@ export function createTelemetryCustomResponseActionRulesTaskConfig(maxTelemetryB
];
const trace = taskMetricsService.start(taskType);
log.l('Running response actions rules telemetry task');
log.debug('Running response actions rules telemetry task');
try {
const [clusterInfoPromise, licenseInfoPromise] = await Promise.allSettled([
@ -108,9 +108,9 @@ export function createTelemetryCustomResponseActionRulesTaskConfig(maxTelemetryB
licenseInfo
);
log.l('Custom response actions rules data', {
log.debug('Custom response actions rules data', {
data: JSON.stringify(responseActionsRulesTelemetryData),
});
} as LogMeta);
usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelEndpointPrefix),
@ -127,7 +127,7 @@ export function createTelemetryCustomResponseActionRulesTaskConfig(maxTelemetryB
const documents = cloneDeep(Object.values(responseActionsRulesTelemetryData));
if (telemetryConfiguration.use_async_sender) {
await sender.sendAsync(TelemetryChannel.LISTS, documents);
sender.sendAsync(TelemetryChannel.LISTS, documents);
} else {
const batches = batchTelemetryRecords(documents, maxTelemetryBatch);
for (const batch of batches) {
@ -141,9 +141,9 @@ export function createTelemetryCustomResponseActionRulesTaskConfig(maxTelemetryB
responseActionsRulesTelemetryData.response_actions_rules
).reduce((acc, count) => acc + count, 0);
log.l('Response actions rules telemetry task executed', {
log.debug('Response actions rules telemetry task executed', {
totalCount,
});
} as LogMeta);
return totalCount;
} catch (err) {


@ -6,7 +6,7 @@
*/
import { cloneDeep } from 'lodash';
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import { LIST_DETECTION_RULE_EXCEPTION, TELEMETRY_CHANNEL_LISTS } from '../constants';
import {
batchTelemetryRecords,
@ -44,7 +44,7 @@ export function createTelemetryDetectionRuleListsTaskConfig(maxTelemetryBatch: n
const usageLabelPrefix: string[] = ['security_telemetry', 'detection-rules'];
const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task');
log.debug('Running telemetry task');
try {
const [clusterInfoPromise, licenseInfoPromise] = await Promise.allSettled([
@ -102,9 +102,9 @@ export function createTelemetryDetectionRuleListsTaskConfig(maxTelemetryBatch: n
licenseInfo,
LIST_DETECTION_RULE_EXCEPTION
);
log.l('Detection rule exception json length', {
log.debug('Detection rule exception json length', {
length: detectionRuleExceptionsJson.length,
});
} as LogMeta);
usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix),
@ -121,7 +121,7 @@ export function createTelemetryDetectionRuleListsTaskConfig(maxTelemetryBatch: n
}
await taskMetricsService.end(trace);
log.l('Task executed', { length: detectionRuleExceptionsJson.length });
log.debug('Task executed', { length: detectionRuleExceptionsJson.length } as LogMeta);
return detectionRuleExceptionsJson.length;
} catch (err) {


@ -5,7 +5,7 @@
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import { newTelemetryLogger, getPreviousDiagTaskTimestamp } from '../helpers';
import type { ITelemetryEventsSender } from '../sender';
import { TelemetryChannel, type TelemetryEvent } from '../types';
@ -36,7 +36,7 @@ export function createTelemetryDiagnosticsTaskConfig() {
const log = newTelemetryLogger(logger.get('diagnostic'), mdc);
const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task');
log.debug('Running telemetry task');
try {
if (!taskExecutionPeriod.last) {
@ -61,9 +61,9 @@ export function createTelemetryDiagnosticsTaskConfig() {
}
alertCount += alerts.length;
log.l('Sending diagnostic alerts', {
log.debug('Sending diagnostic alerts', {
alerts_count: alerts.length,
});
} as LogMeta);
sender.sendAsync(TelemetryChannel.ENDPOINT_ALERTS, processedAlerts);
}


@ -5,7 +5,7 @@
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import { FLEET_ENDPOINT_PACKAGE } from '@kbn/fleet-plugin/common';
import type { ITelemetryEventsSender } from '../sender';
import {
@ -64,7 +64,7 @@ export function createTelemetryEndpointTaskConfig(maxTelemetryBatch: number) {
const log = newTelemetryLogger(logger.get('endpoint'), mdc);
const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task');
log.debug('Running telemetry task');
try {
const processor = new EndpointMetadataProcessor(log, receiver);
@ -80,10 +80,10 @@ export function createTelemetryEndpointTaskConfig(maxTelemetryBatch: number) {
incrementBy: documents.length,
});
log.l('Sending endpoint telemetry', {
log.debug('Sending endpoint telemetry', {
num_docs: documents.length,
async_sender: telemetryConfiguration.use_async_sender,
});
} as LogMeta);
// STAGE 6 - Send the documents
if (telemetryConfiguration.use_async_sender) {
@ -97,11 +97,9 @@ export function createTelemetryEndpointTaskConfig(maxTelemetryBatch: number) {
await taskMetricsService.end(trace);
return documents.length;
} catch (err) {
log.l(`Error running endpoint alert telemetry task`, {
error: JSON.stringify(err),
});
await taskMetricsService.end(trace, err);
} catch (error) {
log.warn(`Error running endpoint alert telemetry task`, { error });
await taskMetricsService.end(trace, error);
return 0;
}
},
@ -127,7 +125,7 @@ class EndpointMetadataProcessor {
const endpointMetrics = await this.receiver.fetchEndpointMetricsAbstract(last, current);
// If no metrics exist, early (and successfull) exit
if (endpointMetrics.totalEndpoints === 0) {
this.logger.l('no endpoint metrics to report');
this.logger.debug('no endpoint metrics to report');
return [];
}
@ -143,10 +141,8 @@ class EndpointMetadataProcessor {
policies.delete(DefaultEndpointPolicyIdToIgnore);
return policies;
})
.catch((e) => {
this.logger.l('Error fetching fleet agents, using an empty value', {
error: JSON.stringify(e),
});
.catch((error) => {
this.logger.warn('Error fetching fleet agents, using an empty value', { error });
return new Map();
});
const endpointPolicyById = await this.endpointPolicies(policyIdByFleetAgentId.values());
@ -158,14 +154,12 @@ class EndpointMetadataProcessor {
.fetchEndpointPolicyResponses(last, current)
.then((response) => {
if (response.size === 0) {
this.logger.l('no endpoint policy responses to report');
this.logger.info('no endpoint policy responses to report');
}
return response;
})
.catch((e) => {
this.logger.l('Error fetching policy responses, using an empty value', {
error: JSON.stringify(e),
});
.catch((error) => {
this.logger.warn('Error fetching policy responses, using an empty value', { error });
return new Map();
});
@ -176,14 +170,12 @@ class EndpointMetadataProcessor {
.fetchEndpointMetadata(last, current)
.then((response) => {
if (response.size === 0) {
this.logger.l('no endpoint metadata to report');
this.logger.debug('no endpoint metadata to report');
}
return response;
})
.catch((e) => {
this.logger.l('Error fetching endpoint metadata, using an empty value', {
error: JSON.stringify(e),
});
.catch((error) => {
this.logger.warn('Error fetching endpoint metadata, using an empty value', { error });
return new Map();
});
@ -212,12 +204,12 @@ class EndpointMetadataProcessor {
);
telemetryPayloads.push(...payloads);
}
} catch (e) {
} catch (error) {
// something happened in the middle of the pagination, log the error
// and return what we collect so far instead of aborting the
// whole execution
this.logger.l('Error fetching endpoint metrics by id', {
error: JSON.stringify(e),
this.logger.warn('Error fetching endpoint metrics by id', {
error,
});
}
@ -244,7 +236,7 @@ class EndpointMetadataProcessor {
for (const policyId of policies) {
if (!endpointPolicyCache.has(policyId)) {
const agentPolicy = await this.receiver.fetchPolicyConfigs(policyId).catch((e) => {
this.logger.l(`error fetching policy config due to ${e?.message}`);
this.logger.warn(`error fetching policy config due to ${e?.message}`);
return null;
});


@ -5,7 +5,7 @@
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import type { ITelemetryEventsSender } from '../sender';
import type { TelemetryFilterListArtifact } from '../types';
import type { ITelemetryReceiver } from '../receiver';
@ -36,7 +36,7 @@ export function createTelemetryFilterListArtifactTaskConfig() {
const log = newTelemetryLogger(logger.get('filterlists'), mdc);
const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task');
log.debug('Running telemetry task');
try {
const artifactName = 'telemetry-filterlists-v1';
@ -48,16 +48,16 @@ export function createTelemetryFilterListArtifactTaskConfig() {
}
const artifact = manifest.data as unknown as TelemetryFilterListArtifact;
log.l('New filterlist artifact', { artifact });
log.debug('New filterlist artifact', { artifact } as LogMeta);
filterList.endpointAlerts = artifact.endpoint_alerts;
filterList.exceptionLists = artifact.exception_lists;
filterList.prebuiltRulesAlerts = artifact.prebuilt_rules_alerts;
await taskMetricsService.end(trace);
return 0;
} catch (err) {
log.l('Failed to set telemetry filterlist artifact', { error: err.message });
} catch (error) {
log.warn('Failed to set telemetry filterlist artifact', { error });
filterList.resetAllToDefault();
await taskMetricsService.end(trace, err);
await taskMetricsService.end(trace, error);
return 0;
}
},


@ -181,8 +181,8 @@ export function createTelemetryIndicesMetadataTaskConfig() {
incrementCounter(TelemetryCounter.DOCS_SENT, 'indices-stats', count);
return count;
})
.catch((err) => {
log.warn(`Error getting indices stats`, { error: err.message } as LogMeta);
.catch((error) => {
log.warn(`Error getting indices stats`, { error });
incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'indices-stats', 1);
return 0;
});
@ -193,8 +193,8 @@ export function createTelemetryIndicesMetadataTaskConfig() {
incrementCounter(TelemetryCounter.DOCS_SENT, 'ilm-stats', names.size);
return names;
})
.catch((err) => {
log.warn(`Error getting ILM stats`, { error: err.message } as LogMeta);
.catch((error) => {
log.warn(`Error getting ILM stats`, { error });
incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'ilm-stats', 1);
return new Set<string>();
});
@ -205,8 +205,8 @@ export function createTelemetryIndicesMetadataTaskConfig() {
incrementCounter(TelemetryCounter.DOCS_SENT, 'ilm-policies', count);
return count;
})
.catch((err) => {
log.warn(`Error getting ILM policies`, { error: err.message } as LogMeta);
.catch((error) => {
log.warn(`Error getting ILM policies`, { error });
incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'ilm-policies', 1);
return 0;
});
@ -219,8 +219,8 @@ export function createTelemetryIndicesMetadataTaskConfig() {
incrementCounter(TelemetryCounter.DOCS_SENT, 'index-templates', count);
return count;
})
.catch((err) => {
log.warn(`Error getting index templates`, { error: err.message } as LogMeta);
.catch((error) => {
log.warn(`Error getting index templates`, { error });
incrementCounter(TelemetryCounter.RUNTIME_ERROR, 'index-templates', 1);
return 0;
});
@ -237,11 +237,9 @@ export function createTelemetryIndicesMetadataTaskConfig() {
await taskMetricsService.end(trace);
return indicesCount;
} catch (err) {
log.warn(`Error running indices metadata task`, {
error: err.message,
} as LogMeta);
await taskMetricsService.end(trace, err);
} catch (error) {
log.warn(`Error running indices metadata task`, { error });
await taskMetricsService.end(trace, error);
return 0;
}
},


@ -81,12 +81,12 @@ export function createIngestStatsTaskConfig() {
} as LogMeta);
return ingestStats.length;
} catch (err) {
} catch (error) {
log.warn(`Error running ingest stats task`, {
error: err.message,
error,
elapsed: performance.now() - start,
} as LogMeta);
await taskMetricsService.end(trace, err);
});
await taskMetricsService.end(trace, error);
return 0;
}
},


@ -5,7 +5,7 @@
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import type { ITelemetryEventsSender } from '../sender';
import type { ITelemetryReceiver } from '../receiver';
import type { ITaskMetricsService } from '../task_metrics.types';
@ -44,7 +44,7 @@ export function createTelemetryPrebuiltRuleAlertsTaskConfig(maxTelemetryBatch: n
const log = newTelemetryLogger(logger.get('prebuilt_rule_alerts'), mdc);
const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task');
log.debug('Running telemetry task');
try {
const [clusterInfoPromise, licenseInfoPromise, packageVersion] = await Promise.allSettled([
@ -96,7 +96,9 @@ export function createTelemetryPrebuiltRuleAlertsTaskConfig(maxTelemetryBatch: n
})
);
log.l('sending elastic prebuilt alerts', { length: enrichedAlerts.length });
log.debug('sending elastic prebuilt alerts', {
length: enrichedAlerts.length,
} as LogMeta);
const batches = batchTelemetryRecords(enrichedAlerts, maxTelemetryBatch);
const promises = batches.map(async (batch) => {
@ -108,9 +110,9 @@ export function createTelemetryPrebuiltRuleAlertsTaskConfig(maxTelemetryBatch: n
await taskMetricsService.end(trace);
return 0;
} catch (err) {
logger.error('could not complete task', { error: err });
await taskMetricsService.end(trace, err);
} catch (error) {
logger.error('could not complete task', { error });
await taskMetricsService.end(trace, error);
return 0;
}
},


@ -5,7 +5,7 @@
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import { ENDPOINT_LIST_ID, ENDPOINT_ARTIFACT_LISTS } from '@kbn/securitysolution-list-constants';
import {
LIST_ENDPOINT_EXCEPTION,
@ -47,7 +47,7 @@ export function createTelemetrySecurityListTaskConfig(maxTelemetryBatch: number)
const log = newTelemetryLogger(logger.get('security_lists'), mdc);
const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task');
log.debug('Running telemetry task');
const usageCollector = sender.getTelemetryUsageCluster();
const usageLabelPrefix: string[] = ['security_telemetry', 'lists'];
@ -81,7 +81,7 @@ export function createTelemetrySecurityListTaskConfig(maxTelemetryBatch: number)
LIST_TRUSTED_APPLICATION
);
trustedApplicationsCount = trustedAppsJson.length;
log.l('Trusted Apps', { trusted_apps_count: trustedApplicationsCount });
log.debug('Trusted Apps', { trusted_apps_count: trustedApplicationsCount } as LogMeta);
usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix),
@ -106,7 +106,7 @@ export function createTelemetrySecurityListTaskConfig(maxTelemetryBatch: number)
LIST_ENDPOINT_EXCEPTION
);
endpointExceptionsCount = epExceptionsJson.length;
log.l('EP Exceptions', { ep_exceptions_count: endpointExceptionsCount });
log.debug('EP Exceptions', { ep_exceptions_count: endpointExceptionsCount } as LogMeta);
usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix),
@ -131,7 +131,7 @@ export function createTelemetrySecurityListTaskConfig(maxTelemetryBatch: number)
LIST_ENDPOINT_EVENT_FILTER
);
endpointEventFiltersCount = epFiltersJson.length;
log.l('EP Event Filters', { ep_filters_count: endpointEventFiltersCount });
log.debug('EP Event Filters', { ep_filters_count: endpointEventFiltersCount } as LogMeta);
usageCollector?.incrementCounter({
counterName: createUsageCounterLabel(usageLabelPrefix),


@ -5,7 +5,7 @@
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import type { ITelemetryEventsSender } from '../sender';
import type { ITelemetryReceiver } from '../receiver';
import type { TaskExecutionPeriod } from '../task';
@ -35,7 +35,7 @@ export function createTelemetryTimelineTaskConfig() {
const fetcher = new TelemetryTimelineFetcher(receiver);
const trace = taskMetricsService.start(taskType);
log.l('Running telemetry task');
log.debug('Running telemetry task');
try {
let counter = 0;
@ -48,7 +48,7 @@ export function createTelemetryTimelineTaskConfig() {
}
const alerts = await receiver.fetchTimelineAlerts(alertsIndex, rangeFrom, rangeTo);
log.l('found alerts to process', { length: alerts.length });
log.debug('found alerts to process', { length: alerts.length } as LogMeta);
for (const alert of alerts) {
const result = await fetcher.fetchTimeline(alert);
@ -73,14 +73,14 @@ export function createTelemetryTimelineTaskConfig() {
}
}
log.l('Concluding timeline task.', { counter });
log.debug('Concluding timeline task.', { counter } as LogMeta);
await taskMetricsService.end(trace);
return counter;
} catch (err) {
logger.error('could not complete task', { error: err });
await taskMetricsService.end(trace, err);
} catch (error) {
logger.error('could not complete task', { error });
await taskMetricsService.end(trace, error);
return 0;
}
},


@ -5,7 +5,7 @@
* 2.0.
*/
import type { Logger } from '@kbn/core/server';
import type { LogMeta, Logger } from '@kbn/core/server';
import { DEFAULT_DIAGNOSTIC_INDEX_PATTERN } from '../../../../common/endpoint/constants';
import type { ITelemetryEventsSender } from '../sender';
import type { ITelemetryReceiver } from '../receiver';
@ -36,7 +36,7 @@ export function createTelemetryDiagnosticTimelineTaskConfig() {
const trace = taskMetricsService.start(taskType);
const fetcher = new TelemetryTimelineFetcher(receiver);
log.l('Running telemetry task');
log.debug('Running telemetry task');
try {
let counter = 0;
@ -49,7 +49,7 @@ export function createTelemetryDiagnosticTimelineTaskConfig() {
rangeTo
);
log.l('found alerts to process', { length: alerts.length });
log.debug('found alerts to process', { length: alerts.length } as LogMeta);
for (const alert of alerts) {
const result = await fetcher.fetchTimeline(alert);
@ -74,14 +74,14 @@ export function createTelemetryDiagnosticTimelineTaskConfig() {
}
}
log.l('Concluding timeline task.', { counter });
log.debug('Concluding timeline task.', { counter } as LogMeta);
await taskMetricsService.end(trace);
return counter;
} catch (err) {
logger.error('could not complete task', { error: err });
await taskMetricsService.end(trace, err);
} catch (error) {
logger.error('could not complete task', { error });
await taskMetricsService.end(trace, error);
return 0;
}
},


@ -9,6 +9,18 @@ import type { LogLevelId, LogRecord } from '@kbn/logging';
import { clusterInfo, isElasticCloudDeployment } from './helpers';
export interface TelemetryLogger extends Logger {
/**
* @deprecated This method is deprecated and should be avoided in new code.
* Instead, configure appropriate log levels directly in `kibana.yml`. For example:
*
* ```yaml
* # kibana.yml
* logging.loggers:
* - name: plugins.securitySolution
* level: info
* - name: plugins.securitySolution.telemetry_events.sender
* level: debug
* ```
*/
l<Meta extends LogMeta = LogMeta>(message: string, meta?: Meta | object): void;
}
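
To make the caller-side replacement concrete, a minimal sketch under the same
assumptions (the function, task name, and messages are illustrative only;
`logger.get()` and the level-specific methods are the standard
`@kbn/core/server` `Logger` APIs this PR migrates to):

```ts
import type { Logger, LogMeta } from '@kbn/core/server';

// Instead of log.l(...), take a named child logger and use the level-specific
// methods; the logging.loggers entries in kibana.yml shown above then decide
// which of these messages reach production output.
export function logTaskOutcome(logger: Logger, taskName: string, error?: Error): void {
  const log = logger.get(taskName); // e.g. plugins.securitySolution.<taskName>
  log.debug('Running telemetry task');
  if (error) {
    log.warn('Task failed', { error } as LogMeta);
  }
}
```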