[8.x] [Streams 🌊] Add initial telemetry to Streams (#214027) (#215200)

# Backport

This will backport the following commits from `main` to `8.x`:
- [[Streams 🌊] Add initial telemetry to Streams
(#214027)](https://github.com/elastic/kibana/pull/214027)

<!--- Backport version: 9.6.6 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sorenlouv/backport)

<!--BACKPORT [{"author":{"name":"Chris
Cowan","email":"chris@elastic.co"},"sourceCommit":{"committedDate":"2025-03-17T19:34:52Z","message":"[Streams
🌊] Add initial telemetry to Streams (#214027)\n\n## Summary\n\nThis PR
adds EBT telemetry to the Streams backend and UI. For the\nbackend APIs,
we are only measuring latency on the \"write\" endpoints\nthat have the
highest potential for being slow:\n\n- `POST
/api/streams/{name}/processing/_simulate 2023-10-31`\n- `POST
/api/streams/{name}/processing/_suggestions 2023-10-31`\n- `POST
/api/streams/{name}/_fork 2023-10-31`\n- `PUT
/api/streams/{name}/dashboards/{dashboardId} 2023-10-31`\n- `PUT
/api/streams/{name} 2023-10-31`\n- `PUT /api/streams/{name}/_group
2023-10-31`\n- `PUT /api/streams/{name}/_ingest 2023-10-31`\n- `DELETE
/api/streams/{name} 2023-10-31`\n- `POST /api/streams/_enable
2023-10-31`\n- `POST /api/streams/_disable 2023-10-31`\n- `POST
/api/streams/_resync 2023-10-31`\n\nThis is controlled by an allow list
located in the server's telemetry\nclient.\n\nFor the UI, I've added the
following tracking:\n\n- **Number of Dashboards associated with a
Stream** – This captures the\nstream name and the number of dashboards
associated with it. It's\nattached to the dashboard hook, this means
that we only track dashboard\ncounts on streams that users are actually
using.\n- **Clicks on Dashboards** – This captures the name of the
dashboard,\nthe dashboard id, and the stream name\n- **Latency of AI
Grok Suggestions** – This captures the name of the\nstream, the field
used, the number of suggested patterns, the success\nrates of the
suggest patterns, and the connector id of the LLM. This\nwill also
capture if the AI returned ZERO suggestions.\n- **When a user accepts an
AI Grok Suggestion** – This captures the name\nof the stream, the name
of the field, the success rate of the pattern,\nand number of fields
detected.\n\n---------\n\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>\nCo-authored-by:
Elastic Machine
<elasticmachine@users.noreply.github.com>","sha":"508c89fc15fd611971d28e8f9434b214a945efcf","branchLabelMapping":{"^v9.1.0$":"main","^v8.19.0$":"8.x","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","backport:version","Feature:Streams","v9.1.0","v8.19.0"],"title":"[Streams
🌊] Add initial telemetry to
Streams","number":214027,"url":"https://github.com/elastic/kibana/pull/214027","mergeCommit":{"message":"[Streams
🌊] Add initial telemetry to Streams (#214027)\n\n## Summary\n\nThis PR
adds EBT telemetry to the Streams backend and UI. For the\nbackend APIs,
we are only measuring latency on the \"write\" endpoints\nthat have the
highest potential for being slow:\n\n- `POST
/api/streams/{name}/processing/_simulate 2023-10-31`\n- `POST
/api/streams/{name}/processing/_suggestions 2023-10-31`\n- `POST
/api/streams/{name}/_fork 2023-10-31`\n- `PUT
/api/streams/{name}/dashboards/{dashboardId} 2023-10-31`\n- `PUT
/api/streams/{name} 2023-10-31`\n- `PUT /api/streams/{name}/_group
2023-10-31`\n- `PUT /api/streams/{name}/_ingest 2023-10-31`\n- `DELETE
/api/streams/{name} 2023-10-31`\n- `POST /api/streams/_enable
2023-10-31`\n- `POST /api/streams/_disable 2023-10-31`\n- `POST
/api/streams/_resync 2023-10-31`\n\nThis is controlled by an allow list
located in the server's telemetry\nclient.\n\nFor the UI, I've added the
following tracking:\n\n- **Number of Dashboards associated with a
Stream** – This captures the\nstream name and the number of dashboards
associated with it. It's\nattached to the dashboard hook, this means
that we only track dashboard\ncounts on streams that users are actually
using.\n- **Clicks on Dashboards** – This captures the name of the
dashboard,\nthe dashboard id, and the stream name\n- **Latency of AI
Grok Suggestions** – This captures the name of the\nstream, the field
used, the number of suggested patterns, the success\nrates of the
suggest patterns, and the connector id of the LLM. This\nwill also
capture if the AI returned ZERO suggestions.\n- **When a user accepts an
AI Grok Suggestion** – This captures the name\nof the stream, the name
of the field, the success rate of the pattern,\nand number of fields
detected.\n\n---------\n\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>\nCo-authored-by:
Elastic Machine
<elasticmachine@users.noreply.github.com>","sha":"508c89fc15fd611971d28e8f9434b214a945efcf"}},"sourceBranch":"main","suggestedTargetBranches":["8.x"],"targetPullRequestStates":[{"branch":"main","label":"v9.1.0","branchLabelMappingKey":"^v9.1.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/214027","number":214027,"mergeCommit":{"message":"[Streams
🌊] Add initial telemetry to Streams (#214027)\n\n## Summary\n\nThis PR
adds EBT telemetry to the Streams backend and UI. For the\nbackend APIs,
we are only measuring latency on the \"write\" endpoints\nthat have the
highest potential for being slow:\n\n- `POST
/api/streams/{name}/processing/_simulate 2023-10-31`\n- `POST
/api/streams/{name}/processing/_suggestions 2023-10-31`\n- `POST
/api/streams/{name}/_fork 2023-10-31`\n- `PUT
/api/streams/{name}/dashboards/{dashboardId} 2023-10-31`\n- `PUT
/api/streams/{name} 2023-10-31`\n- `PUT /api/streams/{name}/_group
2023-10-31`\n- `PUT /api/streams/{name}/_ingest 2023-10-31`\n- `DELETE
/api/streams/{name} 2023-10-31`\n- `POST /api/streams/_enable
2023-10-31`\n- `POST /api/streams/_disable 2023-10-31`\n- `POST
/api/streams/_resync 2023-10-31`\n\nThis is controlled by an allow list
located in the server's telemetry\nclient.\n\nFor the UI, I've added the
following tracking:\n\n- **Number of Dashboards associated with a
Stream** – This captures the\nstream name and the number of dashboards
associated with it. It's\nattached to the dashboard hook, this means
that we only track dashboard\ncounts on streams that users are actually
using.\n- **Clicks on Dashboards** – This captures the name of the
dashboard,\nthe dashboard id, and the stream name\n- **Latency of AI
Grok Suggestions** – This captures the name of the\nstream, the field
used, the number of suggested patterns, the success\nrates of the
suggest patterns, and the connector id of the LLM. This\nwill also
capture if the AI returned ZERO suggestions.\n- **When a user accepts an
AI Grok Suggestion** – This captures the name\nof the stream, the name
of the field, the success rate of the pattern,\nand number of fields
detected.\n\n---------\n\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>\nCo-authored-by:
Elastic Machine
<elasticmachine@users.noreply.github.com>","sha":"508c89fc15fd611971d28e8f9434b214a945efcf"}},{"branch":"8.x","label":"v8.19.0","branchLabelMappingKey":"^v8.19.0$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->
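For reference, here is a condensed, self-contained sketch of the server-side latency tracking pattern described above. It mirrors the `StreamsTelemetryClient` and route factory changes in the diff below; the `reportEvent` stub and the example values are placeholders, not part of the committed code.

```typescript
// Condensed sketch: reportEvent stands in for core.analytics.reportEvent.
type LatencyEvent = { name: string; endpoint: string; duration_ms: number };
const reportEvent = (eventType: string, event: LatencyEvent) => console.log(eventType, event);

// Only endpoints on the allow list produce latency events.
const LATENCY_TRACKING_ENDPOINT_ALLOW_LIST = [
  'PUT /api/streams/{name} 2023-10-31',
  'DELETE /api/streams/{name} 2023-10-31',
  // ...the rest of the "write" endpoints listed above
];

// Returns a finish callback that reports elapsed time when invoked.
// Non-allow-listed endpoints get a no-op, so call sites stay uniform.
function startTrackingEndpointLatency(props: { name: string; endpoint: string }) {
  if (!LATENCY_TRACKING_ENDPOINT_ALLOW_LIST.includes(props.endpoint)) {
    return () => {};
  }
  const startTime = Date.now();
  return () =>
    reportEvent('streams-endpoint-latency', { ...props, duration_ms: Date.now() - startTime });
}

// The route factory wraps every handler so the event fires whether the
// request succeeds or fails (the real code chains .finally on the handler).
async function handleRequest(doWork: () => Promise<unknown>) {
  const finishTracking = startTrackingEndpointLatency({
    name: 'logs', // falls back to '__all__' when the path has no stream name
    endpoint: 'PUT /api/streams/{name} 2023-10-31',
  });
  return doWork().finally(finishTracking);
}
```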
Commit 872d103b7e by Chris Cowan, 2025-03-19 13:00:40 -06:00 (committed by GitHub); parent 900689c628.
29 changed files with 617 additions and 38 deletions

View file

@ -18,7 +18,7 @@
"licensing",
"taskManager",
"alerting",
"inference",
"inference"
],
"optionalPlugins": [
"cloud",

View file

@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { AnalyticsServiceSetup } from '@kbn/core-analytics-server';
import { StreamEndpointLatencyProps } from './types';
import { STREAMS_ENDPOINT_LATENCY_EVENT } from './constants';
const LATENCY_TRACKING_ENDPOINT_ALLOW_LIST = [
'POST /api/streams/{name}/processing/_simulate 2023-10-31',
'POST /api/streams/{name}/processing/_suggestions 2023-10-31',
'POST /api/streams/{name}/_fork 2023-10-31',
'PUT /api/streams/{name}/dashboards/{dashboardId} 2023-10-31',
'PUT /api/streams/{name} 2023-10-31',
'PUT /api/streams/{name}/_group 2023-10-31',
'PUT /api/streams/{name}/_ingest 2023-10-31',
'DELETE /api/streams/{name} 2023-10-31',
'POST /api/streams/_enable 2023-10-31',
'POST /api/streams/_disable 2023-10-31',
'POST /api/streams/_resync 2023-10-31',
];
export class StreamsTelemetryClient {
constructor(private readonly analytics: AnalyticsServiceSetup) {}
public startTrackingEndpointLatency(
props: Pick<StreamEndpointLatencyProps, 'name' | 'endpoint'>
) {
if (!LATENCY_TRACKING_ENDPOINT_ALLOW_LIST.includes(props.endpoint)) {
return () => {};
}
const startTime = Date.now();
return () => {
this.analytics.reportEvent(STREAMS_ENDPOINT_LATENCY_EVENT, {
...props,
duration_ms: Date.now() - startTime,
});
};
}
}

View file

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
const STREAMS_ENDPOINT_LATENCY_EVENT = 'streams-endpoint-latency';
export { STREAMS_ENDPOINT_LATENCY_EVENT };

View file

@ -0,0 +1,16 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { STREAMS_ENDPOINT_LATENCY_EVENT } from './constants';
import { streamsEndpointLatencySchema } from './schemas';
const streamsEndpointLatencyEventType = {
eventType: STREAMS_ENDPOINT_LATENCY_EVENT,
schema: streamsEndpointLatencySchema,
};
export { streamsEndpointLatencyEventType };

View file

@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { RootSchema } from '@elastic/ebt/client';
import { StreamEndpointLatencyProps } from './types';
const streamsEndpointLatencySchema: RootSchema<StreamEndpointLatencyProps> = {
name: {
type: 'keyword',
_meta: {
description: 'The name of the Stream',
},
},
endpoint: {
type: 'keyword',
_meta: {
description: 'The name of the Streams endpoint',
},
},
duration_ms: {
type: 'long',
_meta: {
description: 'The duration of the endpoint in milliseconds',
},
},
};
export { streamsEndpointLatencySchema };

View file

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { AnalyticsServiceSetup } from '@kbn/core-analytics-server';
import { streamsEndpointLatencyEventType } from './events';
import { StreamsTelemetryClient } from './client';
export class StreamsTelemetryService {
private analytics?: AnalyticsServiceSetup;
constructor() {}
public setup(analytics: AnalyticsServiceSetup) {
this.analytics = analytics;
this.analytics.registerEventType(streamsEndpointLatencyEventType);
}
public getClient() {
if (!this.analytics) {
throw new Error('Analytics service is not available.');
}
return new StreamsTelemetryClient(this.analytics);
}
}

View file

@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
interface StreamEndpointLatencyProps {
name: string;
endpoint: string;
duration_ms: number;
}
export { type StreamEndpointLatencyProps };

View file

@ -25,6 +25,7 @@ import {
import { AssetService } from './lib/streams/assets/asset_service';
import { RouteHandlerScopedClients } from './routes/types';
import { StreamsService } from './lib/streams/service';
import { StreamsTelemetryService } from './lib/telemetry/service';
// eslint-disable-next-line @typescript-eslint/no-empty-interface
export interface StreamsPluginSetup {}
@ -48,6 +49,7 @@ export class StreamsPlugin
public config: StreamsConfig;
public logger: Logger;
public server?: StreamsServer;
private telemetryService = new StreamsTelemetryService();
constructor(context: PluginInitializerContext<StreamsConfig>) {
this.config = context.config.get();
@ -63,6 +65,8 @@ export class StreamsPlugin
logger: this.logger,
} as StreamsServer;
this.telemetryService.setup(core.analytics);
const assetService = new AssetService(core, this.logger);
const streamsService = new StreamsService(core, this.logger);
@ -71,6 +75,7 @@ export class StreamsPlugin
dependencies: {
assets: assetService,
server: this.server,
telemetry: this.telemetryService.getClient(),
getScopedClients: async ({
request,
}: {

View file

@ -9,6 +9,7 @@ import { createServerRouteFactory } from '@kbn/server-route-repository';
import { CreateServerRouteFactory } from '@kbn/server-route-repository-utils/src/typings';
import { badRequest, conflict, forbidden, internal, notFound } from '@hapi/boom';
import { errors } from '@elastic/elasticsearch';
import { get } from 'lodash';
import { StreamsRouteHandlerResources } from './types';
import { StatusError } from '../lib/streams/errors/status_error';
@ -25,27 +26,34 @@ export const createServerRoute: CreateServerRouteFactory<
tags: [...(config.options?.tags ?? []), 'oas-tag:streams'],
},
handler: (options) => {
return handler(options).catch((error) => {
if (error instanceof StatusError || error instanceof errors.ResponseError) {
switch (error.statusCode) {
case 400:
throw badRequest(error);
case 403:
throw forbidden(error);
case 404:
throw notFound(error);
case 409:
throw conflict(error);
case 500:
throw internal(error);
}
}
throw error;
const { telemetry } = options;
const finishTracking = telemetry.startTrackingEndpointLatency({
name: get(options, 'params.path.name', '__all__'),
endpoint: config.endpoint,
});
return handler(options)
.catch((error) => {
if (error instanceof StatusError || error instanceof errors.ResponseError) {
switch (error.statusCode) {
case 400:
throw badRequest(error);
case 403:
throw forbidden(error);
case 404:
throw notFound(error);
case 409:
throw conflict(error);
case 500:
throw internal(error);
}
}
throw error;
})
.finally(finishTracking);
},
});
};

View file

@ -49,7 +49,10 @@ export const handleProcessingSuggestion = async (
};
};
type SimulationWithPattern = ReturnType<typeof simulateProcessing> & { pattern: string };
type SimulationWithPattern = ReturnType<typeof simulateProcessing> & {
pattern: string;
success_rate: number;
};
export function extractAndGroupPatterns(samples: FlattenRecord[], field: string) {
const evalPattern = (sample: string) => {
@ -156,7 +159,7 @@ async function processPattern(
} as const,
input: `Logs:
${sample.exampleValues.join('\n')}
Given the raw messages coming from one data source, help us do the following:
Given the raw messages coming from one data source, help us do the following:
1. Name the log source based on logs format.
2. Write a parsing rule for Elastic ingest pipeline to extract structured fields from the raw message.
Make sure that the parsing rule is unique per log source. When in doubt, suggest multiple patterns, one generic one matching the general case and more specific ones.

View file

@ -14,6 +14,7 @@ import { StreamsServer } from '../types';
import { AssetService } from '../lib/streams/assets/asset_service';
import { AssetClient } from '../lib/streams/assets/asset_client';
import { StreamsClient } from '../lib/streams/client';
import { StreamsTelemetryClient } from '../lib/telemetry/client';
type GetScopedClients = ({
request,
@ -32,6 +33,7 @@ export interface RouteHandlerScopedClients {
export interface RouteDependencies {
assets: AssetService;
server: StreamsServer;
telemetry: StreamsTelemetryClient;
getScopedClients: GetScopedClients;
}

View file

@ -42,5 +42,6 @@
"@kbn/core-elasticsearch-client-server-internal",
"@kbn/utils",
"@kbn/core-saved-objects-server-internal",
"@kbn/core-analytics-server"
]
}

View file

@ -17,10 +17,15 @@ import { fieldsMetadataPluginPublicMock } from '@kbn/fields-metadata-plugin/publ
import { DataStreamsStatsClient } from '@kbn/dataset-quality-plugin/public/services/data_streams_stats/data_streams_stats_client';
import { LicensingPluginStart } from '@kbn/licensing-plugin/public';
import type { StreamsAppKibanaContext } from '../public/hooks/use_kibana';
import { StreamsTelemetryService } from '../public/telemetry/service';
export function getMockStreamsAppContext(): StreamsAppKibanaContext {
const appParams = coreMock.createAppMountParameters();
const core = coreMock.createStart();
const coreSetup = coreMock.createSetup();
const telemetryService = new StreamsTelemetryService();
telemetryService.setup(coreSetup.analytics);
return {
appParams,
@ -41,6 +46,7 @@ export function getMockStreamsAppContext(): StreamsAppKibanaContext {
services: {
dataStreamsClient: Promise.resolve({} as unknown as DataStreamsStatsClient),
PageTemplate: () => null,
telemetryClient: telemetryService.getClient(),
},
isServerless: false,
};

View file

@ -19,7 +19,7 @@
"navigation",
"fieldsMetadata",
"datasetQuality",
"licensing",
"licensing"
],
"requiredBundles": [
"kibanaReact"

View file

@ -154,7 +154,10 @@ function InnerGrokAiSuggestions({
previewDocuments: FlattenRecord[];
definition: IngestStreamGetResponse;
}) {
const { dependencies } = useKibana();
const {
dependencies,
services: { telemetryClient },
} = useKibana();
const {
streams: { streamsRepositoryClient },
observabilityAIAssistant,
@ -183,6 +186,11 @@ function InnerGrokAiSuggestions({
setSuggestionsLoading(true);
setSuggestionsError(undefined);
setSuggestions(undefined);
const finishTrackingAndReport = telemetryClient.startTrackingAIGrokSuggestionLatency({
name: definition.stream.name,
field: fieldValue,
connector_id: currentConnector,
});
streamsRepositoryClient
.fetch('POST /internal/streams/{name}/processing/_suggestions', {
signal: abortController.signal,
@ -196,6 +204,10 @@ function InnerGrokAiSuggestions({
},
})
.then((response) => {
finishTrackingAndReport(
response.patterns.length || 0,
response.simulations.map((item) => item.success_rate)
);
setSuggestions(response);
setSuggestionsLoading(false);
})
@ -210,6 +222,7 @@ function InnerGrokAiSuggestions({
fieldValue,
previewDocuments,
streamsRepositoryClient,
telemetryClient,
]);
let content: React.ReactNode = null;
@ -233,6 +246,7 @@ function InnerGrokAiSuggestions({
.map((pattern, i) => ({
pattern,
success_rate: suggestions.simulations[i].success_rate,
detected_fields_count: suggestions.simulations[i].detected_fields.length,
}))
.filter(
(suggestion) =>
@ -302,6 +316,13 @@ function InnerGrokAiSuggestions({
{ value: suggestion.pattern },
]);
}
telemetryClient.trackAIGrokSuggestionAccepted({
name: definition.stream.name,
field: fieldValue,
connector_id: currentConnector || 'n/a',
match_rate: suggestion.success_rate,
detected_fields: suggestion.detected_fields_count,
});
}}
data-test-subj="streamsAppGrokAiSuggestionsButton"
iconType="plusInCircle"

View file

@ -202,6 +202,7 @@ export function AddDashboardFlyout({
</EuiFlexItem>
</EuiFlexGroup>
<DashboardsTable
entityId={entityId}
dashboards={allDashboards}
loading={dashboardSuggestionsFetch.loading}
selectedDashboards={selectedDashboards}

View file

@ -25,7 +25,9 @@ export function DashboardsTable({
selectedDashboards,
setSelectedDashboards,
loading,
entityId,
}: {
entityId?: string;
loading: boolean;
dashboards: SanitizedDashboardAsset[] | undefined;
compact?: boolean;
@ -33,6 +35,8 @@ export function DashboardsTable({
setSelectedDashboards?: (dashboards: SanitizedDashboardAsset[]) => void;
}) {
const {
core: { application },
services: { telemetryClient },
dependencies: {
start: {
savedObjectsTagging: { ui: savedObjectsTaggingUi },
@ -53,7 +57,19 @@ export function DashboardsTable({
render: (_, { label, id }) => (
<EuiLink
data-test-subj="streamsAppColumnsLink"
href={dashboardLocator?.getRedirectUrl({ dashboardId: id, timeRange } || '')}
onClick={() => {
if (entityId) {
telemetryClient.trackAssetClick({
asset_id: id,
asset_type: 'dashboard',
name: entityId,
});
}
const url = dashboardLocator?.getRedirectUrl({ dashboardId: id, timeRange } || '');
if (url) {
application.navigateToUrl(url);
}
}}
>
{label}
</EuiLink>
@ -79,7 +95,15 @@ export function DashboardsTable({
] satisfies Array<EuiBasicTableColumn<SanitizedDashboardAsset>>)
: []),
];
}, [compact, dashboardLocator, savedObjectsTaggingUi, timeRange]);
}, [
application,
compact,
dashboardLocator,
entityId,
savedObjectsTaggingUi,
telemetryClient,
timeRange,
]);
const items = useMemo(() => {
return dashboards ?? [];

View file

@ -53,7 +53,7 @@ export function StreamDetailDashboardsView({
setIsUnlinkLoading(true);
await removeDashboards(selectedDashboards);
await dashboardsFetch.refresh();
dashboardsFetch.refresh();
setSelectedDashboards([]);
} finally {
@ -91,6 +91,7 @@ export function StreamDetailDashboardsView({
</EuiFlexItem>
<EuiFlexItem>
<DashboardsTable
entityId={definition?.stream.name}
dashboards={filteredDashboards}
loading={dashboardsFetch.loading}
selectedDashboards={selectedDashboards}
@ -102,7 +103,7 @@ export function StreamDetailDashboardsView({
entityId={definition.stream.name}
onAddDashboards={async (dashboards) => {
await addDashboards(dashboards);
await dashboardsFetch.refresh();
dashboardsFetch.refresh();
setIsAddDashboardFlyoutOpen(false);
}}
onClose={() => {

View file

@ -282,6 +282,7 @@ function QuickLinks({ definition }: { definition?: IngestStreamGetResponse }) {
return (
<DashboardsTable
entityId={definition?.stream.name}
dashboards={dashboardsFetch.value?.dashboards ?? EMPTY_DASHBOARD_LIST}
loading={dashboardsFetch.loading}
/>

View file

@ -9,6 +9,7 @@ import { useStreamsAppFetch } from './use_streams_app_fetch';
export const useDashboardsFetch = (name?: string) => {
const {
services: { telemetryClient },
dependencies: {
start: {
streams: { streamsRepositoryClient },
@ -17,18 +18,28 @@ export const useDashboardsFetch = (name?: string) => {
} = useKibana();
const dashboardsFetch = useStreamsAppFetch(
({ signal }) => {
async ({ signal }) => {
if (!name) {
return Promise.resolve(undefined);
}
return streamsRepositoryClient.fetch('GET /api/streams/{name}/dashboards 2023-10-31', {
signal,
params: {
path: {
name,
const response = await streamsRepositoryClient.fetch(
'GET /api/streams/{name}/dashboards 2023-10-31',
{
signal,
params: {
path: {
name,
},
},
},
}
);
telemetryClient.trackAssetCounts({
name,
dashboards: response.dashboards.length,
});
return response;
},
[name, streamsRepositoryClient]
);

View file

@ -5,7 +5,7 @@
* 2.0.
*/
import { CoreStart, Plugin, PluginInitializerContext } from '@kbn/core/public';
import { CoreSetup, CoreStart, Plugin, PluginInitializerContext } from '@kbn/core/public';
import type { Logger } from '@kbn/logging';
import { DataStreamsStatsService } from '@kbn/dataset-quality-plugin/public';
import { dynamic } from '@kbn/shared-ux-utility';
@ -19,6 +19,7 @@ import type {
StreamsApplicationProps,
} from './types';
import { StreamsAppServices } from './services/types';
import { StreamsTelemetryService } from './telemetry/service';
const StreamsApplication = dynamic(() =>
import('./application').then((mod) => ({ default: mod.StreamsApplication }))
@ -34,11 +35,13 @@ export class StreamsAppPlugin
>
{
logger: Logger;
telemetry: StreamsTelemetryService = new StreamsTelemetryService();
constructor(private readonly context: PluginInitializerContext<ConfigSchema>) {
this.logger = context.logger.get();
}
setup(): StreamsAppPublicSetup {
setup(coreSetup: CoreSetup): StreamsAppPublicSetup {
this.telemetry.setup(coreSetup.analytics);
return {};
}
@ -51,6 +54,7 @@ export class StreamsAppPlugin
.start({ http: coreStart.http })
.getClient(),
PageTemplate,
telemetryClient: this.telemetry.getClient(),
};
return (
<StreamsApplication

View file

@ -6,8 +6,10 @@
*/
import { IDataStreamsStatsClient } from '@kbn/dataset-quality-plugin/public';
import { StreamsTelemetryClient } from '../telemetry/client';
export interface StreamsAppServices {
dataStreamsClient: Promise<IDataStreamsStatsClient>;
PageTemplate: React.FC<React.PropsWithChildren<{}>>;
telemetryClient: StreamsTelemetryClient;
}

View file

@ -0,0 +1,50 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { AnalyticsServiceSetup } from '@kbn/core-analytics-browser';
import {
StreamsAIGrokSuggestionAcceptedProps,
StreamsAIGrokSuggestionLatencyProps,
StreamsAssetClickEventProps,
StreamsAssetCountProps,
} from './types';
import {
STREAMS_AI_GROK_SUGGESTION_ACCEPTED_EVENT_TYPE,
STREAMS_AI_GROK_SUGGESTION_LATENCY_EVENT_TYPE,
STREAMS_ASSET_CLICK_EVENT_TYPE,
STREAMS_ASSET_COUNT_EVENT_TYPE,
} from './constants';
export class StreamsTelemetryClient {
constructor(private readonly analytics: AnalyticsServiceSetup) {}
public trackAssetCounts(params: StreamsAssetCountProps) {
this.analytics.reportEvent(STREAMS_ASSET_COUNT_EVENT_TYPE, params);
}
public trackAssetClick(params: StreamsAssetClickEventProps) {
this.analytics.reportEvent(STREAMS_ASSET_CLICK_EVENT_TYPE, params);
}
public startTrackingAIGrokSuggestionLatency(
params: Pick<StreamsAIGrokSuggestionLatencyProps, 'name' | 'field' | 'connector_id'>
) {
const start = Date.now();
return (count: number, rates: number[]) => {
this.analytics.reportEvent(STREAMS_AI_GROK_SUGGESTION_LATENCY_EVENT_TYPE, {
...params,
duration_ms: Date.now() - start,
suggestion_count: count,
match_rate: rates,
});
};
}
public trackAIGrokSuggestionAccepted(params: StreamsAIGrokSuggestionAcceptedProps) {
this.analytics.reportEvent(STREAMS_AI_GROK_SUGGESTION_ACCEPTED_EVENT_TYPE, params);
}
}
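For context, a minimal usage sketch of the Grok-suggestion latency tracker above (illustrative only, not part of this diff; the values are placeholders and the real call site is in the Grok suggestions UI changes earlier in this PR):

```typescript
// Illustrative only: how the client above is consumed. Placeholders stand in
// for definition.stream.name, fieldValue, and currentConnector.
declare const telemetryClient: StreamsTelemetryClient;

const finishTrackingAndReport = telemetryClient.startTrackingAIGrokSuggestionLatency({
  name: 'logs',                 // stream name
  field: 'message',             // the field the Grok patterns target
  connector_id: 'my-connector', // id of the LLM connector used
});

// When the suggestions response arrives, report the suggestion count and the
// per-pattern success rates; a response with zero suggestions is still reported.
finishTrackingAndReport(2, [0.92, 0.71]);
```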

View file

@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
const STREAMS_ASSET_COUNT_EVENT_TYPE = 'streams-asset-count';
const STREAMS_ASSET_CLICK_EVENT_TYPE = 'streams-asset-click';
const STREAMS_AI_GROK_SUGGESTION_LATENCY_EVENT_TYPE = 'streams-ai-grok-suggestion-latency';
const STREAMS_AI_GROK_SUGGESTION_ACCEPTED_EVENT_TYPE = 'streams-ai-grok-suggestion-accepted';
export {
STREAMS_ASSET_COUNT_EVENT_TYPE,
STREAMS_ASSET_CLICK_EVENT_TYPE,
STREAMS_AI_GROK_SUGGESTION_LATENCY_EVENT_TYPE,
STREAMS_AI_GROK_SUGGESTION_ACCEPTED_EVENT_TYPE,
};

View file

@ -0,0 +1,46 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
STREAMS_AI_GROK_SUGGESTION_ACCEPTED_EVENT_TYPE,
STREAMS_AI_GROK_SUGGESTION_LATENCY_EVENT_TYPE,
STREAMS_ASSET_CLICK_EVENT_TYPE,
STREAMS_ASSET_COUNT_EVENT_TYPE,
} from './constants';
import {
streamsAIGrokSuggestionAcceptedSchema,
streamsAIGrokSuggestionLatencySchema,
streamsAssetClickEventSchema,
streamsAssetCountSchema,
} from './schemas';
const streamsAssetCountEventType = {
eventType: STREAMS_ASSET_COUNT_EVENT_TYPE,
schema: streamsAssetCountSchema,
};
const streamsAssetClickEventType = {
eventType: STREAMS_ASSET_CLICK_EVENT_TYPE,
schema: streamsAssetClickEventSchema,
};
const streamsAIGrokSuggestionLatencyEventType = {
eventType: STREAMS_AI_GROK_SUGGESTION_LATENCY_EVENT_TYPE,
schema: streamsAIGrokSuggestionLatencySchema,
};
const streamsAIGrokSuggestionAcceptedEventType = {
eventType: STREAMS_AI_GROK_SUGGESTION_ACCEPTED_EVENT_TYPE,
schema: streamsAIGrokSuggestionAcceptedSchema,
};
export {
streamsAssetCountEventType,
streamsAssetClickEventType,
streamsAIGrokSuggestionLatencyEventType,
streamsAIGrokSuggestionAcceptedEventType,
};

View file

@ -0,0 +1,151 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { RootSchema, SchemaArray } from '@elastic/ebt';
import {
StreamsAIGrokSuggestionAcceptedProps,
StreamsAIGrokSuggestionLatencyProps,
StreamsAssetClickEventProps,
StreamsAssetCountProps,
} from './types';
const streamsAssetCountSchema: RootSchema<StreamsAssetCountProps> = {
name: {
type: 'keyword',
_meta: {
description: 'The name of the Stream',
},
},
dashboards: {
type: 'long',
_meta: {
description: 'The number of dashboards attached to the Stream',
},
},
slos: {
type: 'long',
_meta: {
description: 'The number of SLOs attached to the Stream',
optional: true,
},
},
rules: {
type: 'long',
_meta: {
description: 'The number of rules attached to the Stream',
optional: true,
},
},
};
const streamsAssetClickEventSchema: RootSchema<StreamsAssetClickEventProps> = {
name: {
type: 'keyword',
_meta: {
description: 'The name of the Stream',
},
},
asset_type: {
type: 'keyword',
_meta: {
description: 'The type of asset: dashboard, slo, rule',
},
},
asset_id: {
type: 'keyword',
_meta: {
description: 'The id of the asset',
},
},
};
const matchRate: SchemaArray<number, number> = {
type: 'array',
items: {
type: 'float',
_meta: {
description: 'The match rate for a single suggested pattern',
},
},
_meta: {
description: 'The success rate of each match',
},
};
const streamsAIGrokSuggestionLatencySchema: RootSchema<StreamsAIGrokSuggestionLatencyProps> = {
name: {
type: 'keyword',
_meta: {
description: 'The name of the Stream',
},
},
field: {
type: 'keyword',
_meta: {
description: 'The name of the field used.',
},
},
connector_id: {
type: 'keyword',
_meta: {
description: 'The ID of the LLM connector',
},
},
suggestion_count: {
type: 'long',
_meta: {
description: 'The number of suggestions in the response',
},
},
match_rate: matchRate,
duration_ms: {
type: 'long',
_meta: {
description: 'The duration of the request',
},
},
};
const streamsAIGrokSuggestionAcceptedSchema: RootSchema<StreamsAIGrokSuggestionAcceptedProps> = {
name: {
type: 'keyword',
_meta: {
description: 'The name of the Stream',
},
},
field: {
type: 'keyword',
_meta: {
description: 'The name of the field used.',
},
},
connector_id: {
type: 'keyword',
_meta: {
description: 'The ID of the LLM connector',
},
},
match_rate: {
type: 'float',
_meta: {
description: 'The success rate of the accepted suggestion',
},
},
detected_fields: {
type: 'long',
_meta: {
description: 'The number of detected fields',
},
},
};
export {
streamsAssetCountSchema,
streamsAssetClickEventSchema,
streamsAIGrokSuggestionLatencySchema,
streamsAIGrokSuggestionAcceptedSchema,
};

View file

@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { AnalyticsServiceSetup } from '@kbn/core-analytics-browser';
import {
streamsAIGrokSuggestionAcceptedEventType,
streamsAIGrokSuggestionLatencyEventType,
streamsAssetClickEventType,
streamsAssetCountEventType,
} from './events';
import { StreamsTelemetryClient } from './client';
export class StreamsTelemetryService {
private analytics?: AnalyticsServiceSetup;
constructor() {}
public setup(analytics: AnalyticsServiceSetup) {
this.analytics = analytics;
this.analytics.registerEventType(streamsAssetCountEventType);
this.analytics.registerEventType(streamsAssetClickEventType);
this.analytics.registerEventType(streamsAIGrokSuggestionLatencyEventType);
this.analytics.registerEventType(streamsAIGrokSuggestionAcceptedEventType);
}
public getClient() {
if (!this.analytics) {
throw new Error('Analytics service is not available.');
}
return new StreamsTelemetryClient(this.analytics);
}
}

View file

@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
interface StreamsAssetCountProps {
name: string;
dashboards: number;
slos?: number;
rules?: number;
}
interface StreamsAssetClickEventProps {
name: string;
asset_type: 'dashboard' | 'slo' | 'rule';
asset_id: string;
}
interface StreamsAIGrokSuggestionLatencyProps {
name: string;
field: string;
connector_id: string;
suggestion_count: number;
duration_ms: number;
match_rate: number[];
}
interface StreamsAIGrokSuggestionAcceptedProps {
name: string;
field: string;
connector_id: string;
match_rate: number;
detected_fields: number;
}
export {
type StreamsAssetCountProps,
type StreamsAssetClickEventProps,
type StreamsAIGrokSuggestionLatencyProps,
type StreamsAIGrokSuggestionAcceptedProps,
};

View file

@ -59,5 +59,6 @@
"@kbn/licensing-plugin",
"@kbn/datemath",
"@kbn/xstate-utils",
"@kbn/core-analytics-browser",
]
}