[StdPerf] Extending kibana:plugin_render_time with custom metrics (#189115)

## Summary

This PR enables consumers of `PerformanceContextProvider` to track
custom metrics alongside `onPageReady`.
Tracking custom metrics is important for analysing TTFMP further and
putting it into perspective: loading a component that is backed by
thousands of documents is not the same as loading one that only uses a
couple of them.

### Changes
The changes leverage the existing `PerformanceMetricEvent` schema, which already reserves up to nine custom key/value pairs (`key1`/`value1` through `key9`/`value9`); any other keys are ignored, as shown in the example below.
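
A minimal sketch of the intended consumer-side usage (based on the dataset quality changes in this PR; it assumes `usePerformanceContext` is the existing hook exported from `@kbn/ebt-tools`, and the metric names and props are illustrative):

```tsx
import React, { useEffect } from 'react';
import { usePerformanceContext } from '@kbn/ebt-tools';

interface Props {
  numberOfDatasets: number;
  numberOfDocuments: number;
}

// Hypothetical consumer page; the onPageReady call is the point here.
function ExampleOverview({ numberOfDatasets, numberOfDocuments }: Props) {
  const { onPageReady } = usePerformanceContext();

  useEffect(() => {
    // key1..key9 / value1..value9 are the slots the PerformanceMetricEvent
    // schema reserves for custom metrics; any other key is filtered out.
    onPageReady({
      key1: 'datasets',
      value1: numberOfDatasets,
      key2: 'documents',
      value2: numberOfDocuments,
    });
  }, [onPageReady, numberOfDatasets, numberOfDocuments]);

  return null;
}
```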

### Ideas
One improvement could be to reserve one key/value pair (i.e. omit it
when defining the `CustomMetrics` type) and dedicate it explicitly to
time-range values, e.g. the number of milliseconds between the start and
end dates in the timepicker. This would keep the value in the same
property across all observability applications and standardise how it is
measured; see the sketch below.
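
A hypothetical shape for that idea (not part of this PR; the choice of `key9`/`value9` and the `timerange_duration` name are illustrative only):

```ts
import type { PerformanceMetricEvent } from '@kbn/ebt-tools';

// Reserve key9/value9 by omitting them from what consumers may set...
export type CustomMetrics = Omit<
  PerformanceMetricEvent,
  'eventName' | 'meta' | 'duration' | 'key9' | 'value9'
>;

// ...so the provider itself could report the timepicker range under the
// same property for every observability application, e.g.:
//   key9: 'timerange_duration', value9: endDateMs - startDateMs
```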
Commit c8edac6e3f (parent a1dd9f8f21), authored by Yngrid Coello on 2024-08-08 18:09:07 +02:00 and committed by GitHub. 6 changed files with 185 additions and 8 deletions.

---

@@ -0,0 +1,129 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { trackPerformanceMeasureEntries } from './track_performance_measure_entries';
import { analyticsClientMock } from './analytics_service.test.mocks';
interface MockEntryList {
getEntries: () => object[];
}
type ObsCallback = (_entries: MockEntryList, _obs: object) => void;
const mockObs = { observe: jest.fn(), disconnect: jest.fn() };
const setupMockPerformanceObserver = (entries: object[]) => {
const mockPerformanceObserver = function (callback: ObsCallback) {
callback(
{
getEntries: () => entries,
},
mockObs
);
return mockObs;
};
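// Replace the global PerformanceObserver so the code under test receives
// our synthetic entries synchronously.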
(global.PerformanceObserver as unknown) = mockPerformanceObserver;
};
describe('trackPerformanceMeasureEntries', () => {
beforeEach(() => {
jest.clearAllMocks();
});
test("doesn't report an analytics event when not receiving events", () => {
setupMockPerformanceObserver([{}]);
trackPerformanceMeasureEntries(analyticsClientMock, true);
expect(analyticsClientMock.reportEvent).toHaveBeenCalledTimes(0);
});
test("doesn't report an analytics event when receiving not 'kibana:performance' events", () => {
setupMockPerformanceObserver([
{
name: '/',
entryType: 'measure',
startTime: 100,
duration: 1000,
detail: {
eventName: 'kibana:plugin_render_time',
type: 'anything',
},
},
]);
trackPerformanceMeasureEntries(analyticsClientMock, true);
expect(analyticsClientMock.reportEvent).toHaveBeenCalledTimes(0);
});
test("doesn't report an analytics event when receiving not 'measure' events", () => {
setupMockPerformanceObserver([
{
name: '/',
entryType: 'anything',
startTime: 100,
duration: 1000,
detail: {
eventName: 'kibana:plugin_render_time',
type: 'kibana:performance',
},
},
]);
trackPerformanceMeasureEntries(analyticsClientMock, true);
expect(analyticsClientMock.reportEvent).toHaveBeenCalledTimes(0);
});
test('reports an analytics event when receiving "measure" and "kibana:performance" events', () => {
setupMockPerformanceObserver([
{
name: '/',
entryType: 'measure',
startTime: 100,
duration: 1000,
detail: {
eventName: 'kibana:plugin_render_time',
type: 'kibana:performance',
},
},
]);
trackPerformanceMeasureEntries(analyticsClientMock, true);
expect(analyticsClientMock.reportEvent).toHaveBeenCalledTimes(1);
});
test('reports an analytics event ignoring keys and values that are not allowed', () => {
setupMockPerformanceObserver([
{
name: '/',
entryType: 'measure',
startTime: 100,
duration: 1000,
detail: {
eventName: 'kibana:plugin_render_time',
type: 'kibana:performance',
customMetrics: {
key1: 'key1',
value1: 'value1',
key10: 'key10',
value10: 'value10',
anyKey: 'anyKey',
anyValue: 'anyValue',
},
},
},
]);
trackPerformanceMeasureEntries(analyticsClientMock, true);
expect(analyticsClientMock.reportEvent).toHaveBeenCalledTimes(1);
expect(analyticsClientMock.reportEvent).toHaveBeenCalledWith('performance_metric', {
duration: 1000,
eventName: 'kibana:plugin_render_time',
key1: 'key1',
meta: { target: '/' },
value1: 'value1',
});
});
});

---

@@ -8,6 +8,13 @@
import type { AnalyticsClient } from '@elastic/ebt/client';
import { reportPerformanceMetricEvent } from '@kbn/ebt-tools';
+const MAX_CUSTOM_METRICS = 9;
+// The keys and values for the custom metrics are limited to 9 pairs
+const ALLOWED_CUSTOM_METRICS_KEYS_VALUES = Array.from({ length: MAX_CUSTOM_METRICS }, (_, i) => [
+`key${i + 1}`,
+`value${i + 1}`,
+]).flat();
export function trackPerformanceMeasureEntries(analytics: AnalyticsClient, isDevMode: boolean) {
function perfObserver(
list: PerformanceObserverEntryList,
@@ -18,6 +25,19 @@ export function trackPerformanceMeasureEntries(analytics: AnalyticsClient, isDevMode
if (entry.entryType === 'measure' && entry.detail?.type === 'kibana:performance') {
const target = entry?.name;
const duration = entry.duration;
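+// Keep only the reserved key1..key9 / value1..value9 pairs; any other
+// custom metric key is dropped before the event is reported.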
+const customMetrics = Object.keys(entry.detail?.customMetrics ?? {}).reduce(
+(acc, metric) => {
+if (ALLOWED_CUSTOM_METRICS_KEYS_VALUES.includes(metric)) {
+return {
+...acc,
+[metric]: entry.detail.customMetrics[metric],
+};
+}
+return acc;
+},
+{}
+);
if (isDevMode) {
if (!target) {
@@ -47,6 +67,7 @@ export function trackPerformanceMeasureEntries(analytics: AnalyticsClient, isDevMode
reportPerformanceMetricEvent(analytics, {
eventName: entry.detail.eventName,
duration,
+...customMetrics,
meta: {
target,
},

---

@@ -11,6 +11,9 @@ import { afterFrame } from '@elastic/apm-rum-core';
import { useLocation } from 'react-router-dom';
import { perfomanceMarkers } from '../performance_markers';
import { PerformanceApi, PerformanceContext } from './use_performance_context';
+import { PerformanceMetricEvent } from '../../performance_metric_events';
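+// CustomMetrics is everything in PerformanceMetricEvent except the fields
+// the provider fills in itself (eventName, meta, duration).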
+export type CustomMetrics = Omit<PerformanceMetricEvent, 'eventName' | 'meta' | 'duration'>;
function measureInteraction() {
performance.mark(perfomanceMarkers.startPageChange);
@@ -19,13 +22,18 @@ function measureInteraction() {
/**
* Marks the end of the page ready state and measures the performance between the start of the page change and the end of the page ready state.
* @param pathname - The pathname of the page.
+* @param customMetrics - Custom metrics to be included in the performance measure.
*/
-pageReady(pathname: string) {
+pageReady(pathname: string, customMetrics?: CustomMetrics) {
performance.mark(perfomanceMarkers.endPageReady);
if (!trackedRoutes.includes(pathname)) {
performance.measure(pathname, {
-detail: { eventName: 'kibana:plugin_render_time', type: 'kibana:performance' },
+detail: {
+eventName: 'kibana:plugin_render_time',
+type: 'kibana:performance',
+customMetrics,
+},
start: perfomanceMarkers.startPageChange,
end: perfomanceMarkers.endPageReady,
});
@@ -52,9 +60,9 @@ export function PerformanceContextProvider({ children }: { children: React.React
const api = useMemo<PerformanceApi>(
() => ({
-onPageReady() {
+onPageReady(customMetrics) {
if (isRendered) {
-interaction.pageReady(location.pathname);
+interaction.pageReady(location.pathname, customMetrics);
}
},
}),

---

@@ -7,9 +7,14 @@
*/
import { createContext, useContext } from 'react';
+import { CustomMetrics } from './performance_context';
export interface PerformanceApi {
-onPageReady(): void;
+/**
+* Marks the end of the page ready state and measures the performance between the start of the page change and the end of the page ready state.
+* @param customMetrics - Custom metrics to be included in the performance measure.
+*/
+onPageReady(customMetrics?: CustomMetrics): void;
}
export const PerformanceContext = createContext<PerformanceApi | undefined>(undefined);

---

@@ -33,13 +33,24 @@ import { mapPercentagesToQualityCounts } from '../../quality_indicator';
export function DatasetsQualityIndicators() {
const { onPageReady } = usePerformanceContext();
-const { datasetsQuality, isDatasetsQualityLoading, datasetsActivity } = useSummaryPanelContext();
+const {
+datasetsQuality,
+isDatasetsQualityLoading,
+datasetsActivity,
+numberOfDatasets,
+numberOfDocuments,
+} = useSummaryPanelContext();
const qualityCounts = mapPercentagesToQualityCounts(datasetsQuality.percentages);
const datasetsWithoutIgnoredField =
datasetsActivity.total > 0 ? datasetsActivity.total - datasetsQuality.percentages.length : 0;
-if (!isDatasetsQualityLoading) {
-onPageReady();
+if (!isDatasetsQualityLoading && (numberOfDatasets || numberOfDocuments)) {
+onPageReady({
+key1: 'datasets',
+value1: numberOfDatasets,
+key2: 'documents',
+value2: numberOfDocuments,
+});
}
return (

---

@@ -85,6 +85,9 @@ const useSummaryPanel = () => {
isDatasetsActivityLoading,
datasetsActivity,
numberOfDatasets: filteredItems.length,
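+// Total documents across the filtered datasets, summed from each
+// dataset's degradedDocs.docsCount.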
+numberOfDocuments: filteredItems.reduce((acc, curr) => acc + curr.degradedDocs.docsCount, 0),
};
};