mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 17:59:23 -04:00
[Profiling] Making plugin Production ready (#159738)
This PR does: - checks Kibana.spec file - Checks server feature.ts - Adds correct route access to APIs - Removes unnecessary logs - Removes collector and symbolized `secret_token` from config schema as it won't be used - Add README file --------- Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent
88c2f02fca
commit
c87e4e983d
16 changed files with 168 additions and 111 deletions
|
@ -674,7 +674,7 @@ Elastic.
|
||||||
|
|
||||||
|
|
||||||
|{kib-repo}blob/{branch}/x-pack/plugins/profiling/README.md[profiling]
|
|{kib-repo}blob/{branch}/x-pack/plugins/profiling/README.md[profiling]
|
||||||
|undefined
|
|Universal Profiling provides fleet-wide, whole-system, continuous profiling with zero instrumentation. Get a comprehensive understanding of what lines of code are consuming compute resources throughout your entire fleet by visualizing your data in Kibana using the flamegraph, stacktraces, and top functions views.
|
||||||
|
|
||||||
|
|
||||||
|{kib-repo}blob/{branch}/x-pack/plugins/remote_clusters/README.md[remoteClusters]
|
|{kib-repo}blob/{branch}/x-pack/plugins/remote_clusters/README.md[remoteClusters]
|
||||||
|
|
|
@ -1 +1,79 @@
|
||||||
### TODO
|
# Universal Profiling (Beta)
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
Universal Profiling provides fleet-wide, whole-system, continuous profiling with zero instrumentation. Get a comprehensive understanding of what lines of code are consuming compute resources throughout your entire fleet by visualizing your data in Kibana using the flamegraph, stacktraces, and top functions views.
|
||||||
|
|
||||||
|
### Universal profiling setup
|
||||||
|
Universal Profiling is enabled by default on [Elastic Cloud](https://www.elastic.co/cloud/), and you can find it under **Observability**. To see data in Universal Profiling, you need to initialize it.
|
||||||
|
|
||||||
|
##### **Initialize Universal Profiling**
|
||||||
|
Initialize Universal Profiling by navigating to one of the views and clicking the **Set up** button. Clicking this will trigger some checks and install some packages so data can be processed.
|
||||||
|
|
||||||
|
The following are some of the actions and checks that occur during initialization:
|
||||||
|
- Check that the APM integration is installed and configured.
|
||||||
|
- Create Universal Profiling indices.
|
||||||
|
- Install the Collector integration.
|
||||||
|
- Install the Symbolizer integration.
|
||||||
|
|
||||||
|
### Collector integration
|
||||||
|
The Collector is the component that receives data from the profiling agents deployed on users' machines.
|
||||||
|
|
||||||
|
It runs a gRPC server over HTTPS and exposes an endpoint where the profiling agents can send data.
|
||||||
|
|
||||||
|
To send data, agents are required to use token-based authentication, referred to as `secretToken` in the agent configurations.
|
||||||
|
|
||||||
|
The token is generated by Kibana during the setup process and at the moment cannot be configured by users.
|
||||||
|
|
||||||
|
The "Add Data" page will display instructions for several deployment methodologies.
|
||||||
|
The instructions contain both the endpoint and the token that allow the profiling agent to connect to the Collector.
|
||||||
|
|
||||||
|
### Symbolizer integration
|
||||||
|
The Symbolizer is the component that processes debug symbols for the received profiling data, enriching the profiling visualizations with source-code metadata.
|
||||||
|
|
||||||
|
It processes both publicly-available debug symbols and "private" debug symbols.
|
||||||
|
|
||||||
|
For public symbols, users don't have to do anything: the symbolizer asynchronously intercepts unsymbolized frames and populates them automatically.
|
||||||
|
|
||||||
|
For private symbols, an HTTPS endpoint is provided to users for uploading the debug symbols of the software they own.
|
||||||
|
|
||||||
|
The authentication and authorization on this endpoint are provided as part of the request, in the form of an Elasticsearch API key.
|
||||||
|
|
||||||
|
|
||||||
|
## Testing (unit, e2e)
|
||||||
|
### Unit Tests (Jest)
|
||||||
|
|
||||||
|
```
|
||||||
|
node scripts/jest --config x-pack/plugins/profiling/jest.config.js [--watchAll]
|
||||||
|
```
|
||||||
|
|
||||||
|
## E2E Tests (Cypress)
|
||||||
|
The E2E tests are located in [`x-pack/plugins/profiling/e2e`](./e2e).
|
||||||
|
|
||||||
|
Universal Profiling uses [FTR](../../../packages/kbn-test/README.mdx) (functional test runner) and [Cypress](https://www.cypress.io/) to run the e2e tests. The tests are located at `kibana/x-pack/plugins/profiling/e2e/cypress/e2e`.
|
||||||
|
|
||||||
|
### Start test server
|
||||||
|
|
||||||
|
```
|
||||||
|
node x-pack/plugins/profiling/scripts/test/e2e --server
|
||||||
|
```
|
||||||
|
|
||||||
|
### Open cypress dashboard
|
||||||
|
|
||||||
|
```
|
||||||
|
node x-pack/plugins/profiling/scripts/test/e2e --open
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run tests in terminal
|
||||||
|
|
||||||
|
```
|
||||||
|
node x-pack/plugins/profiling/scripts/test/e2e --runner
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run like CI
|
||||||
|
|
||||||
|
```
|
||||||
|
node x-pack/plugins/profiling/scripts/test/e2e
|
||||||
|
```
|
||||||
|
|
||||||
|
## Other resources
|
||||||
|
- [Official Profiling documentation](https://www.elastic.co/observability/universal-profiling)
|
|
@ -7,22 +7,22 @@
|
||||||
"server": true,
|
"server": true,
|
||||||
"browser": true,
|
"browser": true,
|
||||||
"configPath": ["xpack", "profiling"],
|
"configPath": ["xpack", "profiling"],
|
||||||
|
"optionalPlugins": ["spaces"],
|
||||||
"requiredPlugins": [
|
"requiredPlugins": [
|
||||||
"navigation",
|
"charts",
|
||||||
|
"cloud",
|
||||||
"data",
|
"data",
|
||||||
"kibanaUtils",
|
"dataViews",
|
||||||
"share",
|
"features",
|
||||||
|
"fleet",
|
||||||
|
"licensing",
|
||||||
"observability",
|
"observability",
|
||||||
"observabilityShared",
|
"observabilityShared",
|
||||||
"features",
|
|
||||||
"kibanaReact",
|
|
||||||
"unifiedSearch",
|
"unifiedSearch",
|
||||||
"dataViews",
|
],
|
||||||
"charts",
|
"requiredBundles": [
|
||||||
"spaces",
|
"kibanaReact",
|
||||||
"cloud",
|
"kibanaUtils",
|
||||||
"fleet",
|
|
||||||
"licensing"
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,26 +18,25 @@ export const PROFILING_FEATURE = {
|
||||||
order: 1200,
|
order: 1200,
|
||||||
category: DEFAULT_APP_CATEGORIES.observability,
|
category: DEFAULT_APP_CATEGORIES.observability,
|
||||||
app: [PROFILING_SERVER_FEATURE_ID, 'ux', 'kibana'],
|
app: [PROFILING_SERVER_FEATURE_ID, 'ux', 'kibana'],
|
||||||
catalogue: [PROFILING_SERVER_FEATURE_ID],
|
|
||||||
// see x-pack/plugins/features/common/feature_kibana_privileges.ts
|
// see x-pack/plugins/features/common/feature_kibana_privileges.ts
|
||||||
privileges: {
|
privileges: {
|
||||||
all: {
|
all: {
|
||||||
app: [PROFILING_SERVER_FEATURE_ID, 'ux', 'kibana'],
|
app: [PROFILING_SERVER_FEATURE_ID, 'ux', 'kibana'],
|
||||||
catalogue: [PROFILING_SERVER_FEATURE_ID],
|
|
||||||
savedObject: {
|
savedObject: {
|
||||||
all: [],
|
all: [],
|
||||||
read: [],
|
read: [],
|
||||||
},
|
},
|
||||||
ui: ['show'],
|
ui: ['show'],
|
||||||
|
api: [PROFILING_SERVER_FEATURE_ID],
|
||||||
},
|
},
|
||||||
read: {
|
read: {
|
||||||
app: [PROFILING_SERVER_FEATURE_ID, 'ux', 'kibana'],
|
app: [PROFILING_SERVER_FEATURE_ID, 'ux', 'kibana'],
|
||||||
catalogue: [PROFILING_SERVER_FEATURE_ID],
|
|
||||||
savedObject: {
|
savedObject: {
|
||||||
all: [],
|
all: [],
|
||||||
read: [],
|
read: [],
|
||||||
},
|
},
|
||||||
ui: ['show'],
|
ui: ['show'],
|
||||||
|
api: [PROFILING_SERVER_FEATURE_ID],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
|
@ -9,9 +9,14 @@ import { schema, TypeOf } from '@kbn/config-schema';
|
||||||
import type { PluginConfigDescriptor, PluginInitializerContext } from '@kbn/core/server';
|
import type { PluginConfigDescriptor, PluginInitializerContext } from '@kbn/core/server';
|
||||||
import { ProfilingPlugin } from './plugin';
|
import { ProfilingPlugin } from './plugin';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* These properties are used to create both the Collector and the Symbolizer integrations
|
||||||
|
* when Universal Profiling is initialized.
|
||||||
|
 * As of now Universal Profiling is only available on Elastic Cloud, so
|
||||||
|
 * Elastic Cloud will be responsible for filling in these properties and passing them to Kibana.
|
||||||
|
*/
|
||||||
const packageInputSchema = schema.object({
|
const packageInputSchema = schema.object({
|
||||||
host: schema.maybe(schema.string()),
|
host: schema.maybe(schema.string()),
|
||||||
secret_token: schema.maybe(schema.string()),
|
|
||||||
tls_enabled: schema.maybe(schema.boolean()),
|
tls_enabled: schema.maybe(schema.boolean()),
|
||||||
tls_supported_protocols: schema.maybe(schema.arrayOf(schema.string())),
|
tls_supported_protocols: schema.maybe(schema.arrayOf(schema.string())),
|
||||||
tls_certificate_path: schema.maybe(schema.string()),
|
tls_certificate_path: schema.maybe(schema.string()),
|
||||||
|
@ -22,12 +27,17 @@ const configSchema = schema.object({
|
||||||
enabled: schema.boolean({ defaultValue: false }),
|
enabled: schema.boolean({ defaultValue: false }),
|
||||||
symbolizer: schema.maybe(packageInputSchema),
|
symbolizer: schema.maybe(packageInputSchema),
|
||||||
collector: schema.maybe(packageInputSchema),
|
collector: schema.maybe(packageInputSchema),
|
||||||
elasticsearch: schema.maybe(
|
elasticsearch: schema.conditional(
|
||||||
schema.object({
|
schema.contextRef('dist'),
|
||||||
hosts: schema.string(),
|
schema.literal(true),
|
||||||
username: schema.string(),
|
schema.never(),
|
||||||
password: schema.string(),
|
schema.maybe(
|
||||||
})
|
schema.object({
|
||||||
|
hosts: schema.string(),
|
||||||
|
username: schema.string(),
|
||||||
|
password: schema.string(),
|
||||||
|
})
|
||||||
|
)
|
||||||
),
|
),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
@ -50,33 +50,6 @@ describe('getVarsFor', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('discards secret_token defined and generate a new one', () => {
|
|
||||||
const config: PackageInputType = {
|
|
||||||
host: 'example.com',
|
|
||||||
tls_enabled: true,
|
|
||||||
tls_supported_protocols: ['foo', 'bar'],
|
|
||||||
tls_certificate_path: '123',
|
|
||||||
tls_key_path: '456',
|
|
||||||
secret_token: 'bar!',
|
|
||||||
};
|
|
||||||
|
|
||||||
const { secret_token: secretToken, ...result } = getVarsFor({
|
|
||||||
config,
|
|
||||||
includeSecretToken: true,
|
|
||||||
});
|
|
||||||
expect(secretToken?.type).toBe('text');
|
|
||||||
expect(secretToken?.value).not.toBe('bar!');
|
|
||||||
expect(secretToken?.value.length).toBe(16);
|
|
||||||
expect(secretTokenRegex.test(secretToken?.value)).toBeTruthy();
|
|
||||||
expect(result).toEqual({
|
|
||||||
host: { type: 'text', value: 'example.com' },
|
|
||||||
tls_enabled: { type: 'bool', value: true },
|
|
||||||
tls_supported_protocols: { type: 'text', value: ['foo', 'bar'] },
|
|
||||||
tls_certificate_path: { type: 'text', value: '123' },
|
|
||||||
tls_key_path: { type: 'text', value: '456' },
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns vars without secret_token', () => {
|
it('returns vars without secret_token', () => {
|
||||||
const config: PackageInputType = {
|
const config: PackageInputType = {
|
||||||
host: 'example.com',
|
host: 'example.com',
|
||||||
|
|
|
@ -161,6 +161,7 @@ export function generateSecretToken() {
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type PackagePolicyVars = PackageInputType & { secret_token?: string };
|
||||||
export function getVarsFor({
|
export function getVarsFor({
|
||||||
config,
|
config,
|
||||||
includeSecretToken,
|
includeSecretToken,
|
||||||
|
@ -168,13 +169,13 @@ export function getVarsFor({
|
||||||
config: PackageInputType;
|
config: PackageInputType;
|
||||||
includeSecretToken: boolean;
|
includeSecretToken: boolean;
|
||||||
}) {
|
}) {
|
||||||
const configKeys = Object.keys(config) as Array<keyof PackageInputType>;
|
const configKeys = Object.keys(config) as Array<keyof PackagePolicyVars>;
|
||||||
if (includeSecretToken) {
|
if (includeSecretToken) {
|
||||||
configKeys.push('secret_token');
|
configKeys.push('secret_token');
|
||||||
}
|
}
|
||||||
|
|
||||||
return configKeys.reduce<
|
return configKeys.reduce<
|
||||||
Partial<Record<keyof PackageInputType, { type: 'text' | 'bool'; value: any }>>
|
Partial<Record<keyof PackagePolicyVars, { type: 'text' | 'bool'; value: any }>>
|
||||||
>((acc, currKey) => {
|
>((acc, currKey) => {
|
||||||
const value = currKey === 'secret_token' ? generateSecretToken() : config[currKey];
|
const value = currKey === 'secret_token' ? generateSecretToken() : config[currKey];
|
||||||
const type = typeof value === 'boolean' ? 'bool' : 'text';
|
const type = typeof value === 'boolean' ? 'bool' : 'text';
|
||||||
|
|
|
@ -35,7 +35,6 @@ export class ProfilingPlugin
|
||||||
}
|
}
|
||||||
|
|
||||||
public setup(core: CoreSetup<ProfilingPluginStartDeps>, deps: ProfilingPluginSetupDeps) {
|
public setup(core: CoreSetup<ProfilingPluginStartDeps>, deps: ProfilingPluginSetupDeps) {
|
||||||
this.logger.debug('profiling: Setup');
|
|
||||||
const router = core.http.createRouter<ProfilingRequestHandlerContext>();
|
const router = core.http.createRouter<ProfilingRequestHandlerContext>();
|
||||||
|
|
||||||
deps.features.registerKibanaFeature(PROFILING_FEATURE);
|
deps.features.registerKibanaFeature(PROFILING_FEATURE);
|
||||||
|
@ -80,7 +79,6 @@ export class ProfilingPlugin
|
||||||
}
|
}
|
||||||
|
|
||||||
public start(core: CoreStart) {
|
public start(core: CoreStart) {
|
||||||
this.logger.debug('profiling: Started');
|
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -87,9 +87,8 @@ export async function findDownsampledIndex({
|
||||||
});
|
});
|
||||||
sampleCountFromInitialExp = resp.hits.total.value;
|
sampleCountFromInitialExp = resp.hits.total.value;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
logger.info(e.message);
|
logger.error(e.message);
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('sampleCountFromPow6 ' + sampleCountFromInitialExp);
|
|
||||||
return getSampledTraceEventsIndex(index, sampleSize, sampleCountFromInitialExp, initialExp);
|
return getSampledTraceEventsIndex(index, sampleSize, sampleCountFromInitialExp, initialExp);
|
||||||
}
|
}
|
||||||
|
|
|
@ -26,6 +26,7 @@ export function registerFlameChartSearchRoute({
|
||||||
router.get(
|
router.get(
|
||||||
{
|
{
|
||||||
path: paths.Flamechart,
|
path: paths.Flamechart,
|
||||||
|
options: { tags: ['access:profiling'] },
|
||||||
validate: {
|
validate: {
|
||||||
query: schema.object({
|
query: schema.object({
|
||||||
timeFrom: schema.number(),
|
timeFrom: schema.number(),
|
||||||
|
@ -48,7 +49,6 @@ export function registerFlameChartSearchRoute({
|
||||||
});
|
});
|
||||||
const totalSeconds = timeTo - timeFrom;
|
const totalSeconds = timeTo - timeFrom;
|
||||||
|
|
||||||
const t0 = Date.now();
|
|
||||||
const {
|
const {
|
||||||
stackTraceEvents,
|
stackTraceEvents,
|
||||||
stackTraces,
|
stackTraces,
|
||||||
|
@ -61,10 +61,8 @@ export function registerFlameChartSearchRoute({
|
||||||
filter,
|
filter,
|
||||||
sampleSize: targetSampleSize,
|
sampleSize: targetSampleSize,
|
||||||
});
|
});
|
||||||
logger.info(`querying stacktraces took ${Date.now() - t0} ms`);
|
|
||||||
|
|
||||||
const flamegraph = await withProfilingSpan('create_flamegraph', async () => {
|
const flamegraph = await withProfilingSpan('create_flamegraph', async () => {
|
||||||
const t1 = Date.now();
|
|
||||||
const tree = createCalleeTree(
|
const tree = createCalleeTree(
|
||||||
stackTraceEvents,
|
stackTraceEvents,
|
||||||
stackTraces,
|
stackTraces,
|
||||||
|
@ -73,20 +71,20 @@ export function registerFlameChartSearchRoute({
|
||||||
totalFrames,
|
totalFrames,
|
||||||
samplingRate
|
samplingRate
|
||||||
);
|
);
|
||||||
logger.info(`creating callee tree took ${Date.now() - t1} ms`);
|
|
||||||
|
|
||||||
const t2 = Date.now();
|
|
||||||
const fg = createBaseFlameGraph(tree, samplingRate, totalSeconds);
|
const fg = createBaseFlameGraph(tree, samplingRate, totalSeconds);
|
||||||
logger.info(`creating flamegraph took ${Date.now() - t2} ms`);
|
|
||||||
|
|
||||||
return fg;
|
return fg;
|
||||||
});
|
});
|
||||||
|
|
||||||
logger.info('returning payload response to client');
|
|
||||||
|
|
||||||
return response.ok({ body: flamegraph });
|
return response.ok({ body: flamegraph });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return handleRouteHandlerError({ error, logger, response });
|
return handleRouteHandlerError({
|
||||||
|
error,
|
||||||
|
logger,
|
||||||
|
response,
|
||||||
|
message: 'Error while fetching flamegraph',
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
|
@ -34,6 +34,7 @@ export function registerTopNFunctionsSearchRoute({
|
||||||
router.get(
|
router.get(
|
||||||
{
|
{
|
||||||
path: paths.TopNFunctions,
|
path: paths.TopNFunctions,
|
||||||
|
options: { tags: ['access:profiling'] },
|
||||||
validate: {
|
validate: {
|
||||||
query: querySchema,
|
query: querySchema,
|
||||||
},
|
},
|
||||||
|
@ -51,16 +52,13 @@ export function registerTopNFunctionsSearchRoute({
|
||||||
kuery,
|
kuery,
|
||||||
});
|
});
|
||||||
|
|
||||||
const t0 = Date.now();
|
|
||||||
const { stackTraceEvents, stackTraces, executables, stackFrames, samplingRate } =
|
const { stackTraceEvents, stackTraces, executables, stackFrames, samplingRate } =
|
||||||
await searchStackTraces({
|
await searchStackTraces({
|
||||||
client: profilingElasticsearchClient,
|
client: profilingElasticsearchClient,
|
||||||
filter,
|
filter,
|
||||||
sampleSize: targetSampleSize,
|
sampleSize: targetSampleSize,
|
||||||
});
|
});
|
||||||
logger.info(`querying stacktraces took ${Date.now() - t0} ms`);
|
|
||||||
|
|
||||||
const t1 = Date.now();
|
|
||||||
const topNFunctions = await withProfilingSpan('create_topn_functions', async () => {
|
const topNFunctions = await withProfilingSpan('create_topn_functions', async () => {
|
||||||
return createTopNFunctions(
|
return createTopNFunctions(
|
||||||
stackTraceEvents,
|
stackTraceEvents,
|
||||||
|
@ -72,15 +70,17 @@ export function registerTopNFunctionsSearchRoute({
|
||||||
samplingRate
|
samplingRate
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
logger.info(`creating topN functions took ${Date.now() - t1} ms`);
|
|
||||||
|
|
||||||
logger.info('returning payload response to client');
|
|
||||||
|
|
||||||
return response.ok({
|
return response.ok({
|
||||||
body: topNFunctions,
|
body: topNFunctions,
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return handleRouteHandlerError({ error, logger, response });
|
return handleRouteHandlerError({
|
||||||
|
error,
|
||||||
|
logger,
|
||||||
|
response,
|
||||||
|
message: 'Error while fetching TopN functions',
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
|
@ -1,20 +0,0 @@
|
||||||
/*
|
|
||||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
|
||||||
* or more contributor license agreements. Licensed under the Elastic License
|
|
||||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
|
||||||
* 2.0.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Logger } from '@kbn/core/server';
|
|
||||||
|
|
||||||
export async function logExecutionLatency<T>(
|
|
||||||
logger: Logger,
|
|
||||||
activity: string,
|
|
||||||
func: () => Promise<T>
|
|
||||||
): Promise<T> {
|
|
||||||
const start = Date.now();
|
|
||||||
return await func().then((res) => {
|
|
||||||
logger.info(activity + ' took ' + (Date.now() - start) + 'ms');
|
|
||||||
return res;
|
|
||||||
});
|
|
||||||
}
|
|
|
@ -5,6 +5,7 @@
|
||||||
* 2.0.
|
* 2.0.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { DEFAULT_SPACE_ID } from '@kbn/spaces-plugin/common';
|
||||||
import { RouteRegisterParameters } from '.';
|
import { RouteRegisterParameters } from '.';
|
||||||
import { getClient } from './compat';
|
import { getClient } from './compat';
|
||||||
import { installLatestApmPackage, isApmPackageInstalled } from '../lib/setup/apm_package';
|
import { installLatestApmPackage, isApmPackageInstalled } from '../lib/setup/apm_package';
|
||||||
|
@ -45,6 +46,7 @@ export function registerSetupRoute({
|
||||||
router.get(
|
router.get(
|
||||||
{
|
{
|
||||||
path: paths.HasSetupESResources,
|
path: paths.HasSetupESResources,
|
||||||
|
options: { tags: ['access:profiling'] },
|
||||||
validate: false,
|
validate: false,
|
||||||
},
|
},
|
||||||
async (context, request, response) => {
|
async (context, request, response) => {
|
||||||
|
@ -56,18 +58,18 @@ export function registerSetupRoute({
|
||||||
request,
|
request,
|
||||||
useDefaultAuth: true,
|
useDefaultAuth: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
const setupOptions: ProfilingSetupOptions = {
|
const setupOptions: ProfilingSetupOptions = {
|
||||||
client: clientWithDefaultAuth,
|
client: clientWithDefaultAuth,
|
||||||
logger,
|
logger,
|
||||||
packagePolicyClient: dependencies.start.fleet.packagePolicyService,
|
packagePolicyClient: dependencies.start.fleet.packagePolicyService,
|
||||||
soClient: core.savedObjects.client,
|
soClient: core.savedObjects.client,
|
||||||
spaceId: dependencies.setup.spaces.spacesService.getSpaceId(request),
|
spaceId:
|
||||||
|
dependencies.setup.spaces?.spacesService?.getSpaceId(request) ?? DEFAULT_SPACE_ID,
|
||||||
isCloudEnabled: dependencies.setup.cloud.isCloudEnabled,
|
isCloudEnabled: dependencies.setup.cloud.isCloudEnabled,
|
||||||
config: dependencies.config,
|
config: dependencies.config,
|
||||||
};
|
};
|
||||||
|
|
||||||
logger.info('Checking if Elasticsearch and Fleet are setup for Universal Profiling');
|
|
||||||
|
|
||||||
const state = createDefaultSetupState();
|
const state = createDefaultSetupState();
|
||||||
state.cloud.available = dependencies.setup.cloud.isCloudEnabled;
|
state.cloud.available = dependencies.setup.cloud.isCloudEnabled;
|
||||||
|
|
||||||
|
@ -102,7 +104,12 @@ export function registerSetupRoute({
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return handleRouteHandlerError({ error, logger, response });
|
return handleRouteHandlerError({
|
||||||
|
error,
|
||||||
|
logger,
|
||||||
|
response,
|
||||||
|
message: 'Error while checking plugin setup',
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -110,7 +117,8 @@ export function registerSetupRoute({
|
||||||
router.post(
|
router.post(
|
||||||
{
|
{
|
||||||
path: paths.HasSetupESResources,
|
path: paths.HasSetupESResources,
|
||||||
validate: {},
|
options: { tags: ['access:profiling'] },
|
||||||
|
validate: false,
|
||||||
},
|
},
|
||||||
async (context, request, response) => {
|
async (context, request, response) => {
|
||||||
try {
|
try {
|
||||||
|
@ -126,13 +134,12 @@ export function registerSetupRoute({
|
||||||
logger,
|
logger,
|
||||||
packagePolicyClient: dependencies.start.fleet.packagePolicyService,
|
packagePolicyClient: dependencies.start.fleet.packagePolicyService,
|
||||||
soClient: core.savedObjects.client,
|
soClient: core.savedObjects.client,
|
||||||
spaceId: dependencies.setup.spaces.spacesService.getSpaceId(request),
|
spaceId:
|
||||||
|
dependencies.setup.spaces?.spacesService?.getSpaceId(request) ?? DEFAULT_SPACE_ID,
|
||||||
isCloudEnabled: dependencies.setup.cloud.isCloudEnabled,
|
isCloudEnabled: dependencies.setup.cloud.isCloudEnabled,
|
||||||
config: dependencies.config,
|
config: dependencies.config,
|
||||||
};
|
};
|
||||||
|
|
||||||
logger.info('Setting up Elasticsearch and Fleet for Universal Profiling');
|
|
||||||
|
|
||||||
const state = createDefaultSetupState();
|
const state = createDefaultSetupState();
|
||||||
state.cloud.available = dependencies.setup.cloud.isCloudEnabled;
|
state.cloud.available = dependencies.setup.cloud.isCloudEnabled;
|
||||||
|
|
||||||
|
@ -179,7 +186,12 @@ export function registerSetupRoute({
|
||||||
// and is not guaranteed to complete before Kibana sends a response.
|
// and is not guaranteed to complete before Kibana sends a response.
|
||||||
return response.accepted();
|
return response.accepted();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return handleRouteHandlerError({ error, logger, response });
|
return handleRouteHandlerError({
|
||||||
|
error,
|
||||||
|
logger,
|
||||||
|
response,
|
||||||
|
message: 'Error while setting up Universal Profiling',
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -187,6 +199,7 @@ export function registerSetupRoute({
|
||||||
router.get(
|
router.get(
|
||||||
{
|
{
|
||||||
path: paths.SetupDataCollectionInstructions,
|
path: paths.SetupDataCollectionInstructions,
|
||||||
|
options: { tags: ['access:profiling'] },
|
||||||
validate: false,
|
validate: false,
|
||||||
},
|
},
|
||||||
async (context, request, response) => {
|
async (context, request, response) => {
|
||||||
|
@ -198,7 +211,12 @@ export function registerSetupRoute({
|
||||||
|
|
||||||
return response.ok({ body: setupInstructions });
|
return response.ok({ body: setupInstructions });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return handleRouteHandlerError({ error, logger, response });
|
return handleRouteHandlerError({
|
||||||
|
error,
|
||||||
|
logger,
|
||||||
|
response,
|
||||||
|
message: 'Error while fetching Universal Profiling instructions',
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
|
@ -100,7 +100,6 @@ export async function topNElasticSearchQuery({
|
||||||
}
|
}
|
||||||
|
|
||||||
let totalSampledStackTraces = aggregations.total_count.value ?? 0;
|
let totalSampledStackTraces = aggregations.total_count.value ?? 0;
|
||||||
logger.info('total sampled stacktraces: ' + totalSampledStackTraces);
|
|
||||||
totalSampledStackTraces = Math.floor(totalSampledStackTraces / eventsIndex.sampleRate);
|
totalSampledStackTraces = Math.floor(totalSampledStackTraces / eventsIndex.sampleRate);
|
||||||
|
|
||||||
if (searchField !== ProfilingESField.StacktraceID) {
|
if (searchField !== ProfilingESField.StacktraceID) {
|
||||||
|
@ -139,8 +138,6 @@ export async function topNElasticSearchQuery({
|
||||||
return groupStackFrameMetadataByStackTrace(stackTraces, stackFrames, executables);
|
return groupStackFrameMetadataByStackTrace(stackTraces, stackFrames, executables);
|
||||||
});
|
});
|
||||||
|
|
||||||
logger.info('returning payload response to client');
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
TotalCount: totalSampledStackTraces,
|
TotalCount: totalSampledStackTraces,
|
||||||
TopN: topN,
|
TopN: topN,
|
||||||
|
@ -164,6 +161,7 @@ export function queryTopNCommon({
|
||||||
router.get(
|
router.get(
|
||||||
{
|
{
|
||||||
path: pathName,
|
path: pathName,
|
||||||
|
options: { tags: ['access:profiling'] },
|
||||||
validate: {
|
validate: {
|
||||||
query: schema.object({
|
query: schema.object({
|
||||||
timeFrom: schema.number(),
|
timeFrom: schema.number(),
|
||||||
|
@ -189,7 +187,12 @@ export function queryTopNCommon({
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return handleRouteHandlerError({ error, logger, response });
|
return handleRouteHandlerError({
|
||||||
|
error,
|
||||||
|
logger,
|
||||||
|
response,
|
||||||
|
message: 'Error while fetching TopN functions',
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
|
@ -15,17 +15,17 @@ import { FleetSetupContract, FleetStartContract } from '@kbn/fleet-plugin/server
|
||||||
export interface ProfilingPluginSetupDeps {
|
export interface ProfilingPluginSetupDeps {
|
||||||
observability: ObservabilityPluginSetup;
|
observability: ObservabilityPluginSetup;
|
||||||
features: FeaturesPluginSetup;
|
features: FeaturesPluginSetup;
|
||||||
spaces: SpacesPluginSetup;
|
|
||||||
cloud: CloudSetup;
|
cloud: CloudSetup;
|
||||||
fleet: FleetSetupContract;
|
fleet: FleetSetupContract;
|
||||||
|
spaces?: SpacesPluginSetup;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface ProfilingPluginStartDeps {
|
export interface ProfilingPluginStartDeps {
|
||||||
observability: {};
|
observability: {};
|
||||||
features: {};
|
features: {};
|
||||||
spaces: SpacesPluginStart;
|
|
||||||
cloud: CloudStart;
|
cloud: CloudStart;
|
||||||
fleet: FleetStartContract;
|
fleet: FleetStartContract;
|
||||||
|
spaces?: SpacesPluginStart;
|
||||||
}
|
}
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-empty-interface
|
// eslint-disable-next-line @typescript-eslint/no-empty-interface
|
||||||
|
|
|
@ -14,10 +14,12 @@ export function handleRouteHandlerError({
|
||||||
error,
|
error,
|
||||||
logger,
|
logger,
|
||||||
response,
|
response,
|
||||||
|
message,
|
||||||
}: {
|
}: {
|
||||||
error: any;
|
error: any;
|
||||||
response: KibanaResponseFactory;
|
response: KibanaResponseFactory;
|
||||||
logger: Logger;
|
logger: Logger;
|
||||||
|
message: string;
|
||||||
}) {
|
}) {
|
||||||
if (
|
if (
|
||||||
error instanceof WrappedElasticsearchClientError &&
|
error instanceof WrappedElasticsearchClientError &&
|
||||||
|
@ -34,8 +36,6 @@ export function handleRouteHandlerError({
|
||||||
|
|
||||||
return response.customError({
|
return response.customError({
|
||||||
statusCode: error.statusCode ?? 500,
|
statusCode: error.statusCode ?? 500,
|
||||||
body: {
|
body: { message },
|
||||||
message: error.message,
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue