Define schema for monitoring-sourced telemetry (#94434)
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent 7cc9f01843
commit f62a3153cd
27 changed files with 734 additions and 55 deletions
@@ -6,6 +6,7 @@
  * Side Public License, v 1.
  */
 
+import type { ParsedUsageCollection } from '../ts_parser';
 import { parsedExternallyDefinedCollector } from './parsed_externally_defined_collector';
 import { parsedImportedSchemaCollector } from './parsed_imported_schema';
 import { parsedImportedUsageInterface } from './parsed_imported_usage_interface';
@@ -14,15 +15,18 @@ import { parsedNestedCollector } from './parsed_nested_collector';
 import { parsedSchemaDefinedWithSpreadsCollector } from './parsed_schema_defined_with_spreads_collector';
 import { parsedWorkingCollector } from './parsed_working_collector';
 import { parsedCollectorWithDescription } from './parsed_working_collector_with_description';
-import { ParsedUsageCollection } from '../ts_parser';
+import { parsedStatsCollector } from './parsed_stats_collector';
+import { parsedImportedInterfaceFromExport } from './parsed_imported_interface_from_export';
 
 export const allExtractedCollectors: ParsedUsageCollection[] = [
   ...parsedExternallyDefinedCollector,
+  ...parsedImportedInterfaceFromExport,
   ...parsedImportedSchemaCollector,
   ...parsedImportedUsageInterface,
   parsedIndexedInterfaceWithNoMatchingSchema,
   parsedNestedCollector,
   parsedSchemaDefinedWithSpreadsCollector,
+  ...parsedStatsCollector,
   parsedCollectorWithDescription,
   parsedWorkingCollector,
 ];

@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { SyntaxKind } from 'typescript';
+import { ParsedUsageCollection } from '../ts_parser';
+
+export const parsedImportedInterfaceFromExport: ParsedUsageCollection[] = [
+  [
+    'src/fixtures/telemetry_collectors/imported_interface_from_export/index.ts',
+    {
+      collectorName: 'importing_from_export_collector',
+      schema: {
+        value: {
+          some_field: {
+            type: 'keyword',
+          },
+        },
+      },
+      fetch: {
+        typeName: 'Usage',
+        typeDescriptor: {
+          some_field: {
+            kind: SyntaxKind.StringKeyword,
+            type: 'StringKeyword',
+          },
+        },
+      },
+    },
+  ],
+];

@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { SyntaxKind } from 'typescript';
+import { ParsedUsageCollection } from '../ts_parser';
+
+export const parsedStatsCollector: ParsedUsageCollection[] = [
+  [
+    'src/fixtures/telemetry_collectors/stats_collector.ts',
+    {
+      collectorName: 'my_stats_collector_with_schema',
+      schema: {
+        value: {
+          some_field: {
+            type: 'keyword',
+          },
+        },
+      },
+      fetch: {
+        typeName: 'Usage',
+        typeDescriptor: {
+          some_field: {
+            kind: SyntaxKind.StringKeyword,
+            type: 'StringKeyword',
+          },
+        },
+      },
+    },
+  ],
+];

@@ -24,7 +24,7 @@ describe('extractCollectors', () => {
     const programPaths = await getProgramPaths(configs[0]);
 
     const results = [...extractCollectors(programPaths, tsConfig)];
-    expect(results).toHaveLength(9);
+    expect(results).toHaveLength(11);
     expect(results).toStrictEqual(allExtractedCollectors);
   });
 });

@@ -202,7 +202,7 @@ export function getDescriptor(node: ts.Node, program: ts.Program): Descriptor |
     return getDescriptor(node.typeName, program);
   }
 
-  if (ts.isImportSpecifier(node)) {
+  if (ts.isImportSpecifier(node) || ts.isExportSpecifier(node)) {
     const source = node.getSourceFile();
     const importedModuleName = getModuleSpecifier(node);
 

@@ -15,6 +15,8 @@ import { parsedExternallyDefinedCollector } from './__fixture__/parsed_externall
 import { parsedImportedUsageInterface } from './__fixture__/parsed_imported_usage_interface';
 import { parsedImportedSchemaCollector } from './__fixture__/parsed_imported_schema';
 import { parsedSchemaDefinedWithSpreadsCollector } from './__fixture__/parsed_schema_defined_with_spreads_collector';
+import { parsedStatsCollector } from './__fixture__/parsed_stats_collector';
+import { parsedImportedInterfaceFromExport } from './__fixture__/parsed_imported_interface_from_export';
 
 export function loadFixtureProgram(fixtureName: string) {
   const fixturePath = path.resolve(
@@ -89,6 +91,18 @@ describe('parseUsageCollection', () => {
     expect(result).toEqual(parsedImportedUsageInterface);
   });
 
+  it('parses stats collectors, discarding those without schemas', () => {
+    const { program, sourceFile } = loadFixtureProgram('stats_collector.ts');
+    const result = [...parseUsageCollection(sourceFile, program)];
+    expect(result).toEqual(parsedStatsCollector);
+  });
+
+  it('follows `export { Usage } from "./path"` expressions', () => {
+    const { program, sourceFile } = loadFixtureProgram('imported_interface_from_export/index.ts');
+    const result = [...parseUsageCollection(sourceFile, program)];
+    expect(result).toEqual(parsedImportedInterfaceFromExport);
+  });
+
   it('skips files that do not define a collector', () => {
     const { program, sourceFile } = loadFixtureProgram('file_with_no_collector.ts');
     const result = [...parseUsageCollection(sourceFile, program)];

@@ -41,6 +41,24 @@ export function isMakeUsageCollectorFunction(
   return false;
 }
 
+export function isMakeStatsCollectorFunctionWithSchema(
+  node: ts.Node,
+  sourceFile: ts.SourceFile
+): node is ts.CallExpression {
+  if (ts.isCallExpression(node)) {
+    const isMakeStatsCollector = /makeStatsCollector$/.test(node.expression.getText(sourceFile));
+    if (isMakeStatsCollector) {
+      const collectorConfig = getCollectionConfigNode(node, sourceFile);
+      const schemaProperty = getProperty(collectorConfig, 'schema');
+      if (schemaProperty) {
+        return true;
+      }
+    }
+  }
+
+  return false;
+}
+
 export interface CollectorDetails {
   collectorName: string;
   fetch: { typeName: string; typeDescriptor: Descriptor };
@@ -140,6 +158,7 @@ function extractCollectorDetails(
     throw Error(`usageCollector.schema must be be an object.`);
   }
 
+  // TODO: Try to infer the output type from fetch instead of being explicit
   const collectorNodeType = collectorNode.typeArguments;
   if (!collectorNodeType || collectorNodeType?.length === 0) {
     throw Error(`makeUsageCollector requires a Usage type makeUsageCollector<Usage>({ ... }).`);

@@ -172,7 +191,19 @@ export function sourceHasUsageCollector(sourceFile: ts.SourceFile) {
   }
 
-  return false;
+  return true;
 }
 
+export function sourceHasStatsCollector(sourceFile: ts.SourceFile) {
+  if (sourceFile.isDeclarationFile === true || (sourceFile as any).identifierCount === 0) {
+    return false;
+  }
+
+  const identifiers = (sourceFile as any).identifiers;
+  if (identifiers.get('makeStatsCollector')) {
+    return true;
+  }
+
+  return false;
+}
+
 export type ParsedUsageCollection = [string, CollectorDetails];
@@ -182,9 +213,12 @@ export function* parseUsageCollection(
   program: ts.Program
 ): Generator<ParsedUsageCollection> {
   const relativePath = path.relative(process.cwd(), sourceFile.fileName);
-  if (sourceHasUsageCollector(sourceFile)) {
+  if (sourceHasUsageCollector(sourceFile) || sourceHasStatsCollector(sourceFile)) {
     for (const node of traverseNodes(sourceFile)) {
-      if (isMakeUsageCollectorFunction(node, sourceFile)) {
+      if (
+        isMakeUsageCollectorFunction(node, sourceFile) ||
+        isMakeStatsCollectorFunctionWithSchema(node, sourceFile)
+      ) {
         try {
           const collectorDetails = extractCollectorDetails(node, program, sourceFile);
           yield [relativePath, collectorDetails];

@@ -65,7 +65,9 @@ export function getIdentifierDeclarationFromSource(node: ts.Node, source: ts.Sou
   }
 
   const identifierName = node.getText();
-  const identifierDefinition: ts.Node = (source as any).locals.get(identifierName);
+  const identifierDefinition: ts.Node =
+    (source as any).locals.get(identifierName) ||
+    (source as any).symbol.exports.get(identifierName);
   if (!identifierDefinition) {
     throw new Error(`Unable to find identifier in source ${identifierName}`);
   }

@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { CollectorSet } from '../../../plugins/usage_collection/server/collector';
+import { loggerMock } from '../../../core/server/logging/logger.mock';
+import type { Usage } from './types';
+
+const { makeUsageCollector } = new CollectorSet({
+  logger: loggerMock.create(),
+  maximumWaitTimeForAllCollectorsInS: 0,
+});
+
+export const myCollector = makeUsageCollector<Usage, false>({
+  type: 'importing_from_export_collector',
+  isReady: () => true,
+  fetch() {
+    return {
+      some_field: 'abc',
+    };
+  },
+  schema: {
+    some_field: {
+      type: 'keyword',
+    },
+  },
+});

@@ -0,0 +1,9 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export type { Usage } from './usage_type';

@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export interface Usage {
+  some_field: string;
+}

src/fixtures/telemetry_collectors/stats_collector.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { CollectorSet } from '../../plugins/usage_collection/server/collector';
+import { loggerMock } from '../../core/server/logging/logger.mock';
+
+const { makeStatsCollector } = new CollectorSet({
+  logger: loggerMock.create(),
+  maximumWaitTimeForAllCollectorsInS: 0,
+});
+
+interface Usage {
+  some_field: string;
+}
+
+/**
+ * Stats Collectors are allowed with schema and without them.
+ * We should collect them when the schema is defined.
+ */
+
+export const myCollectorWithSchema = makeStatsCollector<Usage, false>({
+  type: 'my_stats_collector_with_schema',
+  isReady: () => true,
+  fetch() {
+    return {
+      some_field: 'abc',
+    };
+  },
+  schema: {
+    some_field: {
+      type: 'keyword',
+    },
+  },
+});
+
+export const myCollectorWithoutSchema = makeStatsCollector({
+  type: 'my_stats_collector_without_schema',
+  isReady: () => true,
+  fetch() {
+    return {
+      some_field: 'abc',
+    };
+  },
+});

@@ -151,5 +151,23 @@ describe(`assertTelemetryPayload`, () => {
         { im_only_passing_through_data: [{ docs: { field: 1 } }] }
       )
     ).not.toThrow();
+
+    // Even when properties exist
+    expect(() =>
+      assertTelemetryPayload(
+        {
+          root: {
+            properties: {
+              im_only_passing_through_data: {
+                type: 'pass_through',
+                properties: {},
+              },
+            },
+          },
+          plugins: { properties: {} },
+        },
+        { im_only_passing_through_data: [{ docs: { field: 1 } }] }
+      )
+    ).not.toThrow();
   });
 });

@@ -6,7 +6,8 @@
  * Side Public License, v 1.
  */
 
-import { schema, ObjectType, Type } from '@kbn/config-schema';
+import type { ObjectType, Type } from '@kbn/config-schema';
+import { schema } from '@kbn/config-schema';
 import { get } from 'lodash';
 import { set } from '@elastic/safer-lodash-set';
 import type { AllowedSchemaTypes } from 'src/plugins/usage_collection/server';
@@ -38,6 +39,11 @@ function isOneOfCandidate(
  * @param value
  */
 function valueSchemaToConfigSchema(value: TelemetrySchemaValue): Type<unknown> {
+  // We need to check the pass_through type on top of everything
+  if ((value as { type: 'pass_through' }).type === 'pass_through') {
+    return schema.any();
+  }
+
   if ('properties' in value) {
     const { DYNAMIC_KEY, ...properties } = value.properties;
     const schemas: Array<Type<unknown>> = [objectSchemaToConfigSchema({ properties })];
@@ -48,8 +54,6 @@ function valueSchemaToConfigSchema(value: TelemetrySchemaValue): Type<unknown> {
   } else {
     const valueType = value.type; // Copied in here because of TS reasons, it's not available in the `default` case
     switch (value.type) {
-      case 'pass_through':
-        return schema.any();
       case 'boolean':
         return schema.boolean();
       case 'keyword':
@@ -77,9 +81,11 @@ function valueSchemaToConfigSchema(value: TelemetrySchemaValue): Type<unknown> {
 }
 
 function objectSchemaToConfigSchema(objectSchema: TelemetrySchemaObject): ObjectType {
+  const objectEntries = Object.entries(objectSchema.properties);
+
   return schema.object(
     Object.fromEntries(
-      Object.entries(objectSchema.properties).map(([key, value]) => {
+      objectEntries.map(([key, value]) => {
         try {
           return [key, schema.maybe(valueSchemaToConfigSchema(value))];
         } catch (err) {

@@ -1,5 +1,14 @@
-{
-  "output": "plugins/telemetry_collection_xpack/schema/xpack_plugins.json",
-  "root": "plugins/",
-  "exclude": []
-}
+[
+  {
+    "output": "plugins/telemetry_collection_xpack/schema/xpack_plugins.json",
+    "root": "plugins/",
+    "exclude": [
+      "plugins/monitoring/server/telemetry_collection/"
+    ]
+  },
+  {
+    "output": "plugins/telemetry_collection_xpack/schema/xpack_monitoring.json",
+    "root": "plugins/monitoring/server/telemetry_collection/",
+    "exclude": []
+  }
+]

@@ -93,7 +93,7 @@ export function getSettingsCollector(
   false,
   KibanaSettingsCollectorExtraOptions
 >({
-  type: KIBANA_SETTINGS_TYPE,
+  type: 'kibana_settings',
   isReady: () => true,
   schema: {
     xpack: {

@@ -77,7 +77,7 @@ export interface BeatsStats {
       queue?: {
         name?: string;
       };
-      heartbeat?: HeartbeatBase;
+      heartbeat?: Heartbeat;
       functionbeat?: {
         functions?: {
           count?: number;
@@ -91,11 +91,11 @@ export interface BeatsStats {
   };
 }
 
+type Heartbeat = HeartbeatBase & { [key: string]: HeartbeatBase | undefined };
+
 interface HeartbeatBase {
   monitors: number;
   endpoints: number;
-  // I have to add the '| number' bit because otherwise TS complains about 'monitors' and 'endpoints' not being of type HeartbeatBase
-  [key: string]: HeartbeatBase | number | undefined;
 }
 
 export interface BeatsBaseStats {
@@ -122,7 +122,7 @@ export interface BeatsBaseStats {
     count: number;
     architectures: BeatsArchitecture[];
   };
-  heartbeat?: HeartbeatBase;
+  heartbeat?: Heartbeat;
   functionbeat?: {
     functions: {
       count: number;
@@ -237,7 +237,7 @@ export function processResults(
       clusters[clusterUuid].heartbeat = {
         monitors: 0,
         endpoints: 0,
-      };
+      } as Heartbeat; // Needed because TS complains about the additional index signature
     }
     const clusterHb = clusters[clusterUuid].heartbeat!;
 

@@ -5,16 +5,28 @@
  * 2.0.
  */
 
-import { ILegacyClusterClient } from 'kibana/server';
-import { UsageStatsPayload } from 'src/plugins/telemetry_collection_manager/server';
-import { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
+import type { ILegacyClusterClient } from 'kibana/server';
+import type { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
+import type { UsageStatsPayload } from '../../../../../src/plugins/telemetry_collection_manager/server';
+import type { LogstashBaseStats } from './get_logstash_stats';
+import type { BeatsBaseStats } from './get_beats_stats';
 import { getAllStats } from './get_all_stats';
 import { getClusterUuids } from './get_cluster_uuids';
 import { getLicenses } from './get_licenses';
 
-// TODO: To be removed in https://github.com/elastic/kibana/pull/83546
-interface MonitoringCollectorOptions {
-  ignoreForInternalUploader: boolean; // Allow the additional property required by bulk_uploader to be filtered out
+interface MonitoringStats extends UsageStatsPayload {
+  stack_stats: {
+    logstash?: LogstashBaseStats;
+    beats?: BeatsBaseStats;
+    // Intentionally not declaring "kibana" to avoid repetition with "local" telemetry,
+    // and since it should only report it for old versions reporting "too much" monitoring data
+    // [KIBANA_SYSTEM_ID]?: KibanaClusterStat;
+  };
 }
 
+// We need to nest it under a property because fetch must return an object (the schema mandates that)
+interface MonitoringTelemetryUsage {
+  stats: MonitoringStats[];
+}
+
 export function registerMonitoringTelemetryCollection(
@@ -23,14 +35,108 @@ export function registerMonitoringTelemetryCollection(
   maxBucketSize: number
 ) {
   const monitoringStatsCollector = usageCollection.makeStatsCollector<
-    UsageStatsPayload[],
-    true,
-    MonitoringCollectorOptions
+    MonitoringTelemetryUsage,
+    true
   >({
     type: 'monitoringTelemetry',
     isReady: () => true,
-    ignoreForInternalUploader: true, // Used only by monitoring's bulk_uploader to filter out unwanted collectors
     extendFetchContext: { kibanaRequest: true },
+    schema: {
+      stats: {
+        type: 'array',
+        items: {
+          timestamp: { type: 'date' },
+          cluster_uuid: { type: 'keyword' },
+          cluster_name: { type: 'keyword' },
+          version: { type: 'keyword' },
+          cluster_stats: {},
+          stack_stats: {
+            logstash: {
+              versions: {
+                type: 'array',
+                items: {
+                  version: { type: 'keyword' },
+                  count: { type: 'long' },
+                },
+              },
+              count: { type: 'long' },
+              cluster_stats: {
+                collection_types: {
+                  DYNAMIC_KEY: { type: 'long' },
+                },
+                queues: {
+                  DYNAMIC_KEY: { type: 'long' },
+                },
+                plugins: {
+                  type: 'array',
+                  items: {
+                    name: { type: 'keyword' },
+                    count: { type: 'long' },
+                  },
+                },
+                pipelines: {
+                  count: { type: 'long' },
+                  batch_size_max: { type: 'long' },
+                  batch_size_avg: { type: 'long' },
+                  batch_size_min: { type: 'long' },
+                  batch_size_total: { type: 'long' },
+                  workers_max: { type: 'long' },
+                  workers_avg: { type: 'long' },
+                  workers_min: { type: 'long' },
+                  workers_total: { type: 'long' },
+                  sources: {
+                    DYNAMIC_KEY: { type: 'boolean' },
+                  },
+                },
+              },
+            },
+            beats: {
+              versions: { DYNAMIC_KEY: { type: 'long' } },
+              types: { DYNAMIC_KEY: { type: 'long' } },
+              outputs: { DYNAMIC_KEY: { type: 'long' } },
+              queue: { DYNAMIC_KEY: { type: 'long' } },
+              count: { type: 'long' },
+              eventsPublished: { type: 'long' },
+              hosts: { type: 'long' },
+              input: {
+                count: { type: 'long' },
+                names: { type: 'array', items: { type: 'keyword' } },
+              },
+              module: {
+                count: { type: 'long' },
+                names: { type: 'array', items: { type: 'keyword' } },
+              },
+              architecture: {
+                count: { type: 'long' },
+                architectures: {
+                  type: 'array',
+                  items: {
+                    name: { type: 'keyword' },
+                    architecture: { type: 'keyword' },
+                    count: { type: 'long' },
+                  },
+                },
+              },
+              heartbeat: {
+                monitors: { type: 'long' },
+                endpoints: { type: 'long' },
+                DYNAMIC_KEY: {
+                  monitors: { type: 'long' },
+                  endpoints: { type: 'long' },
+                },
+              },
+              functionbeat: {
+                functions: {
+                  count: { type: 'long' },
+                },
+              },
+            },
+          },
+          collection: { type: 'keyword' },
+          collectionSource: { type: 'keyword' },
+        },
+      },
+    },
     fetch: async ({ kibanaRequest }) => {
       const timestamp = Date.now(); // Collect the telemetry from the monitoring indices for this moment.
       // NOTE: Usually, the monitoring indices index stats for each product every 10s (by default).
@@ -45,14 +151,16 @@ export function registerMonitoringTelemetryCollection(
         getLicenses(clusterDetails, callCluster, maxBucketSize),
         getAllStats(clusterDetails, callCluster, timestamp, maxBucketSize),
       ]);
-      return stats.map((stat) => {
-        const license = licenses[stat.cluster_uuid];
-        return {
-          ...(license ? { license } : {}),
-          ...stat,
-          collectionSource: 'monitoring',
-        };
-      });
+      return {
+        stats: stats.map((stat) => {
+          const license = licenses[stat.cluster_uuid];
+          return {
+            ...(license ? { license } : {}),
+            ...stat,
+            collectionSource: 'monitoring',
+          };
+        }),
+      };
     },
   });
   usageCollection.registerCollector(monitoringStatsCollector);

@@ -9,6 +9,7 @@ There are currently 2 files:
 - `xpack_plugins.json`: The X-Pack related schema for the content that will be nested in `stack_stats.kibana.plugins`.
   It is automatically generated by `@kbn/telemetry-tools` based on the `schema` property provided by all the registered Usage Collectors via the `usageCollection.makeUsageCollector` API.
   More details in the [Schema field](../../usage_collection/README.md#schema-field) chapter in the UsageCollection's docs.
+- `xpack_monitoring.json`: It declares the payload sent by the monitoring-sourced telemetry. The actual schema for the payload is declared under `properties.monitoringTelemetry.properties.stats.items`, but due to the general behaviour in the `@kbn/telemetry-tools`, it gets nested down in that path.
 
 NOTE: Despite its similarities to ES mappings, the intention of these files is not to define any index mappings. They should be considered as a tool to understand the format of the payload that will be sent when reporting telemetry to the Remote Service.
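For context on the README entry above, this is roughly how a collector's `schema` property maps into these generated JSON files. A minimal TypeScript sketch, assuming a hypothetical `my_plugin` collector (the collector name, its field, and the abbreviated import paths are illustrative assumptions, not part of this commit; the CollectorSet setup mirrors the fixtures added here):

import { CollectorSet } from 'src/plugins/usage_collection/server/collector';
import { loggerMock } from 'src/core/server/logging/logger.mock';

const { makeUsageCollector } = new CollectorSet({
  logger: loggerMock.create(),
  maximumWaitTimeForAllCollectorsInS: 0,
});

// Hypothetical usage interface, for illustration only.
interface MyPluginUsage {
  enabled_count: number;
}

export const myPluginCollector = makeUsageCollector<MyPluginUsage, false>({
  type: 'my_plugin', // hypothetical collector name
  isReady: () => true,
  fetch: () => ({ enabled_count: 1 }),
  // `@kbn/telemetry-tools` reads this `schema` property...
  schema: {
    enabled_count: { type: 'long' },
  },
});

// ...and emits it into the generated schema file, nested under `properties`:
// "my_plugin": {
//   "properties": {
//     "enabled_count": { "type": "long" }
//   }
// }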
@@ -0,0 +1,250 @@
+{
+  "properties": {
+    "monitoringTelemetry": {
+      "properties": {
+        "stats": {
+          "type": "array",
+          "items": {
+            "properties": {
+              "timestamp": {
+                "type": "date"
+              },
+              "cluster_uuid": {
+                "type": "keyword"
+              },
+              "cluster_name": {
+                "type": "keyword"
+              },
+              "version": {
+                "type": "keyword"
+              },
+              "cluster_stats": {
+                "properties": {}
+              },
+              "stack_stats": {
+                "properties": {
+                  "logstash": {
+                    "properties": {
+                      "versions": {
+                        "type": "array",
+                        "items": {
+                          "properties": {
+                            "version": {
+                              "type": "keyword"
+                            },
+                            "count": {
+                              "type": "long"
+                            }
+                          }
+                        }
+                      },
+                      "count": {
+                        "type": "long"
+                      },
+                      "cluster_stats": {
+                        "properties": {
+                          "collection_types": {
+                            "properties": {
+                              "DYNAMIC_KEY": {
+                                "type": "long"
+                              }
+                            }
+                          },
+                          "queues": {
+                            "properties": {
+                              "DYNAMIC_KEY": {
+                                "type": "long"
+                              }
+                            }
+                          },
+                          "plugins": {
+                            "type": "array",
+                            "items": {
+                              "properties": {
+                                "name": {
+                                  "type": "keyword"
+                                },
+                                "count": {
+                                  "type": "long"
+                                }
+                              }
+                            }
+                          },
+                          "pipelines": {
+                            "properties": {
+                              "count": {
+                                "type": "long"
+                              },
+                              "batch_size_max": {
+                                "type": "long"
+                              },
+                              "batch_size_avg": {
+                                "type": "long"
+                              },
+                              "batch_size_min": {
+                                "type": "long"
+                              },
+                              "batch_size_total": {
+                                "type": "long"
+                              },
+                              "workers_max": {
+                                "type": "long"
+                              },
+                              "workers_avg": {
+                                "type": "long"
+                              },
+                              "workers_min": {
+                                "type": "long"
+                              },
+                              "workers_total": {
+                                "type": "long"
+                              },
+                              "sources": {
+                                "properties": {
+                                  "DYNAMIC_KEY": {
+                                    "type": "boolean"
+                                  }
+                                }
+                              }
+                            }
+                          }
+                        }
+                      }
+                    }
+                  },
+                  "beats": {
+                    "properties": {
+                      "versions": {
+                        "properties": {
+                          "DYNAMIC_KEY": {
+                            "type": "long"
+                          }
+                        }
+                      },
+                      "types": {
+                        "properties": {
+                          "DYNAMIC_KEY": {
+                            "type": "long"
+                          }
+                        }
+                      },
+                      "outputs": {
+                        "properties": {
+                          "DYNAMIC_KEY": {
+                            "type": "long"
+                          }
+                        }
+                      },
+                      "queue": {
+                        "properties": {
+                          "DYNAMIC_KEY": {
+                            "type": "long"
+                          }
+                        }
+                      },
+                      "count": {
+                        "type": "long"
+                      },
+                      "eventsPublished": {
+                        "type": "long"
+                      },
+                      "hosts": {
+                        "type": "long"
+                      },
+                      "input": {
+                        "properties": {
+                          "count": {
+                            "type": "long"
+                          },
+                          "names": {
+                            "type": "array",
+                            "items": {
+                              "type": "keyword"
+                            }
+                          }
+                        }
+                      },
+                      "module": {
+                        "properties": {
+                          "count": {
+                            "type": "long"
+                          },
+                          "names": {
+                            "type": "array",
+                            "items": {
+                              "type": "keyword"
+                            }
+                          }
+                        }
+                      },
+                      "architecture": {
+                        "properties": {
+                          "count": {
+                            "type": "long"
+                          },
+                          "architectures": {
+                            "type": "array",
+                            "items": {
+                              "properties": {
+                                "name": {
+                                  "type": "keyword"
+                                },
+                                "architecture": {
+                                  "type": "keyword"
+                                },
+                                "count": {
+                                  "type": "long"
+                                }
+                              }
+                            }
+                          }
+                        }
+                      },
+                      "heartbeat": {
+                        "properties": {
+                          "monitors": {
+                            "type": "long"
+                          },
+                          "endpoints": {
+                            "type": "long"
+                          },
+                          "DYNAMIC_KEY": {
+                            "properties": {
+                              "monitors": {
+                                "type": "long"
+                              },
+                              "endpoints": {
+                                "type": "long"
+                              }
+                            }
+                          }
+                        }
+                      },
+                      "functionbeat": {
+                        "properties": {
+                          "functions": {
+                            "properties": {
+                              "count": {
+                                "type": "long"
+                              }
+                            }
+                          }
+                        }
+                      }
+                    }
+                  }
+                }
+              },
+              "collection": {
+                "type": "keyword"
+              },
+              "collectionSource": {
+                "type": "keyword"
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+}

@@ -2336,6 +2336,17 @@
       }
     }
   },
+  "kibana_settings": {
+    "properties": {
+      "xpack": {
+        "properties": {
+          "default_admin_email": {
+            "type": "text"
+          }
+        }
+      }
+    }
+  },
   "monitoring": {
     "properties": {
       "hasMonitoringData": {

@@ -5,6 +5,9 @@
     "uid": {
       "type": "keyword"
     },
+    "hkey": {
+      "type": "long"
+    },
     "issue_date": {
       "type": "date"
     },

@@ -11,15 +11,17 @@ import { ElasticsearchClient } from 'src/core/server';
 export interface ESLicense {
   status: string;
   uid: string;
+  hkey: string;
   type: string;
   issue_date: string;
   issue_date_in_millis: number;
   expiry_date: string;
-  expirty_date_in_millis: number;
+  expiry_date_in_millis: number;
   max_nodes: number;
   issued_to: string;
   issuer: string;
   start_date_in_millis: number;
+  max_resource_units: number;
 }
 
 let cachedLicense: ESLicense | undefined;

@@ -139,9 +139,9 @@ describe('Telemetry Collection: Get Aggregated Stats', () => {
     const esClient = mockEsClient();
     const usageCollection = mockUsageCollection({
       ...kibana,
-      monitoringTelemetry: [
-        { collectionSource: 'monitoring', timestamp: new Date().toISOString() },
-      ],
+      monitoringTelemetry: {
+        stats: [{ collectionSource: 'monitoring', timestamp: new Date().toISOString() }],
+      },
     });
     const context = getContext();
 

@@ -44,7 +44,7 @@ export const getStatsWithXpack: StatsGetter<TelemetryAggregatedStats> = async fu
     })
     .reduce((acc, stats) => {
       // Concatenate the telemetry reported via monitoring as additional payloads instead of reporting it inside of stack_stats.kibana.plugins.monitoringTelemetry
-      const monitoringTelemetry = stats.stack_stats.kibana?.plugins?.monitoringTelemetry;
+      const monitoringTelemetry = stats.stack_stats.kibana?.plugins?.monitoringTelemetry?.stats;
       if (monitoringTelemetry) {
         delete stats.stack_stats.kibana!.plugins.monitoringTelemetry;
       }

@@ -5,7 +5,9 @@
  * 2.0.
  */
 
-export default function ({ loadTestFile }) {
+import type { FtrProviderContext } from '../../ftr_provider_context';
+
+export default function ({ loadTestFile }: FtrProviderContext) {
   describe('Telemetry', () => {
     loadTestFile(require.resolve('./telemetry'));
     loadTestFile(require.resolve('./telemetry_local'));
@@ -7,8 +7,19 @@
 
 import expect from '@kbn/expect';
 import moment from 'moment';
-import multiClusterFixture from './fixtures/multicluster';
-import basicClusterFixture from './fixtures/basiccluster';
+import type { SuperTest } from 'supertest';
+import type supertestAsPromised from 'supertest-as-promised';
+import deepmerge from 'deepmerge';
+import type { FtrProviderContext } from '../../ftr_provider_context';
+
+import multiClusterFixture from './fixtures/multicluster.json';
+import basicClusterFixture from './fixtures/basiccluster.json';
+import ossRootTelemetrySchema from '../../../../../src/plugins/telemetry/schema/oss_root.json';
+import xpackRootTelemetrySchema from '../../../../plugins/telemetry_collection_xpack/schema/xpack_root.json';
+import monitoringRootTelemetrySchema from '../../../../plugins/telemetry_collection_xpack/schema/xpack_monitoring.json';
+import ossPluginsTelemetrySchema from '../../../../../src/plugins/telemetry/schema/oss_plugins.json';
+import xpackPluginsTelemetrySchema from '../../../../plugins/telemetry_collection_xpack/schema/xpack_plugins.json';
+import { assertTelemetryPayload } from '../../../../../test/api_integration/apis/telemetry/utils';
 
 /**
  * Update the .monitoring-* documents loaded via the archiver to the recent `timestamp`
@@ -17,7 +28,12 @@ import basicClusterFixture from './fixtures/basiccluster';
  * @param toTimestamp The upper timestamp limit to query the documents from
  * @param timestamp The new timestamp to be set
  */
-function updateMonitoringDates(esSupertest, fromTimestamp, toTimestamp, timestamp) {
+function updateMonitoringDates(
+  esSupertest: SuperTest<supertestAsPromised.Test>,
+  fromTimestamp: string,
+  toTimestamp: string,
+  timestamp: string
+) {
   return Promise.all([
     esSupertest
       .post('/.monitoring-es-*/_update_by_query?refresh=true')
@@ -58,7 +74,7 @@ function updateMonitoringDates(esSupertest, fromTimestamp, toTimestam
   ]);
 }
 
-export default function ({ getService }) {
+export default function ({ getService }: FtrProviderContext) {
   const supertest = getService('supertest');
   const esArchiver = getService('esArchiver');
   const esSupertest = getService('esSupertest');
@@ -66,23 +82,52 @@ export default function ({ getService }) {
   describe('/api/telemetry/v2/clusters/_stats', () => {
     const timestamp = new Date().toISOString();
     describe('monitoring/multicluster', () => {
+      let localXPack: Record<string, unknown>;
+      let monitoring: Array<Record<string, unknown>>;
+
       const archive = 'monitoring/multicluster';
       const fromTimestamp = '2017-08-15T21:00:00.000Z';
       const toTimestamp = '2017-08-16T00:00:00.000Z';
 
       before(async () => {
         await esArchiver.load(archive);
         await updateMonitoringDates(esSupertest, fromTimestamp, toTimestamp, timestamp);
-      });
-      after(() => esArchiver.unload(archive));
-      it('should load multiple trial-license clusters', async () => {
+
         const { body } = await supertest
           .post('/api/telemetry/v2/clusters/_stats')
          .set('kbn-xsrf', 'xxx')
          .send({ unencrypted: true })
          .expect(200);
 
-        expect(body).length(4);
-        const [localXPack, ...monitoring] = body;
+        expect(body.length).to.be.greaterThan(1);
+        localXPack = body.shift();
+        monitoring = body;
       });
+      after(() => esArchiver.unload(archive));
+
+      it('should pass the schema validations', () => {
+        const root = deepmerge(ossRootTelemetrySchema, xpackRootTelemetrySchema);
+
+        // Merging root to monitoring because `kibana` may be passed in some cases for old collection methods reporting to a newer monitoring cluster
+        const monitoringRoot = deepmerge(
+          root,
+          // It's nested because of the way it's collected and declared
+          monitoringRootTelemetrySchema.properties.monitoringTelemetry.properties.stats.items
+        );
+        const plugins = deepmerge(ossPluginsTelemetrySchema, xpackPluginsTelemetrySchema);
+        try {
+          assertTelemetryPayload({ root, plugins }, localXPack);
+          monitoring.forEach((stats) => {
+            assertTelemetryPayload({ root: monitoringRoot, plugins }, stats);
+          });
+        } catch (err) {
+          err.message = `The telemetry schemas in 'x-pack/plugins/telemetry_collection_xpack/schema/' are out-of-date, please update it as required: ${err.message}`;
+          throw err;
+        }
+      });
+
+      it('should load multiple trial-license clusters', async () => {
+        expect(monitoring).length(3);
         expect(localXPack.collectionSource).to.eql('local_xpack');
         expect(monitoring).to.eql(multiClusterFixture.map((item) => ({ ...item, timestamp })));
       });