mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
[Telemetry] Full schema
definition (#90273)
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent
4ea10d9a90
commit
f44916b6aa
23 changed files with 989 additions and 189 deletions
|
@ -2,15 +2,6 @@
|
|||
{
|
||||
"output": "src/plugins/telemetry/schema/oss_plugins.json",
|
||||
"root": "src/plugins/",
|
||||
"exclude": [
|
||||
"src/plugins/kibana_react/",
|
||||
"src/plugins/testbed/",
|
||||
"src/plugins/kibana_utils/"
|
||||
]
|
||||
},
|
||||
{
|
||||
"output": "src/plugins/telemetry/schema/legacy_plugins.json",
|
||||
"root": "src/legacy/server/",
|
||||
"exclude": []
|
||||
}
|
||||
]
|
||||
|
|
17
src/plugins/telemetry/schema/README.md
Normal file
17
src/plugins/telemetry/schema/README.md
Normal file
|
@ -0,0 +1,17 @@
|
|||
# Telemetry Schemas
|
||||
|
||||
This list of `.json` files describes the format of the payloads sent to the Remote Telemetry Service. All the files should follow the schema convention as defined in the `usage_collection` plugin and `@kbn/telemetry-tools`, with the addition of the type `pass_through`. This additional `type` indicates Kibana sends the payload as-is from the output of an external ES query.
|
||||
|
||||
There are currently 2 files:
|
||||
|
||||
- `oss_root.json`: Defines the schema for the payload from the root keys.
|
||||
Manually maintained for now because the frequency it changes should be pretty low.
|
||||
- `oss_plugins.json`: The schema for the content that will be nested in `stack_stats.kibana.plugins`.
|
||||
It is automatically generated by `@kbn/telemetry-tools` based on the `schema` property provided by all the registered Usage Collectors via the `usageCollection.makeUsageCollector` API.
|
||||
More details in the [Schema field](../../usage_collection/README.md#schema-field) chapter in the UsageCollection's docs.
|
||||
|
||||
NOTE: Despite its similarities to ES mappings, the intention of these files is not to define any index mappings. They should be considered as a tool to understand the format of the payload that will be sent when reporting telemetry to the Remote Service.
|
||||
|
||||
## Testing
|
||||
|
||||
Functional tests are defined at `test/api_integration/apis/telemetry/telemetry_local.ts`. They merge both files, and validates the actual output of the telemetry endpoint against the final schema.
|
|
@ -1,3 +0,0 @@
|
|||
{
|
||||
"properties": {}
|
||||
}
|
199
src/plugins/telemetry/schema/oss_root.json
Normal file
199
src/plugins/telemetry/schema/oss_root.json
Normal file
|
@ -0,0 +1,199 @@
|
|||
{
|
||||
"properties": {
|
||||
"timestamp": {
|
||||
"type": "date"
|
||||
},
|
||||
"cluster_uuid": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"cluster_name": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"version": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"collection": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"collectionSource": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"stack_stats": {
|
||||
"properties": {
|
||||
"data": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"properties": {
|
||||
"data_stream": {
|
||||
"properties": {
|
||||
"dataset": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"type": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"package": {
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"shipper": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"pattern_name": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"index_count": {
|
||||
"type": "long"
|
||||
},
|
||||
"ecs_index_count": {
|
||||
"type": "long"
|
||||
},
|
||||
"doc_count": {
|
||||
"type": "long"
|
||||
},
|
||||
"size_in_bytes": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"kibana": {
|
||||
"properties": {
|
||||
"timelion_sheet": {
|
||||
"properties": {
|
||||
"total": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
},
|
||||
"visualization": {
|
||||
"properties": {
|
||||
"total": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
},
|
||||
"search": {
|
||||
"properties": {
|
||||
"total": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
},
|
||||
"index_pattern": {
|
||||
"properties": {
|
||||
"total": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
},
|
||||
"dashboard": {
|
||||
"properties": {
|
||||
"total": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
},
|
||||
"graph_workspace": {
|
||||
"properties": {
|
||||
"total": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
},
|
||||
"count": {
|
||||
"type": "short"
|
||||
},
|
||||
"indices": {
|
||||
"type": "short"
|
||||
},
|
||||
"os": {
|
||||
"properties": {
|
||||
"platforms": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"properties": {
|
||||
"platform": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"count": {
|
||||
"type": "short"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"platformReleases": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"properties": {
|
||||
"platformRelease": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"count": {
|
||||
"type": "short"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"distros": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"properties": {
|
||||
"distro": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"count": {
|
||||
"type": "short"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"distroReleases": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"properties": {
|
||||
"distroRelease": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"count": {
|
||||
"type": "short"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"versions": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"properties": {
|
||||
"version": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"count": {
|
||||
"type": "short"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"plugins": {
|
||||
"properties": {
|
||||
"THIS_WILL_BE_REPLACED_BY_THE_PLUGINS_JSON": {
|
||||
"type": "text"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"cluster_stats": {
|
||||
"type": "pass_through"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -7,27 +7,12 @@
|
|||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import _ from 'lodash';
|
||||
import { basicUiCounters } from './__fixtures__/ui_counters';
|
||||
import { FtrProviderContext } from '../../ftr_provider_context';
|
||||
import { SavedObject } from '../../../../src/core/server';
|
||||
/*
|
||||
* Create a single-level array with strings for all the paths to values in the
|
||||
* source object, up to 3 deep. Going deeper than 3 causes a bit too much churn
|
||||
* in the tests.
|
||||
*/
|
||||
function flatKeys(source: Record<string, unknown>) {
|
||||
const recursivelyFlatKeys = (obj: unknown, path: string[] = [], depth = 0): string[] => {
|
||||
return depth < 3 && _.isObject(obj)
|
||||
? Object.entries(obj).reduce(
|
||||
(acc, [k, v]) => [...acc, ...recursivelyFlatKeys(v, [...path, k], depth + 1)],
|
||||
[] as string[]
|
||||
)
|
||||
: [path.join('.')];
|
||||
};
|
||||
|
||||
return _.uniq(_.flattenDeep(recursivelyFlatKeys(source))).sort((a, b) => a.localeCompare(b));
|
||||
}
|
||||
import ossRootTelemetrySchema from '../../../../src/plugins/telemetry/schema/oss_root.json';
|
||||
import ossPluginsTelemetrySchema from '../../../../src/plugins/telemetry/schema/oss_plugins.json';
|
||||
import { assertTelemetryPayload, flatKeys } from './utils';
|
||||
|
||||
export default function ({ getService }: FtrProviderContext) {
|
||||
const supertest = getService('supertest');
|
||||
|
@ -46,46 +31,110 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
await es.indices.delete({ index: 'filebeat-telemetry_tests_logs' });
|
||||
});
|
||||
|
||||
it('should pull local stats and validate data types', async () => {
|
||||
const { body } = await supertest
|
||||
.post('/api/telemetry/v2/clusters/_stats')
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
.send({ unencrypted: true })
|
||||
.expect(200);
|
||||
describe('validate data types', () => {
|
||||
let stats: Record<string, any>;
|
||||
|
||||
expect(body.length).to.be(1);
|
||||
const stats = body[0];
|
||||
expect(stats.collection).to.be('local');
|
||||
expect(stats.collectionSource).to.be('local');
|
||||
expect(stats.license).to.be(undefined); // OSS cannot get the license
|
||||
expect(stats.stack_stats.kibana.count).to.be.a('number');
|
||||
expect(stats.stack_stats.kibana.indices).to.be.a('number');
|
||||
expect(stats.stack_stats.kibana.os.platforms[0].platform).to.be.a('string');
|
||||
expect(stats.stack_stats.kibana.os.platforms[0].count).to.be(1);
|
||||
expect(stats.stack_stats.kibana.os.platformReleases[0].platformRelease).to.be.a('string');
|
||||
expect(stats.stack_stats.kibana.os.platformReleases[0].count).to.be(1);
|
||||
expect(stats.stack_stats.kibana.plugins.telemetry.opt_in_status).to.be(false);
|
||||
expect(stats.stack_stats.kibana.plugins.telemetry.usage_fetcher).to.be.a('string');
|
||||
expect(stats.stack_stats.kibana.plugins.stack_management).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.ui_metric).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.ui_counters).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.application_usage).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.kql.defaultQueryLanguage).to.be.a('string');
|
||||
expect(stats.stack_stats.kibana.plugins.localization).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.csp.strict).to.be(true);
|
||||
expect(stats.stack_stats.kibana.plugins.csp.warnLegacyBrowsers).to.be(true);
|
||||
expect(stats.stack_stats.kibana.plugins.csp.rulesChangedFromDefault).to.be(false);
|
||||
before('pull local stats', async () => {
|
||||
const { body } = await supertest
|
||||
.post('/api/telemetry/v2/clusters/_stats')
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
.send({ unencrypted: true })
|
||||
.expect(200);
|
||||
|
||||
// Testing stack_stats.data
|
||||
expect(stats.stack_stats.data).to.be.an('object');
|
||||
expect(stats.stack_stats.data).to.be.an('array');
|
||||
expect(stats.stack_stats.data[0]).to.be.an('object');
|
||||
expect(stats.stack_stats.data[0].pattern_name).to.be('filebeat');
|
||||
expect(stats.stack_stats.data[0].shipper).to.be('filebeat');
|
||||
expect(stats.stack_stats.data[0].index_count).to.be(1);
|
||||
expect(stats.stack_stats.data[0].doc_count).to.be(0);
|
||||
expect(stats.stack_stats.data[0].ecs_index_count).to.be(0);
|
||||
expect(stats.stack_stats.data[0].size_in_bytes).to.be.a('number');
|
||||
expect(body.length).to.be(1);
|
||||
stats = body[0];
|
||||
});
|
||||
|
||||
it('should pass the schema validation', () => {
|
||||
try {
|
||||
assertTelemetryPayload(
|
||||
{ root: ossRootTelemetrySchema, plugins: ossPluginsTelemetrySchema },
|
||||
stats
|
||||
);
|
||||
} catch (err) {
|
||||
err.message = `The telemetry schemas in 'src/plugins/telemetry/schema/' are out-of-date, please update it as required: ${err.message}`;
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
it('should pass ad-hoc enforced validations', () => {
|
||||
expect(stats.collection).to.be('local');
|
||||
expect(stats.collectionSource).to.be('local');
|
||||
expect(stats.license).to.be(undefined); // OSS cannot get the license
|
||||
expect(stats.stack_stats.kibana.count).to.be.a('number');
|
||||
expect(stats.stack_stats.kibana.indices).to.be.a('number');
|
||||
expect(stats.stack_stats.kibana.os.platforms[0].platform).to.be.a('string');
|
||||
expect(stats.stack_stats.kibana.os.platforms[0].count).to.be(1);
|
||||
expect(stats.stack_stats.kibana.os.platformReleases[0].platformRelease).to.be.a('string');
|
||||
expect(stats.stack_stats.kibana.os.platformReleases[0].count).to.be(1);
|
||||
expect(stats.stack_stats.kibana.plugins.telemetry.opt_in_status).to.be(false);
|
||||
expect(stats.stack_stats.kibana.plugins.telemetry.usage_fetcher).to.be.a('string');
|
||||
expect(stats.stack_stats.kibana.plugins.stack_management).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.ui_metric).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.ui_counters).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.application_usage).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.kql.defaultQueryLanguage).to.be.a('string');
|
||||
expect(stats.stack_stats.kibana.plugins.localization).to.be.an('object');
|
||||
expect(stats.stack_stats.kibana.plugins.csp.strict).to.be(true);
|
||||
expect(stats.stack_stats.kibana.plugins.csp.warnLegacyBrowsers).to.be(true);
|
||||
expect(stats.stack_stats.kibana.plugins.csp.rulesChangedFromDefault).to.be(false);
|
||||
|
||||
// Testing stack_stats.data
|
||||
expect(stats.stack_stats.data).to.be.an('object');
|
||||
expect(stats.stack_stats.data).to.be.an('array');
|
||||
expect(stats.stack_stats.data[0]).to.be.an('object');
|
||||
expect(stats.stack_stats.data[0].pattern_name).to.be('filebeat');
|
||||
expect(stats.stack_stats.data[0].shipper).to.be('filebeat');
|
||||
expect(stats.stack_stats.data[0].index_count).to.be(1);
|
||||
expect(stats.stack_stats.data[0].doc_count).to.be(0);
|
||||
expect(stats.stack_stats.data[0].ecs_index_count).to.be(0);
|
||||
expect(stats.stack_stats.data[0].size_in_bytes).to.be.a('number');
|
||||
});
|
||||
|
||||
it('should validate mandatory fields exist', () => {
|
||||
const actual = flatKeys(stats);
|
||||
expect(actual).to.be.an('array');
|
||||
const expected = [
|
||||
'cluster_name',
|
||||
'cluster_stats.cluster_uuid',
|
||||
'cluster_stats.indices.analysis',
|
||||
'cluster_stats.indices.completion',
|
||||
'cluster_stats.indices.count',
|
||||
'cluster_stats.indices.docs',
|
||||
'cluster_stats.indices.fielddata',
|
||||
'cluster_stats.indices.mappings',
|
||||
'cluster_stats.indices.query_cache',
|
||||
'cluster_stats.indices.segments',
|
||||
'cluster_stats.indices.shards',
|
||||
'cluster_stats.indices.store',
|
||||
'cluster_stats.nodes.count',
|
||||
'cluster_stats.nodes.discovery_types',
|
||||
'cluster_stats.nodes.fs',
|
||||
'cluster_stats.nodes.ingest',
|
||||
'cluster_stats.nodes.jvm',
|
||||
'cluster_stats.nodes.network_types',
|
||||
'cluster_stats.nodes.os',
|
||||
'cluster_stats.nodes.packaging_types',
|
||||
'cluster_stats.nodes.plugins',
|
||||
'cluster_stats.nodes.process',
|
||||
'cluster_stats.nodes.versions',
|
||||
'cluster_stats.nodes.usage',
|
||||
'cluster_stats.status',
|
||||
'cluster_stats.timestamp',
|
||||
'cluster_uuid',
|
||||
'collection',
|
||||
'collectionSource',
|
||||
'stack_stats.kibana.count',
|
||||
'stack_stats.kibana.indices',
|
||||
'stack_stats.kibana.os',
|
||||
'stack_stats.kibana.plugins',
|
||||
'stack_stats.kibana.versions',
|
||||
'timestamp',
|
||||
'version',
|
||||
];
|
||||
|
||||
expect(expected.every((m) => actual.includes(m))).to.be.ok();
|
||||
});
|
||||
});
|
||||
|
||||
describe('UI Counters telemetry', () => {
|
||||
|
@ -104,59 +153,6 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
});
|
||||
});
|
||||
|
||||
it('should pull local stats and validate fields', async () => {
|
||||
const { body } = await supertest
|
||||
.post('/api/telemetry/v2/clusters/_stats')
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
.send({ unencrypted: true })
|
||||
.expect(200);
|
||||
|
||||
const stats = body[0];
|
||||
|
||||
const actual = flatKeys(stats);
|
||||
expect(actual).to.be.an('array');
|
||||
const expected = [
|
||||
'cluster_name',
|
||||
'cluster_stats.cluster_uuid',
|
||||
'cluster_stats.indices.analysis',
|
||||
'cluster_stats.indices.completion',
|
||||
'cluster_stats.indices.count',
|
||||
'cluster_stats.indices.docs',
|
||||
'cluster_stats.indices.fielddata',
|
||||
'cluster_stats.indices.mappings',
|
||||
'cluster_stats.indices.query_cache',
|
||||
'cluster_stats.indices.segments',
|
||||
'cluster_stats.indices.shards',
|
||||
'cluster_stats.indices.store',
|
||||
'cluster_stats.nodes.count',
|
||||
'cluster_stats.nodes.discovery_types',
|
||||
'cluster_stats.nodes.fs',
|
||||
'cluster_stats.nodes.ingest',
|
||||
'cluster_stats.nodes.jvm',
|
||||
'cluster_stats.nodes.network_types',
|
||||
'cluster_stats.nodes.os',
|
||||
'cluster_stats.nodes.packaging_types',
|
||||
'cluster_stats.nodes.plugins',
|
||||
'cluster_stats.nodes.process',
|
||||
'cluster_stats.nodes.versions',
|
||||
'cluster_stats.nodes.usage',
|
||||
'cluster_stats.status',
|
||||
'cluster_stats.timestamp',
|
||||
'cluster_uuid',
|
||||
'collection',
|
||||
'collectionSource',
|
||||
'stack_stats.kibana.count',
|
||||
'stack_stats.kibana.indices',
|
||||
'stack_stats.kibana.os',
|
||||
'stack_stats.kibana.plugins',
|
||||
'stack_stats.kibana.versions',
|
||||
'timestamp',
|
||||
'version',
|
||||
];
|
||||
|
||||
expect(expected.every((m) => actual.includes(m))).to.be.ok();
|
||||
});
|
||||
|
||||
describe('application usage limits', () => {
|
||||
function createSavedObject(viewId?: string) {
|
||||
return supertest
|
||||
|
|
119
test/api_integration/apis/telemetry/utils/flat_keys.test.js
Normal file
119
test/api_integration/apis/telemetry/utils/flat_keys.test.js
Normal file
|
@ -0,0 +1,119 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/*
|
||||
* It's a JS file because we cannot use Jest types in here because of a clash in the `expect` types
|
||||
*/
|
||||
|
||||
import { flatKeys } from './flat_keys';
|
||||
|
||||
describe(`flatKeys`, () => {
|
||||
test('no keys to be listed', () => {
|
||||
expect(flatKeys({})).toStrictEqual([]);
|
||||
});
|
||||
test('one-level list', () => {
|
||||
expect(
|
||||
flatKeys({
|
||||
prop1: 1,
|
||||
prop2: 'a',
|
||||
prop3: true,
|
||||
prop4: [],
|
||||
})
|
||||
).toStrictEqual(['prop1', 'prop2', 'prop3', 'prop4']);
|
||||
});
|
||||
test('two-level list', () => {
|
||||
expect(
|
||||
flatKeys({
|
||||
prop1: 1,
|
||||
prop2: 'a',
|
||||
prop3: true,
|
||||
prop4: [],
|
||||
prop5: [1],
|
||||
prop6: {
|
||||
prop6_1: 1,
|
||||
},
|
||||
})
|
||||
).toStrictEqual(['prop1', 'prop2', 'prop3', 'prop4', 'prop5.0', 'prop6.prop6_1']);
|
||||
});
|
||||
test('three-level list', () => {
|
||||
expect(
|
||||
flatKeys({
|
||||
prop1: 1,
|
||||
prop2: 'a',
|
||||
prop3: true,
|
||||
prop4: [],
|
||||
prop5: [1],
|
||||
prop6: {
|
||||
prop6_1: 1,
|
||||
prop6_2: {
|
||||
prop6_2_1: 1,
|
||||
},
|
||||
},
|
||||
prop7: [{ a: 1, b: [] }],
|
||||
prop8: [1, true, { a: 1 }],
|
||||
})
|
||||
).toStrictEqual([
|
||||
'prop1',
|
||||
'prop2',
|
||||
'prop3',
|
||||
'prop4',
|
||||
'prop5.0',
|
||||
'prop6.prop6_1',
|
||||
'prop6.prop6_2.prop6_2_1',
|
||||
'prop7.0.a',
|
||||
'prop7.0.b',
|
||||
'prop8.0',
|
||||
'prop8.1',
|
||||
'prop8.2.a',
|
||||
]);
|
||||
});
|
||||
test('four-level+ list: it stays at 3 levels only', () => {
|
||||
expect(
|
||||
flatKeys({
|
||||
prop1: 1,
|
||||
prop2: 'a',
|
||||
prop3: true,
|
||||
prop4: [],
|
||||
prop5: [1],
|
||||
prop6: {
|
||||
prop6_1: 1,
|
||||
prop6_2: {
|
||||
prop6_2_1: 1,
|
||||
prop6_2_2: {
|
||||
prop6_2_2_1: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
prop7: [{ a: 1, b: [], c: [1], d: [{ a: 1 }], e: [1, { a: 1 }] }],
|
||||
prop8: [1, true, { a: 1 }],
|
||||
})
|
||||
).toStrictEqual([
|
||||
'prop1',
|
||||
'prop2',
|
||||
'prop3',
|
||||
'prop4',
|
||||
'prop5.0',
|
||||
'prop6.prop6_1',
|
||||
'prop6.prop6_2.prop6_2_1',
|
||||
'prop6.prop6_2.prop6_2_2',
|
||||
// 'prop6.prop6_2.prop6_2_2.prop6_2_2_1', Not reported because of the depth-limit
|
||||
'prop7.0.a',
|
||||
'prop7.0.b',
|
||||
'prop7.0.c',
|
||||
// 'prop7.0.c.0', Not reported because of the depth-limit
|
||||
'prop7.0.d',
|
||||
// 'prop7.0.d.0.a', Not reported because of the depth-limit
|
||||
'prop7.0.e',
|
||||
// 'prop7.0.e.0', Not reported because of the depth-limit
|
||||
// 'prop7.0.e.1.a', Not reported because of the depth-limit
|
||||
'prop8.0',
|
||||
'prop8.1',
|
||||
'prop8.2.a',
|
||||
]);
|
||||
});
|
||||
});
|
28
test/api_integration/apis/telemetry/utils/flat_keys.ts
Normal file
28
test/api_integration/apis/telemetry/utils/flat_keys.ts
Normal file
|
@ -0,0 +1,28 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
|
||||
/**
|
||||
* Create a single-level array with strings for all the paths to values in the
|
||||
* source object, up to 3 deep. Going deeper than 3 causes a bit too much churn
|
||||
* in the tests.
|
||||
* @param source The object to extract the keys from.
|
||||
*/
|
||||
export function flatKeys(source: Record<string, unknown>) {
|
||||
const recursivelyFlatKeys = (obj: unknown, path: string[] = [], depth = 0): string[] => {
|
||||
return depth < 3 && _.isObject(obj) && _.size(obj) > 0
|
||||
? Object.entries(obj).reduce(
|
||||
(acc, [k, v]) => [...acc, ...recursivelyFlatKeys(v, [...path, k], depth + 1)],
|
||||
[] as string[]
|
||||
)
|
||||
: [path.join('.')].filter(Boolean);
|
||||
};
|
||||
|
||||
return _.uniq(_.flattenDeep(recursivelyFlatKeys(source))).sort((a, b) => a.localeCompare(b));
|
||||
}
|
10
test/api_integration/apis/telemetry/utils/index.ts
Normal file
10
test/api_integration/apis/telemetry/utils/index.ts
Normal file
|
@ -0,0 +1,10 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export { flatKeys } from './flat_keys';
|
||||
export { assertTelemetryPayload } from './schema_to_config_schema';
|
|
@ -0,0 +1,155 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/*
|
||||
* It's a JS file because we cannot use Jest types in here because of a clash in the `expect` types
|
||||
*/
|
||||
|
||||
import { assertTelemetryPayload } from './schema_to_config_schema';
|
||||
|
||||
describe(`assertTelemetryPayload`, () => {
|
||||
test('empty schemas => errors with malformed schema', () => {
|
||||
// @ts-expect-error: root and plugins don't match expected types
|
||||
expect(() => assertTelemetryPayload({ root: {}, plugins: {} }, {})).toThrow(/Malformed schema/);
|
||||
});
|
||||
test('minimal schemas and empty stats => pass', () => {
|
||||
expect(() =>
|
||||
// @ts-expect-error: root doesn't match expected types
|
||||
assertTelemetryPayload({ root: {}, plugins: { properties: {} } }, {})
|
||||
).not.toThrow();
|
||||
});
|
||||
test('stats has fields not defined in the schema => fail', () => {
|
||||
expect(() =>
|
||||
// @ts-expect-error: root doesn't match expected types
|
||||
assertTelemetryPayload({ root: {}, plugins: { properties: {} } }, { version: 'some-version' })
|
||||
).toThrow('[version]: definition for this key is missing. Received `"some-version"`');
|
||||
});
|
||||
test('stats has nested-fields not defined in the schema => fail', () => {
|
||||
expect(() =>
|
||||
assertTelemetryPayload(
|
||||
// @ts-expect-error: root doesn't match expected types
|
||||
{ root: {}, plugins: { properties: {} } },
|
||||
{ an_array: [{ docs: { missing: 1 } }] }
|
||||
)
|
||||
).toThrow(
|
||||
'[an_array]: definition for this key is missing. Received `[{"docs":{"missing":1}}]`'
|
||||
);
|
||||
expect(() =>
|
||||
assertTelemetryPayload(
|
||||
{
|
||||
root: {
|
||||
properties: {
|
||||
an_array: {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: { properties: {} },
|
||||
},
|
||||
{ an_array: [{ docs: { missing: 1 } }] }
|
||||
)
|
||||
).toThrow('[an_array.0.docs]: definition for this key is missing. Received `{"missing":1}`');
|
||||
expect(() =>
|
||||
assertTelemetryPayload(
|
||||
{
|
||||
root: {
|
||||
properties: {
|
||||
an_array: {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
docs: {
|
||||
properties: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: { properties: {} },
|
||||
},
|
||||
{ an_array: [{ docs: { missing: 1 } }] }
|
||||
)
|
||||
).toThrow('[an_array.0.docs.missing]: definition for this key is missing. Received `1`');
|
||||
});
|
||||
test('stats has nested-fields defined in the schema, but with wrong type => fail', () => {
|
||||
expect(() =>
|
||||
assertTelemetryPayload(
|
||||
{
|
||||
root: {
|
||||
properties: {
|
||||
an_array: {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
docs: {
|
||||
properties: {
|
||||
field: { type: 'short' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: { properties: {} },
|
||||
},
|
||||
{ an_array: [{ docs: { field: 'abc' } }] }
|
||||
)
|
||||
).toThrow(`[an_array.0.docs.field]: types that failed validation:
|
||||
- [an_array.0.docs.field.0]: expected value of type [number] but got [string]
|
||||
- [an_array.0.docs.field.1]: expected value to equal [null]`);
|
||||
});
|
||||
test('stats has nested-fields defined in the schema => succeed', () => {
|
||||
expect(() =>
|
||||
assertTelemetryPayload(
|
||||
{
|
||||
root: {
|
||||
properties: {
|
||||
an_array: {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
docs: {
|
||||
properties: {
|
||||
field: { type: 'short' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: { properties: {} },
|
||||
},
|
||||
{ an_array: [{ docs: { field: 1 } }] }
|
||||
)
|
||||
).not.toThrow();
|
||||
});
|
||||
|
||||
test('allow pass_through properties', () => {
|
||||
expect(() =>
|
||||
assertTelemetryPayload(
|
||||
{
|
||||
root: {
|
||||
properties: {
|
||||
im_only_passing_through_data: {
|
||||
type: 'pass_through',
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: { properties: {} },
|
||||
},
|
||||
{ im_only_passing_through_data: [{ docs: { field: 1 } }] }
|
||||
)
|
||||
).not.toThrow();
|
||||
});
|
||||
});
|
|
@ -0,0 +1,140 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { schema, ObjectType, Type } from '@kbn/config-schema';
|
||||
import { get } from 'lodash';
|
||||
import { set } from '@elastic/safer-lodash-set';
|
||||
import type { AllowedSchemaTypes } from 'src/plugins/usage_collection/server';
|
||||
|
||||
/**
|
||||
* Type that defines all the possible values that the Telemetry Schema accepts.
|
||||
* These types definitions are helping to identify earlier the possible missing `properties` nesting when
|
||||
* manually defining the schemas.
|
||||
*/
|
||||
export type TelemetrySchemaValue =
|
||||
| {
|
||||
type: AllowedSchemaTypes | 'pass_through' | string;
|
||||
}
|
||||
| { type: 'array'; items: TelemetrySchemaValue }
|
||||
| TelemetrySchemaObject;
|
||||
|
||||
export interface TelemetrySchemaObject {
|
||||
properties: Record<string, TelemetrySchemaValue>;
|
||||
}
|
||||
|
||||
function isOneOfCandidate(
|
||||
schemas: Array<Type<unknown>>
|
||||
): schemas is [Type<unknown> | Type<unknown>] {
|
||||
return schemas.length === 2;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts each telemetry schema value to the @kbn/config-schema equivalent
|
||||
* @param value
|
||||
*/
|
||||
function valueSchemaToConfigSchema(value: TelemetrySchemaValue): Type<unknown> {
|
||||
if ('properties' in value) {
|
||||
const { DYNAMIC_KEY, ...properties } = value.properties;
|
||||
const schemas: Array<Type<unknown>> = [objectSchemaToConfigSchema({ properties })];
|
||||
if (DYNAMIC_KEY) {
|
||||
schemas.push(schema.recordOf(schema.string(), valueSchemaToConfigSchema(DYNAMIC_KEY)));
|
||||
}
|
||||
return isOneOfCandidate(schemas) ? schema.oneOf(schemas) : schemas[0];
|
||||
} else {
|
||||
const valueType = value.type; // Copied in here because of TS reasons, it's not available in the `default` case
|
||||
switch (value.type) {
|
||||
case 'pass_through':
|
||||
return schema.any();
|
||||
case 'boolean':
|
||||
return schema.boolean();
|
||||
case 'keyword':
|
||||
case 'text':
|
||||
case 'date':
|
||||
return schema.string();
|
||||
case 'byte':
|
||||
case 'double':
|
||||
case 'float':
|
||||
case 'integer':
|
||||
case 'long':
|
||||
case 'short':
|
||||
// Some plugins return `null` when there is no number to report
|
||||
return schema.oneOf([schema.number(), schema.literal(null)]);
|
||||
case 'array':
|
||||
if ('items' in value) {
|
||||
return schema.arrayOf(valueSchemaToConfigSchema(value.items));
|
||||
}
|
||||
default:
|
||||
throw new Error(
|
||||
`Unsupported schema type ${valueType}. Did you forget to wrap your object definition in a nested 'properties' field?`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function objectSchemaToConfigSchema(objectSchema: TelemetrySchemaObject): ObjectType {
|
||||
return schema.object(
|
||||
Object.fromEntries(
|
||||
Object.entries(objectSchema.properties).map(([key, value]) => {
|
||||
try {
|
||||
return [key, schema.maybe(valueSchemaToConfigSchema(value))];
|
||||
} catch (err) {
|
||||
err.failedKey = [key, ...(err.failedKey || [])];
|
||||
throw err;
|
||||
}
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the JSON generated from the Usage Collection schema to a @kbn/config-schema object
|
||||
* so it can be used for validation. All entries are considered optional.
|
||||
* @param telemetrySchema JSON generated by @kbn/telemetry-tools from the Usage Collection schemas
|
||||
*/
|
||||
function convertSchemaToConfigSchema(telemetrySchema: {
|
||||
properties: Record<string, TelemetrySchemaValue>;
|
||||
}): ObjectType {
|
||||
try {
|
||||
return objectSchemaToConfigSchema(telemetrySchema);
|
||||
} catch (err) {
|
||||
if (err.failedKey) {
|
||||
err.message = `Malformed schema for key [${err.failedKey.join('.')}]: ${err.message}`;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges the telemetrySchema, generates a @kbn/config-schema version from it, and uses it to validate stats.
|
||||
* @param telemetrySchema The JSON schema definitions for root and plugins
|
||||
* @param stats The full output of the telemetry plugin
|
||||
*/
|
||||
export function assertTelemetryPayload(
|
||||
telemetrySchema: { root: TelemetrySchemaObject; plugins: TelemetrySchemaObject },
|
||||
stats: unknown
|
||||
): void {
|
||||
const fullSchema = telemetrySchema.root;
|
||||
set(
|
||||
fullSchema,
|
||||
'properties.stack_stats.properties.kibana.properties.plugins',
|
||||
telemetrySchema.plugins
|
||||
);
|
||||
const ossTelemetryValidationSchema = convertSchemaToConfigSchema(fullSchema);
|
||||
|
||||
// Run @kbn/config-schema validation to the entire payload
|
||||
try {
|
||||
ossTelemetryValidationSchema.validate(stats);
|
||||
} catch (err) {
|
||||
// "[path.to.key]: definition for this key is missing"
|
||||
const [, pathToKey] = err.message.match(/^\[(.*)\]\: definition for this key is missing/) ?? [];
|
||||
if (pathToKey) {
|
||||
err.message += `. Received \`${JSON.stringify(get(stats, pathToKey))}\``;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
13
test/api_integration/jest.config.js
Normal file
13
test/api_integration/jest.config.js
Normal file
|
@ -0,0 +1,13 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
preset: '@kbn/test',
|
||||
rootDir: '../..',
|
||||
roots: ['<rootDir>/test/api_integration'],
|
||||
};
|
|
@ -45,7 +45,7 @@ export function createActionsUsageCollector(
|
|||
try {
|
||||
const doc = await getLatestTaskState(await taskManager);
|
||||
// get the accumulated state from the recurring task
|
||||
const state: ActionsUsage = get(doc, 'state') as ActionsUsage;
|
||||
const { runs, ...state } = get(doc, 'state') as ActionsUsage & { runs: number };
|
||||
|
||||
return {
|
||||
...state,
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
|
||||
import { LegacyAPICaller } from 'kibana/server';
|
||||
import { SearchResponse } from 'elasticsearch';
|
||||
import { AlertsUsage } from './types';
|
||||
|
||||
const alertTypeMetric = {
|
||||
scripted_metric: {
|
||||
|
@ -34,14 +35,22 @@ const alertTypeMetric = {
|
|||
},
|
||||
};
|
||||
|
||||
export async function getTotalCountAggregations(callCluster: LegacyAPICaller, kibanaInex: string) {
|
||||
export async function getTotalCountAggregations(
|
||||
callCluster: LegacyAPICaller,
|
||||
kibanaInex: string
|
||||
): Promise<
|
||||
Pick<
|
||||
AlertsUsage,
|
||||
'count_total' | 'count_by_type' | 'throttle_time' | 'schedule_time' | 'connectors_per_alert'
|
||||
>
|
||||
> {
|
||||
const throttleTimeMetric = {
|
||||
scripted_metric: {
|
||||
init_script: 'state.min = 0; state.max = 0; state.totalSum = 0; state.totalCount = 0;',
|
||||
map_script: `
|
||||
if (doc['alert.throttle'].size() > 0) {
|
||||
def throttle = doc['alert.throttle'].value;
|
||||
|
||||
|
||||
if (throttle.length() > 1) {
|
||||
// get last char
|
||||
String timeChar = throttle.substring(throttle.length() - 1);
|
||||
|
@ -51,7 +60,7 @@ export async function getTotalCountAggregations(callCluster: LegacyAPICaller, ki
|
|||
if (throttle.chars().allMatch(Character::isDigit)) {
|
||||
// using of regex is not allowed in painless language
|
||||
int parsed = Integer.parseInt(throttle);
|
||||
|
||||
|
||||
if (timeChar.equals("s")) {
|
||||
parsed = parsed;
|
||||
} else if (timeChar.equals("m")) {
|
||||
|
@ -107,7 +116,7 @@ export async function getTotalCountAggregations(callCluster: LegacyAPICaller, ki
|
|||
map_script: `
|
||||
if (doc['alert.schedule.interval'].size() > 0) {
|
||||
def interval = doc['alert.schedule.interval'].value;
|
||||
|
||||
|
||||
if (interval.length() > 1) {
|
||||
// get last char
|
||||
String timeChar = interval.substring(interval.length() - 1);
|
||||
|
@ -117,7 +126,7 @@ export async function getTotalCountAggregations(callCluster: LegacyAPICaller, ki
|
|||
if (interval.chars().allMatch(Character::isDigit)) {
|
||||
// using of regex is not allowed in painless language
|
||||
int parsed = Integer.parseInt(interval);
|
||||
|
||||
|
||||
if (timeChar.equals("s")) {
|
||||
parsed = parsed;
|
||||
} else if (timeChar.equals("m")) {
|
||||
|
|
|
@ -57,7 +57,7 @@ export function createAlertsUsageCollector(
|
|||
try {
|
||||
const doc = await getLatestTaskState(await taskManager);
|
||||
// get the accumulated state from the recurring task
|
||||
const state: AlertsUsage = get(doc, 'state') as AlertsUsage;
|
||||
const { runs, ...state } = get(doc, 'state') as AlertsUsage & { runs: number };
|
||||
|
||||
return {
|
||||
...state,
|
||||
|
@ -68,14 +68,14 @@ export function createAlertsUsageCollector(
|
|||
count_active_total: 0,
|
||||
count_disabled_total: 0,
|
||||
throttle_time: {
|
||||
min: 0,
|
||||
avg: 0,
|
||||
max: 0,
|
||||
min: '0s',
|
||||
avg: '0s',
|
||||
max: '0s',
|
||||
},
|
||||
schedule_time: {
|
||||
min: 0,
|
||||
avg: 0,
|
||||
max: 0,
|
||||
min: '0s',
|
||||
avg: '0s',
|
||||
max: '0s',
|
||||
},
|
||||
connectors_per_alert: {
|
||||
min: 0,
|
||||
|
@ -92,14 +92,14 @@ export function createAlertsUsageCollector(
|
|||
count_active_total: { type: 'long' },
|
||||
count_disabled_total: { type: 'long' },
|
||||
throttle_time: {
|
||||
min: { type: 'long' },
|
||||
avg: { type: 'float' },
|
||||
max: { type: 'long' },
|
||||
min: { type: 'keyword' },
|
||||
avg: { type: 'keyword' },
|
||||
max: { type: 'keyword' },
|
||||
},
|
||||
schedule_time: {
|
||||
min: { type: 'long' },
|
||||
avg: { type: 'float' },
|
||||
max: { type: 'long' },
|
||||
min: { type: 'keyword' },
|
||||
avg: { type: 'keyword' },
|
||||
max: { type: 'keyword' },
|
||||
},
|
||||
connectors_per_alert: {
|
||||
min: { type: 'long' },
|
||||
|
|
|
@ -12,14 +12,14 @@ export interface AlertsUsage {
|
|||
count_by_type: Record<string, number>;
|
||||
count_active_by_type: Record<string, number>;
|
||||
throttle_time: {
|
||||
min: number;
|
||||
avg: number;
|
||||
max: number;
|
||||
min: string;
|
||||
avg: string;
|
||||
max: string;
|
||||
};
|
||||
schedule_time: {
|
||||
min: number;
|
||||
avg: number;
|
||||
max: number;
|
||||
min: string;
|
||||
avg: string;
|
||||
max: string;
|
||||
};
|
||||
connectors_per_alert: {
|
||||
min: number;
|
||||
|
|
|
@ -530,7 +530,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the
|
|||
},
|
||||
"environments": {
|
||||
"properties": {
|
||||
"services_without_environments": {
|
||||
"services_without_environment": {
|
||||
"type": "long"
|
||||
},
|
||||
"services_with_multiple_environments": {
|
||||
|
@ -1008,6 +1008,17 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the
|
|||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"environments": {
|
||||
"properties": {
|
||||
"took": {
|
||||
"properties": {
|
||||
"ms": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -116,7 +116,7 @@ export const apmSchema: MakeSchemaFrom<APMUsage> = {
|
|||
},
|
||||
},
|
||||
environments: {
|
||||
services_without_environments: long,
|
||||
services_without_environment: long,
|
||||
services_with_multiple_environments: long,
|
||||
top_environments: { type: 'array', items: { type: 'keyword' } },
|
||||
},
|
||||
|
@ -192,5 +192,6 @@ export const apmSchema: MakeSchemaFrom<APMUsage> = {
|
|||
agents: { took: { ms: long } },
|
||||
indices_stats: { took: { ms: long } },
|
||||
cardinality: { took: { ms: long } },
|
||||
environments: { took: { ms: long } },
|
||||
},
|
||||
};
|
||||
|
|
|
@ -35,7 +35,7 @@ export interface APMUsage {
|
|||
};
|
||||
};
|
||||
environments: {
|
||||
services_without_environments: number;
|
||||
services_without_environment: number;
|
||||
services_with_multiple_environments: number;
|
||||
top_environments: string[];
|
||||
};
|
||||
|
@ -140,7 +140,8 @@ export interface APMUsage {
|
|||
| 'integrations'
|
||||
| 'agents'
|
||||
| 'indices_stats'
|
||||
| 'cardinality',
|
||||
| 'cardinality'
|
||||
| 'environments',
|
||||
{ took: { ms: number } }
|
||||
>;
|
||||
}
|
||||
|
|
|
@ -131,6 +131,14 @@ export interface UsageData extends UsageStats {
|
|||
count?: number;
|
||||
usesFeatureControls?: boolean;
|
||||
disabledFeatures: {
|
||||
// "feature": number;
|
||||
[key: string]: number | undefined;
|
||||
// Known registered features
|
||||
stackAlerts?: number;
|
||||
actions?: number;
|
||||
enterpriseSearch?: number;
|
||||
fleet?: number;
|
||||
savedObjectsTagging?: number;
|
||||
indexPatterns?: number;
|
||||
discover?: number;
|
||||
canvas?: number;
|
||||
|
@ -173,6 +181,14 @@ export function getSpacesUsageCollector(
|
|||
schema: {
|
||||
usesFeatureControls: { type: 'boolean' },
|
||||
disabledFeatures: {
|
||||
// "feature": number;
|
||||
DYNAMIC_KEY: { type: 'long' },
|
||||
// Known registered features
|
||||
stackAlerts: { type: 'long' },
|
||||
actions: { type: 'long' },
|
||||
enterpriseSearch: { type: 'long' },
|
||||
fleet: { type: 'long' },
|
||||
savedObjectsTagging: { type: 'long' },
|
||||
indexPatterns: { type: 'long' },
|
||||
discover: { type: 'long' },
|
||||
canvas: { type: 'long' },
|
||||
|
|
17
x-pack/plugins/telemetry_collection_xpack/schema/README.md
Normal file
17
x-pack/plugins/telemetry_collection_xpack/schema/README.md
Normal file
|
@ -0,0 +1,17 @@
|
|||
# X-Pack Telemetry Schemas
|
||||
|
||||
This is an extension of the [OSS Telemetry Schemas](../../../../src/plugins/telemetry/schema) to add the X-Pack-related data. The payloads described in these `.json` files must be merged with the OSS ones to get the structure of the full payload sent to the Remote Telemetry Service. All the files follow the schema convention as defined in the `usage_collection` plugin and `@kbn/telemetry-tools`.
||||
|
||||
There are currently 2 files:
|
||||
|
||||
- `xpack_root.json`: Defines the extra fields x-pack reports over the OSS payload defined in the `oss_root.json`.
|
||||
  Manually maintained for now because it is expected to change infrequently.
||||
- `xpack_plugins.json`: The X-Pack related schema for the content that will be nested in `stack_stats.kibana.plugins`.
|
||||
It is automatically generated by `@kbn/telemetry-tools` based on the `schema` property provided by all the registered Usage Collectors via the `usageCollection.makeUsageCollector` API.
|
||||
  More details in the [Schema field](../../../../src/plugins/usage_collection/README.md#schema-field) chapter of the UsageCollection docs.
||||
|
||||
NOTE: Despite its similarities to ES mappings, the intention of these files is not to define any index mappings. They should be considered as a tool to understand the format of the payload that will be sent when reporting telemetry to the Remote Service.
|
||||
|
||||
## Testing
|
||||
|
||||
Functional tests are defined at `x-pack/test/api_integration/apis/telemetry/telemetry_local.ts`. They merge both files (+ the OSS definitions), and validates the actual output of the telemetry endpoint against the final schema.
|
|
@ -92,26 +92,26 @@
|
|||
"throttle_time": {
|
||||
"properties": {
|
||||
"min": {
|
||||
"type": "long"
|
||||
"type": "keyword"
|
||||
},
|
||||
"avg": {
|
||||
"type": "float"
|
||||
"type": "keyword"
|
||||
},
|
||||
"max": {
|
||||
"type": "long"
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"schedule_time": {
|
||||
"properties": {
|
||||
"min": {
|
||||
"type": "long"
|
||||
"type": "keyword"
|
||||
},
|
||||
"avg": {
|
||||
"type": "float"
|
||||
"type": "keyword"
|
||||
},
|
||||
"max": {
|
||||
"type": "long"
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -1031,7 +1031,7 @@
|
|||
},
|
||||
"environments": {
|
||||
"properties": {
|
||||
"services_without_environments": {
|
||||
"services_without_environment": {
|
||||
"type": "long"
|
||||
},
|
||||
"services_with_multiple_environments": {
|
||||
|
@ -1521,6 +1521,17 @@
|
|||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"environments": {
|
||||
"properties": {
|
||||
"took": {
|
||||
"properties": {
|
||||
"ms": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3454,6 +3465,24 @@
|
|||
},
|
||||
"disabledFeatures": {
|
||||
"properties": {
|
||||
"DYNAMIC_KEY": {
|
||||
"type": "long"
|
||||
},
|
||||
"stackAlerts": {
|
||||
"type": "long"
|
||||
},
|
||||
"actions": {
|
||||
"type": "long"
|
||||
},
|
||||
"enterpriseSearch": {
|
||||
"type": "long"
|
||||
},
|
||||
"fleet": {
|
||||
"type": "long"
|
||||
},
|
||||
"savedObjectsTagging": {
|
||||
"type": "long"
|
||||
},
|
||||
"indexPatterns": {
|
||||
"type": "long"
|
||||
},
|
||||
|
|
|
@ -0,0 +1,51 @@
|
|||
{
|
||||
"properties": {
|
||||
"license": {
|
||||
"properties": {
|
||||
"uid": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"issue_date": {
|
||||
"type": "date"
|
||||
},
|
||||
"expiry_date": {
|
||||
"type": "date"
|
||||
},
|
||||
"expiry_date_in_millis": {
|
||||
"type": "long"
|
||||
},
|
||||
"issue_date_in_millis": {
|
||||
"type": "long"
|
||||
},
|
||||
"start_date_in_millis": {
|
||||
"type": "long"
|
||||
},
|
||||
"issued_to": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"issuer": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"status": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"type": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"max_nodes": {
|
||||
"type": "long"
|
||||
},
|
||||
"max_resource_units": {
|
||||
"type": "long"
|
||||
}
|
||||
}
|
||||
},
|
||||
"stack_stats": {
|
||||
"properties": {
|
||||
"xpack": {
|
||||
"type": "pass_through"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -6,22 +6,16 @@
|
|||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import _ from 'lodash';
|
||||
|
||||
/*
|
||||
* Create a single-level array with strings for all the paths to values in the
|
||||
* source object, up to 3 deep. Going deeper than 3 causes a bit too much churn
|
||||
* in the tests.
|
||||
*/
|
||||
function flatKeys(source) {
|
||||
const recursivelyFlatKeys = (obj, path = [], depth = 0) => {
|
||||
return depth < 3 && _.isObject(obj)
|
||||
? _.map(obj, (v, k) => recursivelyFlatKeys(v, [...path, k], depth + 1))
|
||||
: path.join('.');
|
||||
};
|
||||
|
||||
return _.uniq(_.flattenDeep(recursivelyFlatKeys(source))).sort((a, b) => a.localeCompare(b));
|
||||
}
|
||||
import deepmerge from 'deepmerge';
|
||||
import type { FtrProviderContext } from '../../ftr_provider_context';
|
||||
import {
|
||||
assertTelemetryPayload,
|
||||
flatKeys,
|
||||
} from '../../../../../test/api_integration/apis/telemetry/utils';
|
||||
import ossRootTelemetrySchema from '../../../../../src/plugins/telemetry/schema/oss_root.json';
|
||||
import ossPluginsTelemetrySchema from '../../../../../src/plugins/telemetry/schema/oss_plugins.json';
|
||||
import xpackRootTelemetrySchema from '../../../../plugins/telemetry_collection_xpack/schema/xpack_root.json';
|
||||
import xpackPluginsTelemetrySchema from '../../../../plugins/telemetry_collection_xpack/schema/xpack_plugins.json';
|
||||
|
||||
const disableCollection = {
|
||||
persistent: {
|
||||
|
@ -35,17 +29,17 @@ const disableCollection = {
|
|||
},
|
||||
};
|
||||
|
||||
export default function ({ getService }) {
|
||||
export default function ({ getService }: FtrProviderContext) {
|
||||
const supertest = getService('supertest');
|
||||
const esSupertest = getService('esSupertest');
|
||||
const es = getService('es');
|
||||
|
||||
describe('/api/telemetry/v2/clusters/_stats with monitoring disabled', () => {
|
||||
before('', async () => {
|
||||
await esSupertest.put('/_cluster/settings').send(disableCollection).expect(200);
|
||||
await new Promise((r) => setTimeout(r, 1000));
|
||||
});
|
||||
let stats: Record<string, any>;
|
||||
|
||||
before('disable monitoring and pull local stats', async () => {
|
||||
await es.cluster.put_settings({ body: disableCollection });
|
||||
await new Promise((r) => setTimeout(r, 1000));
|
||||
|
||||
it('should pull local stats and validate data types', async () => {
|
||||
const { body } = await supertest
|
||||
.post('/api/telemetry/v2/clusters/_stats')
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
|
@ -53,8 +47,21 @@ export default function ({ getService }) {
|
|||
.expect(200);
|
||||
|
||||
expect(body.length).to.be(1);
|
||||
const stats = body[0];
|
||||
stats = body[0];
|
||||
});
|
||||
|
||||
it('should pass the schema validation', () => {
|
||||
const root = deepmerge(ossRootTelemetrySchema, xpackRootTelemetrySchema);
|
||||
const plugins = deepmerge(ossPluginsTelemetrySchema, xpackPluginsTelemetrySchema);
|
||||
try {
|
||||
assertTelemetryPayload({ root, plugins }, stats);
|
||||
} catch (err) {
|
||||
err.message = `The telemetry schemas in 'x-pack/plugins/telemetry_collection_xpack/schema/' are out-of-date, please update it as required: ${err.message}`;
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
it('should pass ad-hoc enforced validations', () => {
|
||||
expect(stats.collection).to.be('local');
|
||||
expect(stats.collectionSource).to.be('local_xpack');
|
||||
|
||||
|
@ -103,14 +110,7 @@ export default function ({ getService }) {
|
|||
expect(stats.stack_stats.xpack.rollup).to.be.an('object');
|
||||
});
|
||||
|
||||
it('should pull local stats and validate fields', async () => {
|
||||
const { body } = await supertest
|
||||
.post('/api/telemetry/v2/clusters/_stats')
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
.send({ unencrypted: true })
|
||||
.expect(200);
|
||||
|
||||
const stats = body[0];
|
||||
it('should validate mandatory fields exist', () => {
|
||||
const actual = flatKeys(stats);
|
||||
|
||||
const expected = [
|
Loading…
Add table
Add a link
Reference in a new issue