[Infra UI] Revert ECS changes for 6.7 (#31961)

* Revert "[Infra UI] Clean up Docker and Kubernetes fields for ECS (#31175)"

This reverts commit cb3dad1317.

* Revert "[Infra UI] Fixing group by labels by fixing the field names (post ECS migration) (#30416) (#31012)"

This reverts commit 6bd74e0abd.

* Revert "[Infra UI] ECS Migration (#28205) (#29965)"

This reverts commit fe9748583e.

* Fixing tests

* Fixing tests

* Adding test data for Docker and some basic sanity checks to ensure IDs and names work correctly with real data

Authored by Chris Cowan on 2019-03-05 11:21:32 -07:00; committed by GitHub
parent 72c7595742
commit 2ff4418b94
30 changed files with 2203 additions and 31396 deletions

@@ -10,8 +10,8 @@
`xpack.infra.sources.default.fields.tiebreaker`:: Field used to break ties between two entries with the same timestamp. Defaults to `_doc`.
`xpack.infra.sources.default.fields.host`:: Field used to identify hosts. Defaults to `host.name`.
`xpack.infra.sources.default.fields.host`:: Field used to identify hosts. Defaults to `beat.hostname`.
`xpack.infra.sources.default.fields.container`:: Field used to identify Docker containers. Defaults to `container.id`.
`xpack.infra.sources.default.fields.container`:: Field used to identify Docker containers. Defaults to `docker.container.name`.
`xpack.infra.sources.default.fields.pod`:: Field used to identify Kubernetes pods. Defaults to `kubernetes.pod.uid`.
`xpack.infra.sources.default.fields.pod`:: Field used to identify Kubernetes pods. Defaults to `kubernetes.pod.name`.

@@ -94,7 +94,7 @@ export const FieldsConfigurationPanel = ({
id="xpack.infra.sourceConfiguration.containerFieldDescription"
defaultMessage="Field used to identify Docker containers. The recommended value is {defaultValue}."
values={{
defaultValue: <EuiCode>container.id</EuiCode>,
defaultValue: <EuiCode>docker.container.id</EuiCode>,
}}
/>
}

@@ -24,19 +24,19 @@ export const fieldToName = (field: string, intl: InjectedIntl) => {
id: 'xpack.infra.groupByDisplayNames.hostName',
defaultMessage: 'Host',
}),
'cloud.availability_zone': intl.formatMessage({
'meta.cloud.availability_zone': intl.formatMessage({
id: 'xpack.infra.groupByDisplayNames.availabilityZone',
defaultMessage: 'Availability Zone',
}),
'cloud.machine.type': intl.formatMessage({
'meta.cloud.machine_type': intl.formatMessage({
id: 'xpack.infra.groupByDisplayNames.machineType',
defaultMessage: 'Machine Type',
}),
'cloud.project.id': intl.formatMessage({
'meta.cloud.project_id': intl.formatMessage({
id: 'xpack.infra.groupByDisplayNames.projectID',
defaultMessage: 'Project ID',
}),
'cloud.provider': intl.formatMessage({
'meta.cloud.provider': intl.formatMessage({
id: 'xpack.infra.groupByDisplayNames.provider',
defaultMessage: 'Cloud Provider',
}),

@@ -47,16 +47,16 @@ const getOptions = (
[InfraNodeType.pod]: ['kubernetes.namespace', 'kubernetes.node.name'].map(mapFieldToOption),
[InfraNodeType.container]: [
'host.name',
'cloud.availability_zone',
'cloud.machine.type',
'cloud.project.id',
'cloud.provider',
'meta.cloud.availability_zone',
'meta.cloud.machine_type',
'meta.cloud.project_id',
'meta.cloud.provider',
].map(mapFieldToOption),
[InfraNodeType.host]: [
'cloud.availability_zone',
'cloud.machine.type',
'cloud.project.id',
'cloud.provider',
'meta.cloud.availability_zone',
'meta.cloud.machine_type',
'meta.cloud.project_id',
'meta.cloud.provider',
].map(mapFieldToOption),
};
}

@@ -157,6 +157,9 @@ export interface InfraDateRangeAggregationResponse {
export interface InfraMetadataAggregationBucket {
key: string;
names?: {
buckets: InfraMetadataAggregationBucket[];
};
}
export interface InfraMetadataAggregationResponse {

@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { get } from 'lodash';
import { first, get } from 'lodash';
import { InfraSourceConfiguration } from '../../sources';
import {
InfraBackendFrameworkAdapter,
@@ -37,19 +37,22 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
},
},
},
size: 0,
size: 1,
_source: [NAME_FIELDS[nodeType]],
aggs: {
nodeName: {
terms: {
field: NAME_FIELDS[nodeType],
size: 1,
},
},
metrics: {
terms: {
field: 'event.dataset',
field: 'metricset.module',
size: 1000,
},
aggs: {
names: {
terms: {
field: 'metricset.name',
size: 1000,
},
},
},
},
},
},
@@ -57,7 +60,7 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
const response = await this.framework.callWithRequest<
any,
{ metrics?: InfraMetadataAggregationResponse; nodeName?: InfraMetadataAggregationResponse }
{ metrics?: InfraMetadataAggregationResponse }
>(req, 'search', metricQuery);
const buckets =
@@ -65,9 +68,11 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
? response.aggregations.metrics.buckets
: [];
const sampleDoc = first(response.hits.hits);
return {
id: nodeId,
name: get(response, ['aggregations', 'nodeName', 'buckets', 0, 'key'], nodeId),
name: get(sampleDoc, `_source.${NAME_FIELDS[nodeType]}`),
buckets,
};
}
@@ -89,19 +94,22 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
},
},
},
size: 0,
size: 1,
_source: [NAME_FIELDS[nodeType]],
aggs: {
nodeName: {
terms: {
field: NAME_FIELDS[nodeType],
size: 1,
},
},
metrics: {
terms: {
field: 'event.dataset',
field: 'fileset.module',
size: 1000,
},
aggs: {
names: {
terms: {
field: 'fileset.name',
size: 1000,
},
},
},
},
},
},
@@ -109,7 +117,7 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
const response = await this.framework.callWithRequest<
any,
{ metrics?: InfraMetadataAggregationResponse; nodeName?: InfraMetadataAggregationResponse }
{ metrics?: InfraMetadataAggregationResponse }
>(req, 'search', logQuery);
const buckets =
@@ -117,9 +125,11 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
? response.aggregations.metrics.buckets
: [];
const sampleDoc = first(response.hits.hits);
return {
id: nodeId,
name: get(response, ['aggregations', 'nodeName', 'buckets', 0, 'key'], nodeId),
name: get(sampleDoc, `_source.${NAME_FIELDS[nodeType]}`),
buckets,
};
}
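For reference, after this revert the metric metadata request takes roughly the following shape (a minimal sketch: the index pattern and match filter are assumptions for illustration, while the size, _source, and aggs structure mirrors the hunks above):

// Sketch of the reverted metric metadata request for a container node.
// The index pattern and match filter are assumptions; the example ID comes
// from the Docker test data added in this commit.
const nodeId = '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a';
const metricMetadataQuery = {
  index: 'metricbeat-*',
  body: {
    query: { bool: { filter: [{ match: { 'docker.container.id': nodeId } }] } },
    size: 1, // sample one document so the node name can be read from _source
    _source: ['docker.container.name'], // NAME_FIELDS[InfraNodeType.container]
    aggs: {
      metrics: {
        terms: { field: 'metricset.module', size: 1000 }, // e.g. "docker", "system"
        aggs: {
          names: { terms: { field: 'metricset.name', size: 1000 } }, // e.g. "cpu", "memory"
        },
      },
    },
  },
};
// The node name is then read from the sampled document, e.g.
// get(first(response.hits.hits), '_source.docker.container.name').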

@@ -122,7 +122,7 @@ export const hostK8sOverview: InfraMetricModelCreator = (timeField, indexPattern
type: InfraMetricModelMetricType.max,
},
{
field: 'kubernetes.pod.uid',
field: 'kubernetes.pod.name',
id: 'card-pod-name',
type: InfraMetricModelMetricType.cardinality,
},

@@ -31,7 +31,7 @@ export const hostK8sPodCap: InfraMetricModelCreator = (timeField, indexPattern,
id: 'used',
metrics: [
{
field: 'kubernetes.pod.uid',
field: 'kubernetes.pod.name',
id: 'avg-pod',
type: InfraMetricModelMetricType.cardinality,
},

@@ -23,7 +23,7 @@ export const nginxHits: InfraMetricModelCreator = (timeField, indexPattern, inte
},
],
split_mode: 'filter',
filter: 'http.response.status_code:[200 TO 299]',
filter: 'nginx.access.response_code:[200 TO 299]',
},
{
id: '300s',
@@ -34,7 +34,7 @@ export const nginxHits: InfraMetricModelCreator = (timeField, indexPattern, inte
},
],
split_mode: 'filter',
filter: 'http.response.status_code:[300 TO 399]',
filter: 'nginx.access.response_code:[300 TO 399]',
},
{
id: '400s',
@@ -45,7 +45,7 @@ export const nginxHits: InfraMetricModelCreator = (timeField, indexPattern, inte
},
],
split_mode: 'filter',
filter: 'http.response.status_code:[400 TO 499]',
filter: 'nginx.access.response_code:[400 TO 499]',
},
{
id: '500s',
@@ -56,7 +56,7 @@ export const nginxHits: InfraMetricModelCreator = (timeField, indexPattern, inte
},
],
split_mode: 'filter',
filter: 'http.response.status_code:[500 TO 599]',
filter: 'nginx.access.response_code:[500 TO 599]',
},
],
});

@@ -11,5 +11,5 @@ export const NODE_REQUEST_PARTITION_FACTOR = 1.2;
export const NAME_FIELDS = {
[InfraNodeType.host]: 'host.name',
[InfraNodeType.pod]: 'kubernetes.pod.name',
[InfraNodeType.container]: 'container.name',
[InfraNodeType.container]: 'docker.container.name',
};

@@ -9,6 +9,7 @@ import moment from 'moment';
import { InfraMetricType, InfraNode, InfraNodeMetric } from '../../../../graphql/types';
import { InfraBucket, InfraNodeRequestOptions } from '../adapter_types';
import { NAME_FIELDS } from '../constants';
import { getBucketSizeInSeconds } from './get_bucket_size_in_seconds';
// TODO: Break these functions into separate files and expand beyond just document count
@@ -71,9 +72,9 @@ export function createNodeItem(
node: InfraBucket,
bucket: InfraBucket
): InfraNode {
const nodeDetails = get(node, ['nodeDetails', 'buckets', 0]);
const nodeDoc = get(node, ['nodeDetails', 'hits', 'hits', 0]);
return {
metric: createNodeMetrics(options, node, bucket),
path: [{ value: node.key, label: get(nodeDetails, 'key', node.key) }],
path: [{ value: node.key, label: get(nodeDoc, `_source.${NAME_FIELDS[options.nodeType]}`) }],
} as InfraNode;
}

@@ -26,6 +26,7 @@ const nodeTypeToField = (options: InfraProcesorRequestOptions): string => {
};
export const nodesProcessor = (options: InfraProcesorRequestOptions) => {
const { fields } = options.nodeOptions.sourceConfiguration;
return (doc: InfraESSearchBody) => {
const result = cloneDeep(doc);
const field = nodeTypeToField(options);
@@ -42,9 +43,10 @@ export const nodesProcessor = (options: InfraProcesorRequestOptions) => {
set(result, 'aggs.waffle.aggs.nodes.aggs', {
nodeDetails: {
terms: {
field: NAME_FIELDS[options.nodeType],
top_hits: {
size: 1,
_source: { includes: [NAME_FIELDS[options.nodeType]] },
sort: [{ [fields.timestamp]: { order: 'desc' } }],
},
},
});
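After this change the per-node sub-aggregation samples a document with top_hits instead of running a terms aggregation on the name field; for a host query it comes out roughly as below (a sketch assuming '@timestamp' as the timestamp field; the real code derives both field names from NAME_FIELDS and the source configuration):

// Approximate value written under aggs.waffle.aggs.nodes.aggs for hosts.
const nodeDetailsAggs = {
  nodeDetails: {
    top_hits: {
      size: 1,
      _source: { includes: ['host.name'] }, // NAME_FIELDS[InfraNodeType.host]
      sort: [{ '@timestamp': { order: 'desc' } }], // assumed timestamp field; newest document wins
    },
  },
};
// createNodeItem() then labels each node from that sampled document via
// get(nodeDoc, '_source.host.name').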

@@ -4,255 +4,106 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { builtinRules } from '.';
import { compileFormattingRules } from '../message';
import { filebeatApache2Rules } from './filebeat_apache2';
const { format } = compileFormattingRules(builtinRules);
const { format } = compileFormattingRules(filebeatApache2Rules);
describe('Filebeat Rules', () => {
describe('in ECS format', () => {
test('Apache2 Access', () => {
const flattenedDocument = {
'@timestamp': '2016-12-26T16:22:13.000Z',
'ecs.version': '1.0.0-beta2',
'event.dataset': 'apache.access',
'event.module': 'apache',
'fileset.name': 'access',
'http.request.method': 'GET',
'http.request.referrer': '-',
'http.response.body.bytes': 499,
'http.response.status_code': 404,
'http.version': '1.1',
'input.type': 'log',
'log.offset': 73,
'service.type': 'apache',
'source.address': '192.168.33.1',
'source.ip': '192.168.33.1',
'url.original': '/hello',
'user.name': '-',
'user_agent.device': 'Other',
'user_agent.major': '50',
'user_agent.minor': '0',
'user_agent.name': 'Firefox',
'user_agent.original':
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:50.0) Gecko/20100101 Firefox/50.0',
'user_agent.os.full_name': 'Mac OS X 10.12',
'user_agent.os.major': '10',
'user_agent.os.minor': '12',
'user_agent.os.name': 'Mac OS X',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[",
},
Object {
"field": "event.module",
"highlights": Array [],
"value": "apache",
},
Object {
"constant": "][access] ",
},
Object {
"field": "source.ip",
"highlights": Array [],
"value": "192.168.33.1",
},
Object {
"constant": " ",
},
Object {
"field": "user.name",
"highlights": Array [],
"value": "-",
},
Object {
"constant": " \\"",
},
Object {
"field": "http.request.method",
"highlights": Array [],
"value": "GET",
},
Object {
"constant": " ",
},
Object {
"field": "url.original",
"highlights": Array [],
"value": "/hello",
},
Object {
"constant": " HTTP/",
},
Object {
"field": "http.version",
"highlights": Array [],
"value": "1.1",
},
Object {
"constant": "\\" ",
},
Object {
"field": "http.response.status_code",
"highlights": Array [],
"value": "404",
},
Object {
"constant": " ",
},
Object {
"field": "http.response.body.bytes",
"highlights": Array [],
"value": "499",
},
]
`);
});
test('Apache2 Error', () => {
const flattenedDocument = {
'@timestamp': '2016-12-26T16:22:08.000Z',
'ecs.version': '1.0.0-beta2',
'event.dataset': 'apache.error',
'event.module': 'apache',
'fileset.name': 'error',
'input.type': 'log',
'log.level': 'error',
'log.offset': 0,
message: 'File does not exist: /var/www/favicon.ico',
'service.type': 'apache',
'source.address': '192.168.33.1',
'source.ip': '192.168.33.1',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[apache][",
},
Object {
"field": "log.level",
"highlights": Array [],
"value": "error",
},
Object {
"constant": "] ",
},
Object {
"field": "message",
"highlights": Array [],
"value": "File does not exist: /var/www/favicon.ico",
},
]
`);
});
test('Apache2 Access', () => {
const event = {
'apache2.access': true,
'apache2.access.remote_ip': '192.168.1.42',
'apache2.access.user_name': 'admin',
'apache2.access.method': 'GET',
'apache2.access.url': '/faqs',
'apache2.access.http_version': '1.1',
'apache2.access.response_code': '200',
'apache2.access.body_sent.bytes': 1024,
};
const message = format(event);
expect(message).toEqual([
{
constant: '[Apache][access] ',
},
{
field: 'apache2.access.remote_ip',
highlights: [],
value: '192.168.1.42',
},
{
constant: ' ',
},
{
field: 'apache2.access.user_name',
highlights: [],
value: 'admin',
},
{
constant: ' "',
},
{
field: 'apache2.access.method',
highlights: [],
value: 'GET',
},
{
constant: ' ',
},
{
field: 'apache2.access.url',
highlights: [],
value: '/faqs',
},
{
constant: ' HTTP/',
},
{
field: 'apache2.access.http_version',
highlights: [],
value: '1.1',
},
{
constant: '" ',
},
{
field: 'apache2.access.response_code',
highlights: [],
value: '200',
},
{
constant: ' ',
},
{
field: 'apache2.access.body_sent.bytes',
highlights: [],
value: '1024',
},
]);
});
describe('in pre-ECS format', () => {
test('Apache2 Access', () => {
const flattenedDocument = {
'apache2.access': true,
'apache2.access.remote_ip': '192.168.1.42',
'apache2.access.user_name': 'admin',
'apache2.access.method': 'GET',
'apache2.access.url': '/faqs',
'apache2.access.http_version': '1.1',
'apache2.access.response_code': '200',
'apache2.access.body_sent.bytes': 1024,
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[apache][access] ",
},
Object {
"field": "apache2.access.remote_ip",
"highlights": Array [],
"value": "192.168.1.42",
},
Object {
"constant": " ",
},
Object {
"field": "apache2.access.user_name",
"highlights": Array [],
"value": "admin",
},
Object {
"constant": " \\"",
},
Object {
"field": "apache2.access.method",
"highlights": Array [],
"value": "GET",
},
Object {
"constant": " ",
},
Object {
"field": "apache2.access.url",
"highlights": Array [],
"value": "/faqs",
},
Object {
"constant": " HTTP/",
},
Object {
"field": "apache2.access.http_version",
"highlights": Array [],
"value": "1.1",
},
Object {
"constant": "\\" ",
},
Object {
"field": "apache2.access.response_code",
"highlights": Array [],
"value": "200",
},
Object {
"constant": " ",
},
Object {
"field": "apache2.access.body_sent.bytes",
"highlights": Array [],
"value": "1024",
},
]
`);
});
test('Apache2 Error', () => {
const flattenedDocument = {
'apache2.error.message':
'AH00489: Apache/2.4.18 (Ubuntu) configured -- resuming normal operations',
'apache2.error.level': 'notice',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[apache][",
},
Object {
"field": "apache2.error.level",
"highlights": Array [],
"value": "notice",
},
Object {
"constant": "] ",
},
Object {
"field": "apache2.error.message",
"highlights": Array [],
"value": "AH00489: Apache/2.4.18 (Ubuntu) configured -- resuming normal operations",
},
]
`);
});
test('Apache2 Error', () => {
const event = {
'apache2.error.message':
'AH00489: Apache/2.4.18 (Ubuntu) configured -- resuming normal operations',
'apache2.error.level': 'notice',
};
const message = format(event);
expect(message).toEqual([
{
constant: '[Apache][',
},
{
field: 'apache2.error.level',
highlights: [],
value: 'notice',
},
{
constant: '] ',
},
{
field: 'apache2.error.message',
highlights: [],
value: 'AH00489: Apache/2.4.18 (Ubuntu) configured -- resuming normal operations',
},
]);
});
});

@@ -6,13 +6,12 @@
export const filebeatApache2Rules = [
{
// pre-ECS
when: {
exists: ['apache2.access'],
},
format: [
{
constant: '[apache][access] ',
constant: '[Apache][access] ',
},
{
field: 'apache2.access.remote_ip',
@@ -56,35 +55,12 @@ export const filebeatApache2Rules = [
],
},
{
// ECS
when: {
values: {
'event.dataset': 'apache.error',
},
},
format: [
{
constant: '[apache][',
},
{
field: 'log.level',
},
{
constant: '] ',
},
{
field: 'message',
},
],
},
{
// pre-ECS
when: {
exists: ['apache2.error.message'],
},
format: [
{
constant: '[apache][',
constant: '[Apache][',
},
{
field: 'apache2.error.level',

@@ -4,261 +4,110 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { builtinRules } from '.';
import { compileFormattingRules } from '../message';
import { filebeatNginxRules } from './filebeat_nginx';
const { format } = compileFormattingRules(builtinRules);
const { format } = compileFormattingRules(filebeatNginxRules);
describe('Filebeat Rules', () => {
describe('in ECS format', () => {
test('Nginx Access', () => {
const flattenedDocument = {
'@timestamp': '2017-05-29T19:02:48.000Z',
'ecs.version': '1.0.0-beta2',
'event.dataset': 'nginx.access',
'event.module': 'nginx',
'fileset.name': 'access',
'http.request.method': 'GET',
'http.request.referrer': '-',
'http.response.body.bytes': 612,
'http.response.status_code': 404,
'http.version': '1.1',
'input.type': 'log',
'log.offset': 183,
'service.type': 'nginx',
'source.ip': '172.17.0.1',
'url.original': '/stringpatch',
'user.name': '-',
'user_agent.device': 'Other',
'user_agent.major': '15',
'user_agent.minor': '0',
'user_agent.name': 'Firefox Alpha',
'user_agent.original':
'Mozilla/5.0 (Windows NT 6.1; rv:15.0) Gecko/20120716 Firefox/15.0a2',
'user_agent.os.full_name': 'Windows 7',
'user_agent.os.name': 'Windows 7',
'user_agent.patch': 'a2',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[",
},
Object {
"field": "event.module",
"highlights": Array [],
"value": "nginx",
},
Object {
"constant": "][access] ",
},
Object {
"field": "source.ip",
"highlights": Array [],
"value": "172.17.0.1",
},
Object {
"constant": " ",
},
Object {
"field": "user.name",
"highlights": Array [],
"value": "-",
},
Object {
"constant": " \\"",
},
Object {
"field": "http.request.method",
"highlights": Array [],
"value": "GET",
},
Object {
"constant": " ",
},
Object {
"field": "url.original",
"highlights": Array [],
"value": "/stringpatch",
},
Object {
"constant": " HTTP/",
},
Object {
"field": "http.version",
"highlights": Array [],
"value": "1.1",
},
Object {
"constant": "\\" ",
},
Object {
"field": "http.response.status_code",
"highlights": Array [],
"value": "404",
},
Object {
"constant": " ",
},
Object {
"field": "http.response.body.bytes",
"highlights": Array [],
"value": "612",
},
]
`);
});
test('Nginx Error', () => {
const flattenedDocument = {
'@timestamp': '2016-10-25T14:49:34.000Z',
'ecs.version': '1.0.0-beta2',
'event.dataset': 'nginx.error',
'event.module': 'nginx',
'fileset.name': 'error',
'input.type': 'log',
'log.level': 'error',
'log.offset': 0,
message:
'open() "/usr/local/Cellar/nginx/1.10.2_1/html/favicon.ico" failed (2: No such file or directory), client: 127.0.0.1, server: localhost, request: "GET /favicon.ico HTTP/1.1", host: "localhost:8080", referrer: "http://localhost:8080/"',
'nginx.error.connection_id': 1,
'process.pid': 54053,
'process.thread.id': 0,
'service.type': 'nginx',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[nginx]",
},
Object {
"constant": "[",
},
Object {
"field": "log.level",
"highlights": Array [],
"value": "error",
},
Object {
"constant": "] ",
},
Object {
"field": "message",
"highlights": Array [],
"value": "open() \\"/usr/local/Cellar/nginx/1.10.2_1/html/favicon.ico\\" failed (2: No such file or directory), client: 127.0.0.1, server: localhost, request: \\"GET /favicon.ico HTTP/1.1\\", host: \\"localhost:8080\\", referrer: \\"http://localhost:8080/\\"",
},
]
`);
});
test('Nginx Access Rule', () => {
const event = {
'nginx.access': true,
'nginx.access.remote_ip': '192.168.1.42',
'nginx.access.user_name': 'admin',
'nginx.access.method': 'GET',
'nginx.access.url': '/faq',
'nginx.access.http_version': '1.1',
'nginx.access.body_sent.bytes': 1024,
'nginx.access.response_code': 200,
};
const message = format(event);
expect(message).toEqual([
{
constant: '[Nginx][access] ',
},
{
field: 'nginx.access.remote_ip',
highlights: [],
value: '192.168.1.42',
},
{
constant: ' ',
},
{
field: 'nginx.access.user_name',
highlights: [],
value: 'admin',
},
{
constant: ' "',
},
{
field: 'nginx.access.method',
highlights: [],
value: 'GET',
},
{
constant: ' ',
},
{
field: 'nginx.access.url',
highlights: [],
value: '/faq',
},
{
constant: ' HTTP/',
},
{
field: 'nginx.access.http_version',
highlights: [],
value: '1.1',
},
{
constant: '" ',
},
{
field: 'nginx.access.response_code',
highlights: [],
value: '200',
},
{
constant: ' ',
},
{
field: 'nginx.access.body_sent.bytes',
highlights: [],
value: '1024',
},
]);
});
describe('in pre-ECS format', () => {
test('Nginx Access', () => {
const flattenedDocument = {
'nginx.access': true,
'nginx.access.remote_ip': '192.168.1.42',
'nginx.access.user_name': 'admin',
'nginx.access.method': 'GET',
'nginx.access.url': '/faq',
'nginx.access.http_version': '1.1',
'nginx.access.body_sent.bytes': 1024,
'nginx.access.response_code': 200,
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[nginx][access] ",
},
Object {
"field": "nginx.access.remote_ip",
"highlights": Array [],
"value": "192.168.1.42",
},
Object {
"constant": " ",
},
Object {
"field": "nginx.access.user_name",
"highlights": Array [],
"value": "admin",
},
Object {
"constant": " \\"",
},
Object {
"field": "nginx.access.method",
"highlights": Array [],
"value": "GET",
},
Object {
"constant": " ",
},
Object {
"field": "nginx.access.url",
"highlights": Array [],
"value": "/faq",
},
Object {
"constant": " HTTP/",
},
Object {
"field": "nginx.access.http_version",
"highlights": Array [],
"value": "1.1",
},
Object {
"constant": "\\" ",
},
Object {
"field": "nginx.access.response_code",
"highlights": Array [],
"value": "200",
},
Object {
"constant": " ",
},
Object {
"field": "nginx.access.body_sent.bytes",
"highlights": Array [],
"value": "1024",
},
]
`);
});
test('Nginx Error', () => {
const flattenedDocument = {
'nginx.error.message':
test('Nginx Access Rule', () => {
const event = {
'nginx.error.message':
'connect() failed (111: Connection refused) while connecting to upstream, client: 127.0.0.1, server: localhost, request: "GET /php-status?json= HTTP/1.1", upstream: "fastcgi://[::1]:9000", host: "localhost"',
'nginx.error.level': 'error',
};
const message = format(event);
expect(message).toEqual([
{
constant: '[Nginx]',
},
{
constant: '[',
},
{
field: 'nginx.error.level',
highlights: [],
value: 'error',
},
{
constant: '] ',
},
{
field: 'nginx.error.message',
highlights: [],
value:
'connect() failed (111: Connection refused) while connecting to upstream, client: 127.0.0.1, server: localhost, request: "GET /php-status?json= HTTP/1.1", upstream: "fastcgi://[::1]:9000", host: "localhost"',
'nginx.error.level': 'error',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[nginx]",
},
Object {
"constant": "[",
},
Object {
"field": "nginx.error.level",
"highlights": Array [],
"value": "error",
},
Object {
"constant": "] ",
},
Object {
"field": "nginx.error.message",
"highlights": Array [],
"value": "connect() failed (111: Connection refused) while connecting to upstream, client: 127.0.0.1, server: localhost, request: \\"GET /php-status?json= HTTP/1.1\\", upstream: \\"fastcgi://[::1]:9000\\", host: \\"localhost\\"",
},
]
`);
});
},
]);
});
});

@@ -6,13 +6,12 @@
export const filebeatNginxRules = [
{
// pre-ECS
when: {
exists: ['nginx.access.method'],
},
format: [
{
constant: '[nginx][access] ',
constant: '[Nginx][access] ',
},
{
field: 'nginx.access.remote_ip',
@@ -56,38 +55,12 @@ export const filebeatNginxRules = [
],
},
{
// ECS
when: {
values: {
'event.dataset': 'nginx.error',
},
},
format: [
{
constant: '[nginx]',
},
{
constant: '[',
},
{
field: 'log.level',
},
{
constant: '] ',
},
{
field: 'message',
},
],
},
{
// pre-ECS
when: {
exists: ['nginx.error.message'],
},
format: [
{
constant: '[nginx]',
constant: '[Nginx]',
},
{
constant: '[',

@@ -39,14 +39,14 @@ export const builtinRules = [
...genericRules,
{
when: {
exists: ['log.path'],
exists: ['source'],
},
format: [
{
constant: 'failed to format message from ',
},
{
field: 'log.path',
field: 'source',
},
],
},

@@ -43,7 +43,17 @@ export class InfraMetadataDomain {
const pickMetadata = (buckets: InfraMetadataAggregationBucket[]): string[] => {
if (buckets) {
const metadata = buckets.map(bucket => bucket.key);
const metadata = buckets
.map(module => {
if (module.names) {
return module.names.buckets.map(name => {
return `${module.key}.${name.key}`;
});
} else {
return [];
}
})
.reduce((a: string[], b: string[]) => a.concat(b), []);
return metadata;
} else {
return [];
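Given the nested module/name buckets returned by the metadata adapters above, the reverted pickMetadata flattens them into module-qualified feature names; for example (bucket contents are illustrative):

// Input shaped like InfraMetadataAggregationBucket[] from the adapter response
// (module and metricset names here are only examples).
const buckets = [
  { key: 'docker', names: { buckets: [{ key: 'cpu' }, { key: 'memory' }] } },
  { key: 'system', names: { buckets: [{ key: 'load' }] } },
  { key: 'other' }, // a bucket without names contributes nothing
];
// pickMetadata(buckets) => ['docker.cpu', 'docker.memory', 'system.load']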

@@ -10,7 +10,7 @@ export const defaultSourceConfiguration = {
metricAlias: 'metricbeat-*',
logAlias: 'filebeat-*,kibana_sample_data_logs*',
fields: {
container: 'container.id',
container: 'docker.container.id',
host: 'host.name',
pod: 'kubernetes.pod.uid',
tiebreaker: '_doc',

@@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
export default function ({ loadTestFile }) {
describe('InfraOps GraphQL Endpoints', () => {
loadTestFile(require.resolve('./metadata'));

@@ -15,25 +15,25 @@ const metadataTests: KbnTestProvider = ({ getService }) => {
const client = getService('infraOpsGraphQLClient');
describe('metadata', () => {
describe('7.0.0', () => {
before(() => esArchiver.load('infra/7.0.0/hosts'));
after(() => esArchiver.unload('infra/7.0.0/hosts'));
describe('docker', () => {
before(() => esArchiver.load('infra/6.6.0/docker'));
after(() => esArchiver.unload('infra/6.6.0/docker'));
it('hosts', () => {
it('supports the metadata container query', () => {
return client
.query<MetadataQuery.Query>({
query: metadataQuery,
variables: {
sourceId: 'default',
nodeId: 'demo-stack-mysql-01',
nodeType: 'host',
nodeId: '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a',
nodeType: 'container',
},
})
.then(resp => {
const metadata = resp.data.source.metadataByNode;
if (metadata) {
expect(metadata.features.length).to.be(14);
expect(metadata.name).to.equal('demo-stack-mysql-01');
expect(metadata.features.length).to.be(8);
expect(metadata.name).to.equal('docker-autodiscovery_nginx_1');
} else {
throw new Error('Metadata should never be empty');
}
@@ -41,25 +41,25 @@ const metadataTests: KbnTestProvider = ({ getService }) => {
});
});
describe('6.6.0', () => {
before(() => esArchiver.load('infra/6.6.0/docker'));
after(() => esArchiver.unload('infra/6.6.0/docker'));
describe('hosts', () => {
before(() => esArchiver.load('infra/metrics_and_logs'));
after(() => esArchiver.unload('infra/metrics_and_logs'));
it('docker', () => {
it('supports the metadata container query', () => {
return client
.query<MetadataQuery.Query>({
query: metadataQuery,
variables: {
sourceId: 'default',
nodeId: '631f36a845514442b93c3fdd2dc91bcd8feb680b8ac5832c7fb8fdc167bb938e',
nodeType: 'container',
nodeId: 'demo-stack-nginx-01',
nodeType: 'host',
},
})
.then(resp => {
const metadata = resp.data.source.metadataByNode;
if (metadata) {
expect(metadata.features.length).to.be(10);
expect(metadata.name).to.equal('docker-autodiscovery_elasticsearch_1');
expect(metadata.features.length).to.be(14);
expect(metadata.name).to.equal('demo-stack-nginx-01');
} else {
throw new Error('Metadata should never be empty');
}

@@ -9,70 +9,106 @@ import { first, last } from 'lodash';
import { metricsQuery } from '../../../../plugins/infra/public/containers/metrics/metrics.gql_query';
import { MetricsQuery } from '../../../../plugins/infra/public/graphql/types';
import { KbnTestProvider } from './types';
import { DATES } from './constants';
const { min, max } = DATES['7.0.0'].hosts;
import { KbnTestProvider } from './types';
const metricTests: KbnTestProvider = ({ getService }) => {
const esArchiver = getService('esArchiver');
const client = getService('infraOpsGraphQLClient');
describe('metrics', () => {
before(() => esArchiver.load('infra/7.0.0/hosts'));
after(() => esArchiver.unload('infra/7.0.0/hosts'));
describe('docker', () => {
before(() => esArchiver.load('infra/6.6.0/docker'));
after(() => esArchiver.unload('infra/6.6.0/docker'));
it('should basically work', () => {
return client
.query<MetricsQuery.Query>({
query: metricsQuery,
variables: {
sourceId: 'default',
metrics: ['hostCpuUsage'],
timerange: {
to: max,
from: min,
interval: '>=1m',
it('should basically work', () => {
return client
.query<MetricsQuery.Query>({
query: metricsQuery,
variables: {
sourceId: 'default',
metrics: ['containerMemory'],
timerange: {
to: DATES['6.6.0'].docker.max,
from: DATES['6.6.0'].docker.min,
interval: '>=1m',
},
nodeId: '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a',
nodeType: 'container',
},
nodeId: 'demo-stack-mysql-01',
nodeType: 'host',
},
})
.then(resp => {
const { metrics } = resp.data.source;
expect(metrics.length).to.equal(1);
const metric = first(metrics);
expect(metric).to.have.property('id', 'hostCpuUsage');
expect(metric).to.have.property('series');
const series = first(metric.series);
expect(series).to.have.property('id', 'user');
expect(series).to.have.property('data');
const datapoint = last(series.data);
expect(datapoint).to.have.property('timestamp', 1547571720000);
expect(datapoint).to.have.property('value', 0.0018333333333333333);
});
})
.then(resp => {
const { metrics } = resp.data.source;
expect(metrics.length).to.equal(1);
const metric = first(metrics);
expect(metric).to.have.property('id', 'containerMemory');
expect(metric).to.have.property('series');
const series = first(metric.series);
expect(series).to.have.property('id', 'memory');
expect(series).to.have.property('data');
const datapoint = last(series.data);
expect(datapoint).to.have.property('timestamp', 1547578980000);
expect(datapoint).to.have.property('value', 0.001);
});
});
});
it('should support multiple metrics', () => {
return client
.query<MetricsQuery.Query>({
query: metricsQuery,
variables: {
sourceId: 'default',
metrics: ['hostCpuUsage', 'hostLoad'],
timerange: {
to: max,
from: min,
interval: '>=1m',
describe('hosts', () => {
before(() => esArchiver.load('infra/metrics_and_logs'));
after(() => esArchiver.unload('infra/metrics_and_logs'));
it('should basically work', () => {
return client
.query<MetricsQuery.Query>({
query: metricsQuery,
variables: {
sourceId: 'default',
metrics: ['hostCpuUsage'],
timerange: {
to: 1539806283952,
from: 1539805341208,
interval: '>=1m',
},
nodeId: 'demo-stack-nginx-01',
nodeType: 'host',
},
nodeId: 'demo-stack-mysql-01',
nodeType: 'host',
},
})
.then(resp => {
const { metrics } = resp.data.source;
expect(metrics.length).to.equal(2);
});
})
.then(resp => {
const { metrics } = resp.data.source;
expect(metrics.length).to.equal(1);
const metric = first(metrics);
expect(metric).to.have.property('id', 'hostCpuUsage');
expect(metric).to.have.property('series');
const series = first(metric.series);
expect(series).to.have.property('id', 'user');
expect(series).to.have.property('data');
const datapoint = last(series.data);
expect(datapoint).to.have.property('timestamp', 1539806220000);
expect(datapoint).to.have.property('value', 0.0065);
});
});
it('should support multiple metrics', () => {
return client
.query<MetricsQuery.Query>({
query: metricsQuery,
variables: {
sourceId: 'default',
metrics: ['hostCpuUsage', 'hostLoad'],
timerange: {
to: 1539806283952,
from: 1539805341208,
interval: '>=1m',
},
nodeId: 'demo-stack-nginx-01',
nodeType: 'host',
},
})
.then(resp => {
const { metrics } = resp.data.source;
expect(metrics.length).to.equal(2);
});
});
});
});
};

@@ -37,7 +37,7 @@ const sourcesTests: KbnTestProvider = ({ getService }) => {
expect(sourceConfiguration.name).to.be('Default');
expect(sourceConfiguration.metricAlias).to.be('metricbeat-*');
expect(sourceConfiguration.logAlias).to.be('filebeat-*,kibana_sample_data_logs*');
expect(sourceConfiguration.fields.container).to.be('container.id');
expect(sourceConfiguration.fields.container).to.be('docker.container.id');
expect(sourceConfiguration.fields.host).to.be('host.name');
expect(sourceConfiguration.fields.pod).to.be('kubernetes.pod.uid');
@@ -108,7 +108,7 @@ const sourcesTests: KbnTestProvider = ({ getService }) => {
expect(configuration.description).to.be('');
expect(configuration.metricAlias).to.be('metricbeat-*');
expect(configuration.logAlias).to.be('filebeat-*,kibana_sample_data_logs*');
expect(configuration.fields.container).to.be('container.id');
expect(configuration.fields.container).to.be('docker.container.id');
expect(configuration.fields.host).to.be('host.name');
expect(configuration.fields.pod).to.be('kubernetes.pod.uid');
expect(configuration.fields.tiebreaker).to.be('_doc');

@@ -9,17 +9,15 @@ import { first, last } from 'lodash';
import { waffleNodesQuery } from '../../../../plugins/infra/public/containers/waffle/waffle_nodes.gql_query';
import { WaffleNodesQuery } from '../../../../plugins/infra/public/graphql/types';
import { KbnTestProvider } from './types';
import { DATES } from './constants';
import { KbnTestProvider } from './types';
const waffleTests: KbnTestProvider = ({ getService }) => {
const esArchiver = getService('esArchiver');
const client = getService('infraOpsGraphQLClient');
describe('waffle nodes', () => {
describe('6.6.0', () => {
const { min, max } = DATES['6.6.0'].docker;
describe('docker', () => {
before(() => esArchiver.load('infra/6.6.0/docker'));
after(() => esArchiver.unload('infra/6.6.0/docker'));
@@ -30,11 +28,11 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
variables: {
sourceId: 'default',
timerange: {
to: max,
from: min,
to: DATES['6.6.0'].docker.max,
from: DATES['6.6.0'].docker.min,
interval: '1m',
},
metric: { type: 'cpu' },
metric: { type: 'memory' },
path: [{ type: 'containers' }],
},
})
@@ -57,10 +55,10 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
);
expect(firstNode).to.have.property('metric');
expect(firstNode.metric).to.eql({
name: 'cpu',
value: 0,
max: 0,
avg: 0,
name: 'memory',
value: 0.001,
avg: 0.0009444444444444449,
max: 0.001,
__typename: 'InfraNodeMetric',
});
}
@@ -68,10 +66,9 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
});
});
describe('7.0.0', () => {
const { min, max } = DATES['7.0.0'].hosts;
before(() => esArchiver.load('infra/7.0.0/hosts'));
after(() => esArchiver.unload('infra/7.0.0/hosts'));
describe('hosts', () => {
before(() => esArchiver.load('infra/metrics_and_logs'));
after(() => esArchiver.unload('infra/metrics_and_logs'));
it('should basically work', () => {
return client
@@ -80,8 +77,8 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
variables: {
sourceId: 'default',
timerange: {
to: max,
from: min,
to: 1539806283952,
from: 1539805341208,
interval: '1m',
},
metric: { type: 'cpu' },
@@ -93,18 +90,17 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
expect(nodes.length).to.equal(1);
expect(nodes.length).to.equal(6);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
expect(firstNode.path.length).to.equal(1);
expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
expect(first(firstNode.path)).to.have.property('value', 'demo-stack-apache-01');
expect(firstNode).to.have.property('metric');
expect(firstNode.metric).to.eql({
name: 'cpu',
value: 0.0035,
avg: 0.009066666666666666,
max: 0.0684,
value: 0.011,
avg: 0.012215686274509805,
max: 0.020999999999999998,
__typename: 'InfraNodeMetric',
});
}
@@ -118,12 +114,12 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
variables: {
sourceId: 'default',
timerange: {
to: max,
from: min,
to: 1539806283952,
from: 1539805341208,
interval: '1m',
},
metric: { type: 'cpu' },
path: [{ type: 'terms', field: 'cloud.availability_zone' }, { type: 'hosts' }],
path: [{ type: 'terms', field: 'meta.cloud.availability_zone' }, { type: 'hosts' }],
},
})
.then(resp => {
@@ -131,12 +127,15 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
expect(nodes.length).to.equal(1);
expect(nodes.length).to.equal(6);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
expect(firstNode.path.length).to.equal(2);
expect(first(firstNode.path)).to.have.property('value', 'virtualbox');
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
expect(first(firstNode.path)).to.have.property(
'value',
'projects/189716325846/zones/us-central1-f'
);
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-apache-01');
}
});
});
@@ -148,14 +147,14 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
variables: {
sourceId: 'default',
timerange: {
to: max,
from: min,
to: 1539806283952,
from: 1539805341208,
interval: '1m',
},
metric: { type: 'cpu' },
path: [
{ type: 'terms', field: 'cloud.provider' },
{ type: 'terms', field: 'cloud.availability_zone' },
{ type: 'terms', field: 'meta.cloud.provider' },
{ type: 'terms', field: 'meta.cloud.availability_zone' },
{ type: 'hosts' },
],
},
@@ -165,13 +164,12 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
expect(nodes.length).to.equal(1);
expect(nodes.length).to.equal(6);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
expect(firstNode.path.length).to.equal(3);
expect(first(firstNode.path)).to.have.property('value', 'vagrant');
expect(firstNode.path[1]).to.have.property('value', 'virtualbox');
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
expect(first(firstNode.path)).to.have.property('value', 'gce');
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-apache-01');
}
});
});

File diff suppressed because it is too large.