[Infra UI] ECS Migration (#28205) (#29965)

* updating fields

* Migrate to ECS event.dataset

* Migrate fields to ECS fields

* renaming variable

* Reverting back to host.name

* Changing from Top Hits to Terms Agg for getting node name; change host.name back to host.hostname for name.

* Changing back to host.name

* Moving from using the document source to an aggregation for node name

* Updating tests with new data format and data.

* removing unused fields

* adding test data for docker 6.6.0

* Adding docker tests for 6.6.0

* Fixing jest tests

* Fixing tests

* Adding the most critical line of code in the entire project

* Fix ECS-compatible apache rules and restore old ones

* Fix ECS-compatible nginx rules and restore old ones

* Add tests for apache2 in ECS and pre-ECS format

* Add tests for nginx in ECS and pre-ECS format

* removing console.log

* Fixing tests
This commit is contained in:
Chris Cowan 2019-02-04 10:31:54 -07:00 committed by GitHub
parent d8efde6907
commit fe9748583e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
24 changed files with 45158 additions and 391 deletions

View file

@ -47,16 +47,16 @@ const getOptions = (
[InfraNodeType.pod]: ['kubernetes.namespace', 'kubernetes.node.name'].map(mapFieldToOption), [InfraNodeType.pod]: ['kubernetes.namespace', 'kubernetes.node.name'].map(mapFieldToOption),
[InfraNodeType.container]: [ [InfraNodeType.container]: [
'host.name', 'host.name',
'meta.cloud.availability_zone', 'cloud.availability_zone',
'meta.cloud.machine_type', 'cloud.machine_type',
'meta.cloud.project_id', 'cloud.project_id',
'meta.cloud.provider', 'cloud.provider',
].map(mapFieldToOption), ].map(mapFieldToOption),
[InfraNodeType.host]: [ [InfraNodeType.host]: [
'meta.cloud.availability_zone', 'cloud.availability_zone',
'meta.cloud.machine_type', 'cloud.machine_type',
'meta.cloud.project_id', 'cloud.project_id',
'meta.cloud.provider', 'cloud.provider',
].map(mapFieldToOption), ].map(mapFieldToOption),
}; };
} }

View file

@ -157,9 +157,6 @@ export interface InfraDateRangeAggregationResponse {
export interface InfraMetadataAggregationBucket { export interface InfraMetadataAggregationBucket {
key: string; key: string;
names?: {
buckets: InfraMetadataAggregationBucket[];
};
} }
export interface InfraMetadataAggregationResponse { export interface InfraMetadataAggregationResponse {

View file

@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
import { first, get } from 'lodash'; import { get } from 'lodash';
import { InfraSourceConfiguration } from '../../sources'; import { InfraSourceConfiguration } from '../../sources';
import { import {
InfraBackendFrameworkAdapter, InfraBackendFrameworkAdapter,
@ -37,22 +37,19 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
}, },
}, },
}, },
size: 1, size: 0,
_source: [NAME_FIELDS[nodeType]],
aggs: { aggs: {
nodeName: {
terms: {
field: NAME_FIELDS[nodeType],
size: 1,
},
},
metrics: { metrics: {
terms: { terms: {
field: 'metricset.module', field: 'event.dataset',
size: 1000, size: 1000,
}, },
aggs: {
names: {
terms: {
field: 'metricset.name',
size: 1000,
},
},
},
}, },
}, },
}, },
@ -60,7 +57,7 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
const response = await this.framework.callWithRequest< const response = await this.framework.callWithRequest<
any, any,
{ metrics?: InfraMetadataAggregationResponse } { metrics?: InfraMetadataAggregationResponse; nodeName?: InfraMetadataAggregationResponse }
>(req, 'search', metricQuery); >(req, 'search', metricQuery);
const buckets = const buckets =
@ -68,11 +65,9 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
? response.aggregations.metrics.buckets ? response.aggregations.metrics.buckets
: []; : [];
const sampleDoc = first(response.hits.hits);
return { return {
id: nodeId, id: nodeId,
name: get(sampleDoc, `_source.${NAME_FIELDS[nodeType]}`), name: get(response, ['aggregations', 'nodeName', 'buckets', 0, 'key'], nodeId),
buckets, buckets,
}; };
} }
@ -94,22 +89,19 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
}, },
}, },
}, },
size: 1, size: 0,
_source: [NAME_FIELDS[nodeType]],
aggs: { aggs: {
nodeName: {
terms: {
field: NAME_FIELDS[nodeType],
size: 1,
},
},
metrics: { metrics: {
terms: { terms: {
field: 'fileset.module', field: 'event.dataset',
size: 1000, size: 1000,
}, },
aggs: {
names: {
terms: {
field: 'fileset.name',
size: 1000,
},
},
},
}, },
}, },
}, },
@ -117,7 +109,7 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
const response = await this.framework.callWithRequest< const response = await this.framework.callWithRequest<
any, any,
{ metrics?: InfraMetadataAggregationResponse } { metrics?: InfraMetadataAggregationResponse; nodeName?: InfraMetadataAggregationResponse }
>(req, 'search', logQuery); >(req, 'search', logQuery);
const buckets = const buckets =
@ -125,11 +117,9 @@ export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
? response.aggregations.metrics.buckets ? response.aggregations.metrics.buckets
: []; : [];
const sampleDoc = first(response.hits.hits);
return { return {
id: nodeId, id: nodeId,
name: get(sampleDoc, `_source.${NAME_FIELDS[nodeType]}`), name: get(response, ['aggregations', 'nodeName', 'buckets', 0, 'key'], nodeId),
buckets, buckets,
}; };
} }

View file

@ -23,7 +23,7 @@ export const nginxHits: InfraMetricModelCreator = (timeField, indexPattern, inte
}, },
], ],
split_mode: 'filter', split_mode: 'filter',
filter: 'nginx.access.response_code:[200 TO 299]', filter: 'http.response.status_code:[200 TO 299]',
}, },
{ {
id: '300s', id: '300s',
@ -34,7 +34,7 @@ export const nginxHits: InfraMetricModelCreator = (timeField, indexPattern, inte
}, },
], ],
split_mode: 'filter', split_mode: 'filter',
filter: 'nginx.access.response_code:[300 TO 399]', filter: 'http.response.status_code:[300 TO 399]',
}, },
{ {
id: '400s', id: '400s',
@ -45,7 +45,7 @@ export const nginxHits: InfraMetricModelCreator = (timeField, indexPattern, inte
}, },
], ],
split_mode: 'filter', split_mode: 'filter',
filter: 'nginx.access.response_code:[400 TO 499]', filter: 'http.response.status_code:[400 TO 499]',
}, },
{ {
id: '500s', id: '500s',
@ -56,7 +56,7 @@ export const nginxHits: InfraMetricModelCreator = (timeField, indexPattern, inte
}, },
], ],
split_mode: 'filter', split_mode: 'filter',
filter: 'nginx.access.response_code:[500 TO 599]', filter: 'http.response.status_code:[500 TO 599]',
}, },
], ],
}); });

View file

@ -11,5 +11,5 @@ export const NODE_REQUEST_PARTITION_FACTOR = 1.2;
export const NAME_FIELDS = { export const NAME_FIELDS = {
[InfraNodeType.host]: 'host.name', [InfraNodeType.host]: 'host.name',
[InfraNodeType.pod]: 'kubernetes.pod.name', [InfraNodeType.pod]: 'kubernetes.pod.name',
[InfraNodeType.container]: 'docker.container.name', [InfraNodeType.container]: 'container.name',
}; };

View file

@ -9,7 +9,6 @@ import moment from 'moment';
import { InfraMetricType, InfraNode, InfraNodeMetric } from '../../../../graphql/types'; import { InfraMetricType, InfraNode, InfraNodeMetric } from '../../../../graphql/types';
import { InfraBucket, InfraNodeRequestOptions } from '../adapter_types'; import { InfraBucket, InfraNodeRequestOptions } from '../adapter_types';
import { NAME_FIELDS } from '../constants';
import { getBucketSizeInSeconds } from './get_bucket_size_in_seconds'; import { getBucketSizeInSeconds } from './get_bucket_size_in_seconds';
// TODO: Break these function into seperate files and expand beyond just documnet count // TODO: Break these function into seperate files and expand beyond just documnet count
@ -72,9 +71,9 @@ export function createNodeItem(
node: InfraBucket, node: InfraBucket,
bucket: InfraBucket bucket: InfraBucket
): InfraNode { ): InfraNode {
const nodeDoc = get(node, ['nodeDetails', 'hits', 'hits', 0]); const nodeDetails = get(node, ['nodeDetails', 'buckets', 0]);
return { return {
metric: createNodeMetrics(options, node, bucket), metric: createNodeMetrics(options, node, bucket),
path: [{ value: node.key, label: get(nodeDoc, `_source.${NAME_FIELDS[options.nodeType]}`) }], path: [{ value: node.key, label: get(nodeDetails, 'key', node.key) }],
} as InfraNode; } as InfraNode;
} }

View file

@ -26,7 +26,6 @@ const nodeTypeToField = (options: InfraProcesorRequestOptions): string => {
}; };
export const nodesProcessor = (options: InfraProcesorRequestOptions) => { export const nodesProcessor = (options: InfraProcesorRequestOptions) => {
const { fields } = options.nodeOptions.sourceConfiguration;
return (doc: InfraESSearchBody) => { return (doc: InfraESSearchBody) => {
const result = cloneDeep(doc); const result = cloneDeep(doc);
const field = nodeTypeToField(options); const field = nodeTypeToField(options);
@ -43,10 +42,9 @@ export const nodesProcessor = (options: InfraProcesorRequestOptions) => {
set(result, 'aggs.waffle.aggs.nodes.aggs', { set(result, 'aggs.waffle.aggs.nodes.aggs', {
nodeDetails: { nodeDetails: {
top_hits: { terms: {
field: NAME_FIELDS[options.nodeType],
size: 1, size: 1,
_source: { includes: [NAME_FIELDS[options.nodeType]] },
sort: [{ [fields.timestamp]: { order: 'desc' } }],
}, },
}, },
}); });

View file

@ -8,102 +8,243 @@ import { compileFormattingRules } from '../message';
import { filebeatApache2Rules } from './filebeat_apache2'; import { filebeatApache2Rules } from './filebeat_apache2';
const { format } = compileFormattingRules(filebeatApache2Rules); const { format } = compileFormattingRules(filebeatApache2Rules);
describe('Filebeat Rules', () => { describe('Filebeat Rules', () => {
test('Apache2 Access', () => { describe('in ECS format', () => {
const event = { test('Apache2 Access', () => {
'apache2.access': true, const flattenedDocument = {
'apache2.access.remote_ip': '192.168.1.42', '@timestamp': '2016-12-26T16:22:13.000Z',
'apache2.access.user_name': 'admin', 'ecs.version': '1.0.0-beta2',
'apache2.access.method': 'GET', 'event.dataset': 'apache.access',
'apache2.access.url': '/faqs', 'event.module': 'apache',
'apache2.access.http_version': '1.1', 'fileset.name': 'access',
'apache2.access.response_code': '200', 'http.request.method': 'GET',
'apache2.access.body_sent.bytes': 1024, 'http.request.referrer': '-',
}; 'http.response.body.bytes': 499,
const message = format(event); 'http.response.status_code': 404,
expect(message).toEqual([ 'http.version': '1.1',
{ 'input.type': 'log',
constant: '[Apache][access] ', 'log.offset': 73,
}, 'service.type': 'apache',
{ 'source.address': '192.168.33.1',
field: 'apache2.access.remote_ip', 'source.ip': '192.168.33.1',
highlights: [], 'url.original': '/hello',
value: '192.168.1.42', 'user.name': '-',
}, 'user_agent.device': 'Other',
{ 'user_agent.major': '50',
constant: ' ', 'user_agent.minor': '0',
}, 'user_agent.name': 'Firefox',
{ 'user_agent.original':
field: 'apache2.access.user_name', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:50.0) Gecko/20100101 Firefox/50.0',
highlights: [], 'user_agent.os.full_name': 'Mac OS X 10.12',
value: 'admin', 'user_agent.os.major': '10',
}, 'user_agent.os.minor': '12',
{ 'user_agent.os.name': 'Mac OS X',
constant: ' "', };
},
{ expect(format(flattenedDocument)).toMatchInlineSnapshot(`
field: 'apache2.access.method', Array [
highlights: [], Object {
value: 'GET', "constant": "[Apache][access] ",
}, },
{ Object {
constant: ' ', "field": "source.ip",
}, "highlights": Array [],
{ "value": "192.168.33.1",
field: 'apache2.access.url', },
highlights: [], Object {
value: '/faqs', "constant": " ",
}, },
{ Object {
constant: ' HTTP/', "field": "user.name",
}, "highlights": Array [],
{ "value": "-",
field: 'apache2.access.http_version', },
highlights: [], Object {
value: '1.1', "constant": " \\"",
}, },
{ Object {
constant: '" ', "field": "http.request.method",
}, "highlights": Array [],
{ "value": "GET",
field: 'apache2.access.response_code', },
highlights: [], Object {
value: '200', "constant": " ",
}, },
{ Object {
constant: ' ', "field": "url.original",
}, "highlights": Array [],
{ "value": "/hello",
field: 'apache2.access.body_sent.bytes', },
highlights: [], Object {
value: '1024', "constant": " HTTP/",
}, },
]); Object {
"field": "http.version",
"highlights": Array [],
"value": "1.1",
},
Object {
"constant": "\\" ",
},
Object {
"field": "http.response.status_code",
"highlights": Array [],
"value": "404",
},
Object {
"constant": " ",
},
Object {
"field": "http.response.body.bytes",
"highlights": Array [],
"value": "499",
},
]
`);
});
test('Apache2 Error', () => {
const flattenedDocument = {
'@timestamp': '2016-12-26T16:22:08.000Z',
'ecs.version': '1.0.0-beta2',
'event.dataset': 'apache.error',
'event.module': 'apache',
'fileset.name': 'error',
'input.type': 'log',
'log.level': 'error',
'log.offset': 0,
message: 'File does not exist: /var/www/favicon.ico',
'service.type': 'apache',
'source.address': '192.168.33.1',
'source.ip': '192.168.33.1',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[Apache][",
},
Object {
"field": "log.level",
"highlights": Array [],
"value": "error",
},
Object {
"constant": "] ",
},
Object {
"field": "message",
"highlights": Array [],
"value": "File does not exist: /var/www/favicon.ico",
},
]
`);
});
}); });
test('Apache2 Error', () => {
const event = { describe('in pre-ECS format', () => {
'apache2.error.message': test('Apache2 Access', () => {
'AH00489: Apache/2.4.18 (Ubuntu) configured -- resuming normal operations', const flattenedDocument = {
'apache2.error.level': 'notice', 'apache2.access': true,
}; 'apache2.access.remote_ip': '192.168.1.42',
const message = format(event); 'apache2.access.user_name': 'admin',
expect(message).toEqual([ 'apache2.access.method': 'GET',
{ 'apache2.access.url': '/faqs',
constant: '[Apache][', 'apache2.access.http_version': '1.1',
}, 'apache2.access.response_code': '200',
{ 'apache2.access.body_sent.bytes': 1024,
field: 'apache2.error.level', };
highlights: [],
value: 'notice', expect(format(flattenedDocument)).toMatchInlineSnapshot(`
}, Array [
{ Object {
constant: '] ', "constant": "[Apache][access] ",
}, },
{ Object {
field: 'apache2.error.message', "field": "apache2.access.remote_ip",
highlights: [], "highlights": Array [],
value: 'AH00489: Apache/2.4.18 (Ubuntu) configured -- resuming normal operations', "value": "192.168.1.42",
}, },
]); Object {
"constant": " ",
},
Object {
"field": "apache2.access.user_name",
"highlights": Array [],
"value": "admin",
},
Object {
"constant": " \\"",
},
Object {
"field": "apache2.access.method",
"highlights": Array [],
"value": "GET",
},
Object {
"constant": " ",
},
Object {
"field": "apache2.access.url",
"highlights": Array [],
"value": "/faqs",
},
Object {
"constant": " HTTP/",
},
Object {
"field": "apache2.access.http_version",
"highlights": Array [],
"value": "1.1",
},
Object {
"constant": "\\" ",
},
Object {
"field": "apache2.access.response_code",
"highlights": Array [],
"value": "200",
},
Object {
"constant": " ",
},
Object {
"field": "apache2.access.body_sent.bytes",
"highlights": Array [],
"value": "1024",
},
]
`);
});
test('Apache2 Error', () => {
const flattenedDocument = {
'apache2.error.message':
'AH00489: Apache/2.4.18 (Ubuntu) configured -- resuming normal operations',
'apache2.error.level': 'notice',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[Apache][",
},
Object {
"field": "apache2.error.level",
"highlights": Array [],
"value": "notice",
},
Object {
"constant": "] ",
},
Object {
"field": "apache2.error.message",
"highlights": Array [],
"value": "AH00489: Apache/2.4.18 (Ubuntu) configured -- resuming normal operations",
},
]
`);
});
}); });
}); });

View file

@ -6,6 +6,59 @@
export const filebeatApache2Rules = [ export const filebeatApache2Rules = [
{ {
// ECS
when: {
values: {
'event.dataset': 'apache.access',
},
},
format: [
{
constant: '[Apache][access] ',
},
{
field: 'source.ip',
},
{
constant: ' ',
},
{
field: 'user.name',
},
{
constant: ' "',
},
{
field: 'http.request.method',
},
{
constant: ' ',
},
{
field: 'url.original',
},
{
constant: ' HTTP/',
},
{
field: 'http.version',
},
{
constant: '" ',
},
{
field: 'http.response.status_code',
},
{
constant: ' ',
},
{
field: 'http.response.body.bytes',
},
],
},
{
// pre-ECS
when: { when: {
exists: ['apache2.access'], exists: ['apache2.access'],
}, },
@ -55,6 +108,29 @@ export const filebeatApache2Rules = [
], ],
}, },
{ {
// ECS
when: {
values: {
'event.dataset': 'apache.error',
},
},
format: [
{
constant: '[Apache][',
},
{
field: 'log.level',
},
{
constant: '] ',
},
{
field: 'message',
},
],
},
{
// pre-ECS
when: { when: {
exists: ['apache2.error.message'], exists: ['apache2.error.message'],
}, },

View file

@ -8,106 +8,249 @@ import { compileFormattingRules } from '../message';
import { filebeatNginxRules } from './filebeat_nginx'; import { filebeatNginxRules } from './filebeat_nginx';
const { format } = compileFormattingRules(filebeatNginxRules); const { format } = compileFormattingRules(filebeatNginxRules);
describe('Filebeat Rules', () => { describe('Filebeat Rules', () => {
test('Nginx Access Rule', () => { describe('in ECS format', () => {
const event = { test('Nginx Access', () => {
'nginx.access': true, const flattenedDocument = {
'nginx.access.remote_ip': '192.168.1.42', '@timestamp': '2017-05-29T19:02:48.000Z',
'nginx.access.user_name': 'admin', 'ecs.version': '1.0.0-beta2',
'nginx.access.method': 'GET', 'event.dataset': 'nginx.access',
'nginx.access.url': '/faq', 'event.module': 'nginx',
'nginx.access.http_version': '1.1', 'fileset.name': 'access',
'nginx.access.body_sent.bytes': 1024, 'http.request.method': 'GET',
'nginx.access.response_code': 200, 'http.request.referrer': '-',
}; 'http.response.body.bytes': 612,
const message = format(event); 'http.response.status_code': 404,
expect(message).toEqual([ 'http.version': '1.1',
{ 'input.type': 'log',
constant: '[Nginx][access] ', 'log.offset': 183,
}, 'service.type': 'nginx',
{ 'source.ip': '172.17.0.1',
field: 'nginx.access.remote_ip', 'url.original': '/stringpatch',
highlights: [], 'user.name': '-',
value: '192.168.1.42', 'user_agent.device': 'Other',
}, 'user_agent.major': '15',
{ 'user_agent.minor': '0',
constant: ' ', 'user_agent.name': 'Firefox Alpha',
}, 'user_agent.original':
{ 'Mozilla/5.0 (Windows NT 6.1; rv:15.0) Gecko/20120716 Firefox/15.0a2',
field: 'nginx.access.user_name', 'user_agent.os.full_name': 'Windows 7',
highlights: [], 'user_agent.os.name': 'Windows 7',
value: 'admin', 'user_agent.patch': 'a2',
}, };
{
constant: ' "', expect(format(flattenedDocument)).toMatchInlineSnapshot(`
}, Array [
{ Object {
field: 'nginx.access.method', "constant": "[Nginx][access] ",
highlights: [], },
value: 'GET', Object {
}, "field": "source.ip",
{ "highlights": Array [],
constant: ' ', "value": "172.17.0.1",
}, },
{ Object {
field: 'nginx.access.url', "constant": " ",
highlights: [], },
value: '/faq', Object {
}, "field": "user.name",
{ "highlights": Array [],
constant: ' HTTP/', "value": "-",
}, },
{ Object {
field: 'nginx.access.http_version', "constant": " \\"",
highlights: [], },
value: '1.1', Object {
}, "field": "http.request.method",
{ "highlights": Array [],
constant: '" ', "value": "GET",
}, },
{ Object {
field: 'nginx.access.response_code', "constant": " ",
highlights: [], },
value: '200', Object {
}, "field": "url.original",
{ "highlights": Array [],
constant: ' ', "value": "/stringpatch",
}, },
{ Object {
field: 'nginx.access.body_sent.bytes', "constant": " HTTP/",
highlights: [], },
value: '1024', Object {
}, "field": "http.version",
]); "highlights": Array [],
"value": "1.1",
},
Object {
"constant": "\\" ",
},
Object {
"field": "http.response.status_code",
"highlights": Array [],
"value": "404",
},
Object {
"constant": " ",
},
Object {
"field": "http.response.body.bytes",
"highlights": Array [],
"value": "612",
},
]
`);
});
test('Nginx Error', () => {
const flattenedDocument = {
'@timestamp': '2016-10-25T14:49:34.000Z',
'ecs.version': '1.0.0-beta2',
'event.dataset': 'nginx.error',
'event.module': 'nginx',
'fileset.name': 'error',
'input.type': 'log',
'log.level': 'error',
'log.offset': 0,
message:
'open() "/usr/local/Cellar/nginx/1.10.2_1/html/favicon.ico" failed (2: No such file or directory), client: 127.0.0.1, server: localhost, request: "GET /favicon.ico HTTP/1.1", host: "localhost:8080", referrer: "http://localhost:8080/"',
'nginx.error.connection_id': 1,
'process.pid': 54053,
'process.thread.id': 0,
'service.type': 'nginx',
};
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[Nginx]",
},
Object {
"constant": "[",
},
Object {
"field": "log.level",
"highlights": Array [],
"value": "error",
},
Object {
"constant": "] ",
},
Object {
"field": "message",
"highlights": Array [],
"value": "open() \\"/usr/local/Cellar/nginx/1.10.2_1/html/favicon.ico\\" failed (2: No such file or directory), client: 127.0.0.1, server: localhost, request: \\"GET /favicon.ico HTTP/1.1\\", host: \\"localhost:8080\\", referrer: \\"http://localhost:8080/\\"",
},
]
`);
});
}); });
test('Nginx Access Rule', () => {
const event = { describe('in pre-ECS format', () => {
'nginx.error.message': test('Nginx Access', () => {
'connect() failed (111: Connection refused) while connecting to upstream, client: 127.0.0.1, server: localhost, request: "GET /php-status?json= HTTP/1.1", upstream: "fastcgi://[::1]:9000", host: "localhost"', const flattenedDocument = {
'nginx.error.level': 'error', 'nginx.access': true,
}; 'nginx.access.remote_ip': '192.168.1.42',
const message = format(event); 'nginx.access.user_name': 'admin',
expect(message).toEqual([ 'nginx.access.method': 'GET',
{ 'nginx.access.url': '/faq',
constant: '[Nginx]', 'nginx.access.http_version': '1.1',
}, 'nginx.access.body_sent.bytes': 1024,
{ 'nginx.access.response_code': 200,
constant: '[', };
},
{ expect(format(flattenedDocument)).toMatchInlineSnapshot(`
field: 'nginx.error.level', Array [
highlights: [], Object {
value: 'error', "constant": "[Nginx][access] ",
}, },
{ Object {
constant: '] ', "field": "nginx.access.remote_ip",
}, "highlights": Array [],
{ "value": "192.168.1.42",
field: 'nginx.error.message', },
highlights: [], Object {
value: "constant": " ",
},
Object {
"field": "nginx.access.user_name",
"highlights": Array [],
"value": "admin",
},
Object {
"constant": " \\"",
},
Object {
"field": "nginx.access.method",
"highlights": Array [],
"value": "GET",
},
Object {
"constant": " ",
},
Object {
"field": "nginx.access.url",
"highlights": Array [],
"value": "/faq",
},
Object {
"constant": " HTTP/",
},
Object {
"field": "nginx.access.http_version",
"highlights": Array [],
"value": "1.1",
},
Object {
"constant": "\\" ",
},
Object {
"field": "nginx.access.response_code",
"highlights": Array [],
"value": "200",
},
Object {
"constant": " ",
},
Object {
"field": "nginx.access.body_sent.bytes",
"highlights": Array [],
"value": "1024",
},
]
`);
});
test('Nginx Error', () => {
const flattenedDocument = {
'nginx.error.message':
'connect() failed (111: Connection refused) while connecting to upstream, client: 127.0.0.1, server: localhost, request: "GET /php-status?json= HTTP/1.1", upstream: "fastcgi://[::1]:9000", host: "localhost"', 'connect() failed (111: Connection refused) while connecting to upstream, client: 127.0.0.1, server: localhost, request: "GET /php-status?json= HTTP/1.1", upstream: "fastcgi://[::1]:9000", host: "localhost"',
}, 'nginx.error.level': 'error',
]); };
expect(format(flattenedDocument)).toMatchInlineSnapshot(`
Array [
Object {
"constant": "[Nginx]",
},
Object {
"constant": "[",
},
Object {
"field": "nginx.error.level",
"highlights": Array [],
"value": "error",
},
Object {
"constant": "] ",
},
Object {
"field": "nginx.error.message",
"highlights": Array [],
"value": "connect() failed (111: Connection refused) while connecting to upstream, client: 127.0.0.1, server: localhost, request: \\"GET /php-status?json= HTTP/1.1\\", upstream: \\"fastcgi://[::1]:9000\\", host: \\"localhost\\"",
},
]
`);
});
}); });
}); });

View file

@ -6,6 +6,59 @@
export const filebeatNginxRules = [ export const filebeatNginxRules = [
{ {
// ECS
when: {
values: {
'event.dataset': 'nginx.access',
},
},
format: [
{
constant: '[Nginx][access] ',
},
{
field: 'source.ip',
},
{
constant: ' ',
},
{
field: 'user.name',
},
{
constant: ' "',
},
{
field: 'http.request.method',
},
{
constant: ' ',
},
{
field: 'url.original',
},
{
constant: ' HTTP/',
},
{
field: 'http.version',
},
{
constant: '" ',
},
{
field: 'http.response.status_code',
},
{
constant: ' ',
},
{
field: 'http.response.body.bytes',
},
],
},
{
// pre-ECS
when: { when: {
exists: ['nginx.access'], exists: ['nginx.access'],
}, },
@ -55,6 +108,32 @@ export const filebeatNginxRules = [
], ],
}, },
{ {
// ECS
when: {
values: {
'event.dataset': 'nginx.error',
},
},
format: [
{
constant: '[Nginx]',
},
{
constant: '[',
},
{
field: 'log.level',
},
{
constant: '] ',
},
{
field: 'message',
},
],
},
{
// pre-ECS
when: { when: {
exists: ['nginx.error.message'], exists: ['nginx.error.message'],
}, },

View file

@ -23,14 +23,14 @@ export const builtinRules = [
...genericRules, ...genericRules,
{ {
when: { when: {
exists: ['source'], exists: ['log.path'],
}, },
format: [ format: [
{ {
constant: 'failed to format message from ', constant: 'failed to format message from ',
}, },
{ {
field: 'source', field: 'log.path',
}, },
], ],
}, },

View file

@ -43,17 +43,7 @@ export class InfraMetadataDomain {
const pickMetadata = (buckets: InfraMetadataAggregationBucket[]): string[] => { const pickMetadata = (buckets: InfraMetadataAggregationBucket[]): string[] => {
if (buckets) { if (buckets) {
const metadata = buckets const metadata = buckets.map(bucket => bucket.key);
.map(module => {
if (module.names) {
return module.names.buckets.map(name => {
return `${module.key}.${name.key}`;
});
} else {
return [];
}
})
.reduce((a: string[], b: string[]) => a.concat(b), []);
return metadata; return metadata;
} else { } else {
return []; return [];

View file

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export const DATES = {
'7.0.0': {
hosts: {
min: 1547571261002,
max: 1547571831033,
},
},
'6.6.0': {
docker: {
min: 1547578132289,
max: 1547579090048,
},
},
};

View file

@ -4,7 +4,6 @@
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
export default function ({ loadTestFile }) { export default function ({ loadTestFile }) {
describe('InfraOps GraphQL Endpoints', () => { describe('InfraOps GraphQL Endpoints', () => {
loadTestFile(require.resolve('./metadata')); loadTestFile(require.resolve('./metadata'));

View file

@ -15,28 +15,56 @@ const metadataTests: KbnTestProvider = ({ getService }) => {
const client = getService('infraOpsGraphQLClient'); const client = getService('infraOpsGraphQLClient');
describe('metadata', () => { describe('metadata', () => {
before(() => esArchiver.load('infra/metrics_and_logs')); describe('7.0.0', () => {
after(() => esArchiver.unload('infra/metrics_and_logs')); before(() => esArchiver.load('infra/7.0.0/hosts'));
after(() => esArchiver.unload('infra/7.0.0/hosts'));
it('supports the metadata container query', () => { it('hosts', () => {
return client return client
.query<MetadataQuery.Query>({ .query<MetadataQuery.Query>({
query: metadataQuery, query: metadataQuery,
variables: { variables: {
sourceId: 'default', sourceId: 'default',
nodeId: 'demo-stack-nginx-01', nodeId: 'demo-stack-mysql-01',
nodeType: 'host', nodeType: 'host',
}, },
}) })
.then(resp => { .then(resp => {
const metadata = resp.data.source.metadataByNode; const metadata = resp.data.source.metadataByNode;
if (metadata) { if (metadata) {
expect(metadata.features.length).to.be(14); expect(metadata.features.length).to.be(14);
expect(metadata.name).to.equal('demo-stack-nginx-01'); expect(metadata.name).to.equal('demo-stack-mysql-01');
} else { } else {
throw new Error('Metadata should never be empty'); throw new Error('Metadata should never be empty');
} }
}); });
});
});
describe('6.6.0', () => {
before(() => esArchiver.load('infra/6.6.0/docker'));
after(() => esArchiver.unload('infra/6.6.0/docker'));
it('docker', () => {
return client
.query<MetadataQuery.Query>({
query: metadataQuery,
variables: {
sourceId: 'default',
nodeId: '631f36a845514442b93c3fdd2dc91bcd8feb680b8ac5832c7fb8fdc167bb938e',
nodeType: 'container',
},
})
.then(resp => {
const metadata = resp.data.source.metadataByNode;
if (metadata) {
expect(metadata.features.length).to.be(10);
expect(metadata.name).to.equal('docker-autodiscovery_elasticsearch_1');
} else {
throw new Error('Metadata should never be empty');
}
});
});
}); });
}); });
}; };

View file

@ -11,13 +11,16 @@ import { metricsQuery } from '../../../../plugins/infra/public/containers/metric
import { MetricsQuery } from '../../../../plugins/infra/public/graphql/types'; import { MetricsQuery } from '../../../../plugins/infra/public/graphql/types';
import { KbnTestProvider } from './types'; import { KbnTestProvider } from './types';
import { DATES } from './constants';
const { min, max } = DATES['7.0.0'].hosts;
const metricTests: KbnTestProvider = ({ getService }) => { const metricTests: KbnTestProvider = ({ getService }) => {
const esArchiver = getService('esArchiver'); const esArchiver = getService('esArchiver');
const client = getService('infraOpsGraphQLClient'); const client = getService('infraOpsGraphQLClient');
describe('metrics', () => { describe('metrics', () => {
before(() => esArchiver.load('infra/metrics_and_logs')); before(() => esArchiver.load('infra/7.0.0/hosts'));
after(() => esArchiver.unload('infra/metrics_and_logs')); after(() => esArchiver.unload('infra/7.0.0/hosts'));
it('should basically work', () => { it('should basically work', () => {
return client return client
@ -27,11 +30,11 @@ const metricTests: KbnTestProvider = ({ getService }) => {
sourceId: 'default', sourceId: 'default',
metrics: ['hostCpuUsage'], metrics: ['hostCpuUsage'],
timerange: { timerange: {
to: 1539806283952, to: max,
from: 1539805341208, from: min,
interval: '>=1m', interval: '>=1m',
}, },
nodeId: 'demo-stack-nginx-01', nodeId: 'demo-stack-mysql-01',
nodeType: 'host', nodeType: 'host',
}, },
}) })
@ -45,8 +48,8 @@ const metricTests: KbnTestProvider = ({ getService }) => {
expect(series).to.have.property('id', 'user'); expect(series).to.have.property('id', 'user');
expect(series).to.have.property('data'); expect(series).to.have.property('data');
const datapoint = last(series.data); const datapoint = last(series.data);
expect(datapoint).to.have.property('timestamp', 1539806220000); expect(datapoint).to.have.property('timestamp', 1547571720000);
expect(datapoint).to.have.property('value', 0.0065); expect(datapoint).to.have.property('value', 0.0018333333333333333);
}); });
}); });
@ -58,11 +61,11 @@ const metricTests: KbnTestProvider = ({ getService }) => {
sourceId: 'default', sourceId: 'default',
metrics: ['hostCpuUsage', 'hostLoad'], metrics: ['hostCpuUsage', 'hostLoad'],
timerange: { timerange: {
to: 1539806283952, to: max,
from: 1539805341208, from: min,
interval: '>=1m', interval: '>=1m',
}, },
nodeId: 'demo-stack-nginx-01', nodeId: 'demo-stack-mysql-01',
nodeType: 'host', nodeType: 'host',
}, },
}) })

View file

@ -11,116 +11,170 @@ import { waffleNodesQuery } from '../../../../plugins/infra/public/containers/wa
import { WaffleNodesQuery } from '../../../../plugins/infra/public/graphql/types'; import { WaffleNodesQuery } from '../../../../plugins/infra/public/graphql/types';
import { KbnTestProvider } from './types'; import { KbnTestProvider } from './types';
import { DATES } from './constants';
const waffleTests: KbnTestProvider = ({ getService }) => { const waffleTests: KbnTestProvider = ({ getService }) => {
const esArchiver = getService('esArchiver'); const esArchiver = getService('esArchiver');
const client = getService('infraOpsGraphQLClient'); const client = getService('infraOpsGraphQLClient');
describe('waffle nodes', () => { describe('waffle nodes', () => {
before(() => esArchiver.load('infra/metrics_and_logs')); describe('6.6.0', () => {
after(() => esArchiver.unload('infra/metrics_and_logs')); const { min, max } = DATES['6.6.0'].docker;
before(() => esArchiver.load('infra/6.6.0/docker'));
after(() => esArchiver.unload('infra/6.6.0/docker'));
it('should basically work', () => { it('should basically work', () => {
return client return client
.query<WaffleNodesQuery.Query>({ .query<WaffleNodesQuery.Query>({
query: waffleNodesQuery, query: waffleNodesQuery,
variables: { variables: {
sourceId: 'default', sourceId: 'default',
timerange: { timerange: {
to: 1539806283952, to: max,
from: 1539805341208, from: min,
interval: '1m', interval: '1m',
},
metric: { type: 'cpu' },
path: [{ type: 'containers' }],
}, },
metric: { type: 'cpu' }, })
path: [{ type: 'hosts' }], .then(resp => {
}, const { map } = resp.data.source;
}) expect(map).to.have.property('nodes');
.then(resp => { if (map) {
const { map } = resp.data.source; const { nodes } = map;
expect(map).to.have.property('nodes'); expect(nodes.length).to.equal(5);
if (map) { const firstNode = first(nodes);
const { nodes } = map; expect(firstNode).to.have.property('path');
expect(nodes.length).to.equal(6); expect(firstNode.path.length).to.equal(1);
const firstNode = first(nodes); expect(first(firstNode.path)).to.have.property(
expect(firstNode).to.have.property('path'); 'value',
expect(firstNode.path.length).to.equal(1); '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a'
expect(first(firstNode.path)).to.have.property('value', 'demo-stack-apache-01'); );
expect(firstNode).to.have.property('metric'); expect(first(firstNode.path)).to.have.property(
expect(firstNode.metric).to.eql({ 'label',
name: 'cpu', 'docker-autodiscovery_nginx_1'
value: 0.011, );
avg: 0.012215686274509805, expect(firstNode).to.have.property('metric');
max: 0.020999999999999998, expect(firstNode.metric).to.eql({
__typename: 'InfraNodeMetric', name: 'cpu',
}); value: 0,
} max: 0,
}); avg: 0,
__typename: 'InfraNodeMetric',
});
}
});
});
}); });
it('should basically work with 1 grouping', () => { describe('7.0.0', () => {
return client const { min, max } = DATES['7.0.0'].hosts;
.query<WaffleNodesQuery.Query>({ before(() => esArchiver.load('infra/7.0.0/hosts'));
query: waffleNodesQuery, after(() => esArchiver.unload('infra/7.0.0/hosts'));
variables: {
sourceId: 'default',
timerange: {
to: 1539806283952,
from: 1539805341208,
interval: '1m',
},
metric: { type: 'cpu' },
path: [{ type: 'terms', field: 'meta.cloud.availability_zone' }, { type: 'hosts' }],
},
})
.then(resp => {
const { map } = resp.data.source;
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
expect(nodes.length).to.equal(6);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
expect(firstNode.path.length).to.equal(2);
expect(first(firstNode.path)).to.have.property(
'value',
'projects/189716325846/zones/us-central1-f'
);
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-apache-01');
}
});
});
it('should basically work with 2 grouping', () => { it('should basically work', () => {
return client return client
.query<WaffleNodesQuery.Query>({ .query<WaffleNodesQuery.Query>({
query: waffleNodesQuery, query: waffleNodesQuery,
variables: { variables: {
sourceId: 'default', sourceId: 'default',
timerange: { timerange: {
to: 1539806283952, to: max,
from: 1539805341208, from: min,
interval: '1m', interval: '1m',
},
metric: { type: 'cpu' },
path: [{ type: 'hosts' }],
}, },
metric: { type: 'cpu' }, })
path: [ .then(resp => {
{ type: 'terms', field: 'meta.cloud.provider' }, const { map } = resp.data.source;
{ type: 'terms', field: 'meta.cloud.availability_zone' }, expect(map).to.have.property('nodes');
{ type: 'hosts' }, if (map) {
], const { nodes } = map;
}, expect(nodes.length).to.equal(1);
}) const firstNode = first(nodes);
.then(resp => { expect(firstNode).to.have.property('path');
const { map } = resp.data.source; expect(firstNode.path.length).to.equal(1);
expect(map).to.have.property('nodes'); expect(first(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
if (map) { expect(first(firstNode.path)).to.have.property('label', 'demo-stack-mysql-01');
const { nodes } = map; expect(firstNode).to.have.property('metric');
expect(nodes.length).to.equal(6); expect(firstNode.metric).to.eql({
const firstNode = first(nodes); name: 'cpu',
expect(firstNode).to.have.property('path'); value: 0.0035,
expect(firstNode.path.length).to.equal(3); avg: 0.009066666666666666,
expect(first(firstNode.path)).to.have.property('value', 'gce'); max: 0.0684,
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-apache-01'); __typename: 'InfraNodeMetric',
} });
}); }
});
});
it('should basically work with 1 grouping', () => {
return client
.query<WaffleNodesQuery.Query>({
query: waffleNodesQuery,
variables: {
sourceId: 'default',
timerange: {
to: max,
from: min,
interval: '1m',
},
metric: { type: 'cpu' },
path: [{ type: 'terms', field: 'cloud.availability_zone' }, { type: 'hosts' }],
},
})
.then(resp => {
const { map } = resp.data.source;
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
expect(nodes.length).to.equal(1);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
expect(firstNode.path.length).to.equal(2);
expect(first(firstNode.path)).to.have.property('value', 'virtualbox');
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
}
});
});
it('should basically work with 2 grouping', () => {
return client
.query<WaffleNodesQuery.Query>({
query: waffleNodesQuery,
variables: {
sourceId: 'default',
timerange: {
to: max,
from: min,
interval: '1m',
},
metric: { type: 'cpu' },
path: [
{ type: 'terms', field: 'cloud.provider' },
{ type: 'terms', field: 'cloud.availability_zone' },
{ type: 'hosts' },
],
},
})
.then(resp => {
const { map } = resp.data.source;
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
expect(nodes.length).to.equal(1);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
expect(firstNode.path.length).to.equal(3);
expect(first(firstNode.path)).to.have.property('value', 'vagrant');
expect(firstNode.path[1]).to.have.property('value', 'virtualbox');
expect(last(firstNode.path)).to.have.property('value', 'demo-stack-mysql-01');
}
});
});
}); });
}); });
}; };

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff