Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 01:38:56 -04:00
## Summary

This PR updates the Events Table with the following columns:

* Module/Dataset
* Event Category
* User
* Message

It also removes the Location column. Each column has also been updated to use the `getRowItemDraggables()` helper, so they're now draggable to the timeline as well (see the sketch after the checklist). The Host column now also links out to HostDetails.

Reference PR: https://github.com/elastic/kibana/pull/33952
Issue: https://github.com/elastic/ingest-dev/issues/426

### Checklist

Use ~~strikethroughs~~ to remove checklist items you don't feel are applicable to this PR.

- [ ] ~~This was checked for cross-browser compatibility, [including a check against IE11](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#cross-browser-compatibility)~~
- [x] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text, and includes [i18n support](https://github.com/elastic/kibana/blob/master/packages/kbn-i18n/README.md)
- [ ] ~~[Documentation](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#writing-documentation) was added for features that require explanation or tutorials~~
- [x] [Unit or functional tests](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#cross-browser-compatibility) were updated or added to match the most common scenarios
- [ ] ~~This was checked for [keyboard-only and screenreader accessibility](https://developer.mozilla.org/en-US/docs/Learn/Tools_and_testing/Cross_browser_testing/Accessibility#Accessibility_testing_checklist)~~

### For maintainers

- [ ] ~~This was checked for breaking API changes and was [labeled appropriately](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#release-notes-process)~~
- [ ] ~~This includes a feature addition or change that requires a release note and was [labeled appropriately](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#release-notes-process)~~
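For reviewers skimming the diff, each column now follows roughly the pattern below. This is a sketch distilled from the `index.tsx` hunks further down; the relative import paths and the trimmed `node` type are illustrative only, not the plugin's exact types.

```tsx
import { getOr } from 'lodash/fp';
import React from 'react';

// Helpers shown in the diff below; paths abbreviated for illustration.
import { HostDetailsLink } from '../../../links';
import { getRowItemDraggables } from '../../../tables/helpers';

// Sketch of the Host column after this change.
export const hostNameColumn = (pageType: string) => ({
  sortable: true,
  truncateText: false,
  // Each host.name value is wrapped so it can be dragged onto the timeline,
  // and is rendered as a link to the Host Details page.
  render: ({ node }: { node: { _id: string } }) =>
    getRowItemDraggables({
      rowItems: getOr(null, 'host.name', node),
      attrName: 'host.name',
      idPrefix: `host-${pageType}-events-table-${node._id}`,
      render: (item: string) => <HostDetailsLink hostName={item} />,
    }),
});
```

The same `getRowItemDraggables()` call is repeated for `event.module`, `event.dataset`, `event.category`, `event.action`, `user.name`, and `message`, with only the field name and the optional `render` decorator changing; the Source and Destination columns use the singular `getRowItemDraggable()` with an `IPDetailsLink`.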
This commit is contained in:
parent
e142ecd6c1
commit
adb433530e
10 changed files with 268 additions and 1851 deletions
|
@@ -5,80 +5,134 @@ exports[`Load More Events Table Component rendering it renders the default Event
|
|||
data={
|
||||
Array [
|
||||
Object {
|
||||
"_id": "Fo8nKWgBiyhPd5Zo3cib",
|
||||
"_index": "auditbeat-7.0.0-2019.01.07",
|
||||
"_id": "aXSdmmoB9v5HJNSHzQ0Y",
|
||||
"_index": "filebeat-8.0.0-2019.04.20-000003",
|
||||
"destination": Object {
|
||||
"ip": Array [
|
||||
"24.168.54.169",
|
||||
"10.0.0.111",
|
||||
],
|
||||
"port": Array [
|
||||
62123,
|
||||
37140,
|
||||
],
|
||||
},
|
||||
"event": Object {
|
||||
"action": null,
|
||||
"category": null,
|
||||
"dataset": Array [
|
||||
"suricata.eve",
|
||||
],
|
||||
"id": null,
|
||||
"module": Array [
|
||||
"system",
|
||||
"suricata",
|
||||
],
|
||||
"severity": Array [
|
||||
3,
|
||||
],
|
||||
"severity": null,
|
||||
"type": null,
|
||||
},
|
||||
"geo": null,
|
||||
"host": Object {
|
||||
"id": Array [
|
||||
"aa7ca589f1b8220002f2fc61c64cfbf1",
|
||||
],
|
||||
"ip": null,
|
||||
"name": Array [
|
||||
"siem-general",
|
||||
"siem-kibana",
|
||||
],
|
||||
},
|
||||
"message": Array [
|
||||
"Generic Protocol Command Decode",
|
||||
],
|
||||
"source": Object {
|
||||
"ip": Array [
|
||||
"10.142.0.6",
|
||||
"10.47.2.208",
|
||||
],
|
||||
"port": Array [
|
||||
9200,
|
||||
110,
|
||||
],
|
||||
},
|
||||
"suricata": null,
|
||||
"timestamp": "2019-01-07T16:31:44.036Z",
|
||||
"suricata": Object {
|
||||
"eve": Object {
|
||||
"alert": Object {
|
||||
"signature": Array [
|
||||
"SURICATA SMTP no server welcome message",
|
||||
],
|
||||
"signature_id": Array [
|
||||
2220006,
|
||||
],
|
||||
},
|
||||
"flow_id": Array [
|
||||
1026250231579890,
|
||||
],
|
||||
"proto": null,
|
||||
},
|
||||
},
|
||||
"timestamp": "2019-05-09T17:15:47.987Z",
|
||||
"user": null,
|
||||
"zeek": null,
|
||||
},
|
||||
Object {
|
||||
"_id": "F48nKWgBiyhPd5Zo3cib",
|
||||
"_index": "auditbeat-7.0.0-2019.01.07",
|
||||
"_id": "aHSdmmoB9v5HJNSHzQ0Y",
|
||||
"_index": "filebeat-8.0.0-2019.04.20-000003",
|
||||
"destination": Object {
|
||||
"ip": Array [
|
||||
"24.168.54.169",
|
||||
"10.225.222.243",
|
||||
],
|
||||
"port": Array [
|
||||
62145,
|
||||
8080,
|
||||
],
|
||||
},
|
||||
"event": Object {
|
||||
"action": null,
|
||||
"category": null,
|
||||
"dataset": Array [
|
||||
"suricata.eve",
|
||||
],
|
||||
"id": null,
|
||||
"module": Array [
|
||||
"system",
|
||||
"suricata",
|
||||
],
|
||||
"severity": Array [
|
||||
1,
|
||||
],
|
||||
"severity": null,
|
||||
"type": null,
|
||||
},
|
||||
"geo": null,
|
||||
"host": Object {
|
||||
"id": Array [
|
||||
"aa7ca589f1b8220002f2fc61c64cfbf1",
|
||||
],
|
||||
"ip": null,
|
||||
"name": Array [
|
||||
"siem-general",
|
||||
"siem-kibana",
|
||||
],
|
||||
},
|
||||
"message": Array [
|
||||
"A Network Trojan was detected",
|
||||
],
|
||||
"source": Object {
|
||||
"ip": Array [
|
||||
"10.142.0.6",
|
||||
"10.47.6.59",
|
||||
],
|
||||
"port": Array [
|
||||
9200,
|
||||
41155,
|
||||
],
|
||||
},
|
||||
"suricata": null,
|
||||
"timestamp": "2019-01-07T16:31:44.036Z",
|
||||
"suricata": Object {
|
||||
"eve": Object {
|
||||
"alert": Object {
|
||||
"signature": Array [
|
||||
"ET TROJAN Generic - POST To .php w/Extended ASCII Characters",
|
||||
],
|
||||
"signature_id": Array [
|
||||
2017259,
|
||||
],
|
||||
},
|
||||
"flow_id": Array [
|
||||
2102212323729057,
|
||||
],
|
||||
"proto": null,
|
||||
},
|
||||
},
|
||||
"timestamp": "2019-05-09T17:15:47.604Z",
|
||||
"user": null,
|
||||
"zeek": null,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
|
|
@@ -4,25 +4,19 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import euiDarkVars from '@elastic/eui/dist/eui_theme_dark.json';
|
||||
import { shallow } from 'enzyme';
|
||||
import toJson from 'enzyme-to-json';
|
||||
import { cloneDeep, getOr } from 'lodash/fp';
|
||||
import { getOr } from 'lodash/fp';
|
||||
import * as React from 'react';
|
||||
import { Provider as ReduxStoreProvider } from 'react-redux';
|
||||
import { ThemeProvider } from 'styled-components';
|
||||
import { mountWithIntl } from 'test_utils/enzyme_helpers';
|
||||
|
||||
import { mockGlobalState } from '../../../../mock';
|
||||
import { createStore, hostsModel, State } from '../../../../store';
|
||||
import { getEmptyValue } from '../../../empty_value';
|
||||
|
||||
import { EventsTable, formatIpSafely } from './index';
|
||||
import { EventsTable } from './index';
|
||||
import { mockData } from './mock';
|
||||
|
||||
describe('Load More Events Table Component', () => {
|
||||
const theme = () => ({ eui: euiDarkVars, darkMode: true });
|
||||
|
||||
const loadMore = jest.fn();
|
||||
const state: State = mockGlobalState;
|
||||
|
||||
|
@@ -52,64 +46,4 @@ describe('Load More Events Table Component', () => {
|
|||
expect(toJson(wrapper)).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatIpSafely', () => {
|
||||
test('formatIpSafely happy path', () => {
|
||||
const wrapperSourceIp = mountWithIntl(
|
||||
<ThemeProvider theme={theme}>
|
||||
<p>{formatIpSafely('source.ip[0]', mockData.Events.edges[0].node)}</p>
|
||||
</ThemeProvider>
|
||||
);
|
||||
|
||||
const wrapperHostName = mountWithIntl(
|
||||
<ThemeProvider theme={theme}>
|
||||
<p>{formatIpSafely('host.name[0]', mockData.Events.edges[0].node)}</p>
|
||||
</ThemeProvider>
|
||||
);
|
||||
|
||||
expect(wrapperSourceIp.text()).toBe('10.142.0.6');
|
||||
expect(wrapperHostName.text()).toBe('siem-general');
|
||||
});
|
||||
|
||||
test('formatIpSafely unhappy path', () => {
|
||||
const wrapperSourceIp = mountWithIntl(
|
||||
<ThemeProvider theme={theme}>
|
||||
<p>{formatIpSafely('.ip', mockData.Events.edges[0].node)}</p>
|
||||
</ThemeProvider>
|
||||
);
|
||||
|
||||
const wrapperHostName = mountWithIntl(
|
||||
<ThemeProvider theme={theme}>
|
||||
<p>{formatIpSafely('.name', mockData.Events.edges[0].node)}</p>
|
||||
</ThemeProvider>
|
||||
);
|
||||
|
||||
expect(wrapperSourceIp.text()).toBe(getEmptyValue());
|
||||
expect(wrapperHostName.text()).toBe(getEmptyValue());
|
||||
});
|
||||
|
||||
test('formatIpSafely not happy with IP ranges that are of a particular size', () => {
|
||||
const ecs = cloneDeep(mockData.Events.edges[0].node);
|
||||
ecs.source!.ip = ['255.255.255.255'];
|
||||
const wrapperSourceIp = mountWithIntl(
|
||||
<ThemeProvider theme={theme}>
|
||||
<p>{formatIpSafely('source.ip[0]', ecs)}</p>
|
||||
</ThemeProvider>
|
||||
);
|
||||
|
||||
expect(wrapperSourceIp.text()).toBe('255.255.255.255');
|
||||
});
|
||||
|
||||
test('formatIpSafely test of IPv6 max string length of 45', () => {
|
||||
const ecs = cloneDeep(mockData.Events.edges[0].node);
|
||||
ecs.source!.ip = ['0000:0000:0000:0000:0000:ffff:192.168.100.228'];
|
||||
const wrapperSourceIp = mountWithIntl(
|
||||
<ThemeProvider theme={theme}>
|
||||
<p>{formatIpSafely('source.ip[0]', ecs)}</p>
|
||||
</ThemeProvider>
|
||||
);
|
||||
|
||||
expect(wrapperSourceIp.text()).toBe('0000:0000:0000:0000:0000:ffff:192.168.100.228');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -4,7 +4,7 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { get, has } from 'lodash/fp';
|
||||
import { getOr } from 'lodash/fp';
|
||||
import React from 'react';
|
||||
import { connect } from 'react-redux';
|
||||
import { pure } from 'recompose';
|
||||
|
@@ -13,14 +13,12 @@ import { ActionCreator } from 'typescript-fsa';
|
|||
import { hostsActions } from '../../../../store/actions';
|
||||
import { Ecs, EcsEdges } from '../../../../graphql/types';
|
||||
import { hostsModel, hostsSelectors, State } from '../../../../store';
|
||||
import { DragEffects, DraggableWrapper } from '../../../drag_and_drop/draggable_wrapper';
|
||||
import { escapeDataProviderId } from '../../../drag_and_drop/helpers';
|
||||
import { getEmptyStringTag, getEmptyTagValue, getOrEmptyTag } from '../../../empty_value';
|
||||
import { IPDetailsLink } from '../../../links';
|
||||
import { getOrEmptyTag } from '../../../empty_value';
|
||||
import { HostDetailsLink, IPDetailsLink } from '../../../links';
|
||||
import { Columns, ItemsPerRow, LoadMoreTable } from '../../../load_more_table';
|
||||
import { Provider } from '../../../timeline/data_providers/provider';
|
||||
|
||||
import * as i18n from './translations';
|
||||
import { getRowItemDraggable, getRowItemDraggables } from '../../../tables/helpers';
|
||||
|
||||
interface OwnProps {
|
||||
data: Ecs[];
|
||||
|
@@ -76,7 +74,7 @@ const EventsTableComponent = pure<EventsTableProps>(
|
|||
type,
|
||||
}) => (
|
||||
<LoadMoreTable
|
||||
columns={getEventsColumns()}
|
||||
columns={getEventsColumns(type)}
|
||||
hasNextPage={hasNextPage}
|
||||
headerCount={totalCount}
|
||||
headerTitle={i18n.EVENTS}
|
||||
|
@@ -109,7 +107,12 @@ export const EventsTable = connect(
|
|||
}
|
||||
)(EventsTableComponent);
|
||||
|
||||
const getEventsColumns = (): [
|
||||
const getEventsColumns = (
|
||||
pageType: hostsModel.HostsType
|
||||
): [
|
||||
Columns<EcsEdges>,
|
||||
Columns<EcsEdges>,
|
||||
Columns<EcsEdges>,
|
||||
Columns<EcsEdges>,
|
||||
Columns<EcsEdges>,
|
||||
Columns<EcsEdges>,
|
||||
|
@@ -121,54 +124,93 @@ const getEventsColumns = (): [
|
|||
sortable: true,
|
||||
truncateText: false,
|
||||
hideForMobile: false,
|
||||
render: ({ node }) => {
|
||||
const hostName: string | null | undefined = get('host.name[0]', node);
|
||||
if (hostName != null) {
|
||||
const id = escapeDataProviderId(`events-table-${node._id}-hostName-${hostName}`);
|
||||
return (
|
||||
<DraggableWrapper
|
||||
key={id}
|
||||
dataProvider={{
|
||||
and: [],
|
||||
enabled: true,
|
||||
id,
|
||||
name: hostName,
|
||||
excluded: false,
|
||||
kqlQuery: '',
|
||||
queryMatch: {
|
||||
field: 'host.name',
|
||||
value: hostName,
|
||||
},
|
||||
}}
|
||||
render={(dataProvider, _, snapshot) =>
|
||||
snapshot.isDragging ? (
|
||||
<DragEffects>
|
||||
<Provider dataProvider={dataProvider} />
|
||||
</DragEffects>
|
||||
) : (
|
||||
hostName
|
||||
)
|
||||
}
|
||||
/>
|
||||
);
|
||||
} else {
|
||||
return getEmptyTagValue();
|
||||
}
|
||||
},
|
||||
render: ({ node }) =>
|
||||
getRowItemDraggables({
|
||||
rowItems: getOr(null, 'host.name', node),
|
||||
attrName: 'host.name',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
render: item => <HostDetailsLink hostName={item} />,
|
||||
}),
|
||||
},
|
||||
{
|
||||
name: i18n.EVENT_MODULE_DATASET,
|
||||
sortable: true,
|
||||
truncateText: true,
|
||||
hideForMobile: true,
|
||||
render: ({ node }) => (
|
||||
<>
|
||||
{getRowItemDraggables({
|
||||
rowItems: getOr(null, 'event.module', node),
|
||||
attrName: 'event.module',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
})}
|
||||
/
|
||||
{getRowItemDraggables({
|
||||
rowItems: getOr(null, 'event.dataset', node),
|
||||
attrName: 'event.dataset',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
})}
|
||||
</>
|
||||
),
|
||||
},
|
||||
{
|
||||
name: i18n.EVENT_CATEGORY,
|
||||
sortable: true,
|
||||
truncateText: true,
|
||||
hideForMobile: true,
|
||||
render: ({ node }) =>
|
||||
getRowItemDraggables({
|
||||
rowItems: getOr(null, 'event.category', node),
|
||||
attrName: 'event.category',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
}),
|
||||
},
|
||||
{
|
||||
name: i18n.EVENT_ACTION,
|
||||
sortable: true,
|
||||
truncateText: true,
|
||||
hideForMobile: true,
|
||||
render: ({ node }) => getOrEmptyTag('event.action', node),
|
||||
render: ({ node }) =>
|
||||
getRowItemDraggables({
|
||||
rowItems: getOr(null, 'event.action', node),
|
||||
attrName: 'event.action',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
}),
|
||||
},
|
||||
{
|
||||
name: i18n.USER,
|
||||
sortable: true,
|
||||
truncateText: true,
|
||||
render: ({ node }) =>
|
||||
getRowItemDraggables({
|
||||
rowItems: getOr(null, 'user.name', node),
|
||||
attrName: 'user.name',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
}),
|
||||
},
|
||||
{
|
||||
name: i18n.MESSAGE,
|
||||
sortable: false,
|
||||
truncateText: true,
|
||||
render: ({ node }) =>
|
||||
getRowItemDraggables({
|
||||
rowItems: getOr(null, 'message', node),
|
||||
attrName: 'message',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
}),
|
||||
},
|
||||
{
|
||||
name: i18n.SOURCE,
|
||||
truncateText: true,
|
||||
render: ({ node }) => (
|
||||
<>
|
||||
{formatIpSafely('source.ip[0]', node)}:{getOrEmptyTag('source.port', node)}
|
||||
{getRowItemDraggable({
|
||||
rowItem: getOr(null, 'source.ip[0]', node),
|
||||
attrName: 'source.ip',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
render: item => <IPDetailsLink ip={item} />,
|
||||
})}
|
||||
:{getOrEmptyTag('source.port', node)}
|
||||
</>
|
||||
),
|
||||
},
|
||||
|
@@ -178,31 +220,14 @@ const getEventsColumns = (): [
|
|||
truncateText: true,
|
||||
render: ({ node }) => (
|
||||
<>
|
||||
{formatIpSafely('destination.ip[0]', node)}:{getOrEmptyTag('destination.port', node)}
|
||||
</>
|
||||
),
|
||||
},
|
||||
{
|
||||
name: i18n.LOCATION,
|
||||
sortable: true,
|
||||
truncateText: true,
|
||||
render: ({ node }) => (
|
||||
<>
|
||||
{getOrEmptyTag('geo.region_name', node)} : {getOrEmptyTag('geo.country_iso_code', node)}
|
||||
{getRowItemDraggable({
|
||||
rowItem: getOr(null, 'destination.ip[0]', node),
|
||||
attrName: 'destination.ip',
|
||||
idPrefix: `host-${pageType}-events-table-${node._id}`,
|
||||
render: item => <IPDetailsLink ip={item} />,
|
||||
})}
|
||||
:{getOrEmptyTag('destination.port', node)}
|
||||
</>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
export const formatIpSafely = (path: string, data: Ecs): JSX.Element => {
|
||||
if (has(path, data)) {
|
||||
const txt = get(path, data);
|
||||
if (txt === '') {
|
||||
return getEmptyStringTag();
|
||||
} else {
|
||||
const ip = txt && txt.slice ? txt.slice(0, 45) : txt;
|
||||
return <IPDetailsLink ip={ip} />;
|
||||
}
|
||||
}
|
||||
return getEmptyTagValue();
|
||||
};
|
||||
|
|
|
@@ -22,31 +22,45 @@ export const mockData: { Events: EventsData } = {
|
|||
value: '1546878704036',
|
||||
tiebreaker: '10656',
|
||||
},
|
||||
|
||||
node: {
|
||||
_id: 'Fo8nKWgBiyhPd5Zo3cib',
|
||||
timestamp: '2019-01-07T16:31:44.036Z',
|
||||
_index: 'auditbeat-7.0.0-2019.01.07',
|
||||
destination: {
|
||||
ip: ['24.168.54.169'],
|
||||
port: [62123],
|
||||
},
|
||||
_id: 'aXSdmmoB9v5HJNSHzQ0Y',
|
||||
_index: 'filebeat-8.0.0-2019.04.20-000003',
|
||||
timestamp: '2019-05-09T17:15:47.987Z',
|
||||
event: {
|
||||
action: null,
|
||||
category: null,
|
||||
dataset: ['suricata.eve'],
|
||||
id: null,
|
||||
module: ['system'],
|
||||
severity: null,
|
||||
type: null,
|
||||
module: ['suricata'],
|
||||
severity: [3],
|
||||
},
|
||||
geo: null,
|
||||
host: {
|
||||
name: ['siem-general'],
|
||||
name: ['siem-kibana'],
|
||||
ip: null,
|
||||
id: ['aa7ca589f1b8220002f2fc61c64cfbf1'],
|
||||
},
|
||||
message: ['Generic Protocol Command Decode'],
|
||||
source: {
|
||||
ip: ['10.142.0.6'],
|
||||
port: [9200],
|
||||
ip: ['10.47.2.208'],
|
||||
port: [110],
|
||||
},
|
||||
suricata: null,
|
||||
destination: {
|
||||
ip: ['10.0.0.111'],
|
||||
port: [37140],
|
||||
},
|
||||
suricata: {
|
||||
eve: {
|
||||
proto: null,
|
||||
flow_id: [1026250231579890],
|
||||
alert: {
|
||||
signature: ['SURICATA SMTP no server welcome message'],
|
||||
signature_id: [2220006],
|
||||
},
|
||||
},
|
||||
},
|
||||
user: null,
|
||||
zeek: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@@ -54,31 +68,45 @@ export const mockData: { Events: EventsData } = {
|
|||
value: '1546878704036',
|
||||
tiebreaker: '10624',
|
||||
},
|
||||
|
||||
node: {
|
||||
_id: 'F48nKWgBiyhPd5Zo3cib',
|
||||
timestamp: '2019-01-07T16:31:44.036Z',
|
||||
_index: 'auditbeat-7.0.0-2019.01.07',
|
||||
destination: {
|
||||
ip: ['24.168.54.169'],
|
||||
port: [62145],
|
||||
},
|
||||
_id: 'aHSdmmoB9v5HJNSHzQ0Y',
|
||||
_index: 'filebeat-8.0.0-2019.04.20-000003',
|
||||
timestamp: '2019-05-09T17:15:47.604Z',
|
||||
event: {
|
||||
action: null,
|
||||
category: null,
|
||||
dataset: ['suricata.eve'],
|
||||
id: null,
|
||||
module: ['system'],
|
||||
severity: null,
|
||||
type: null,
|
||||
module: ['suricata'],
|
||||
severity: [1],
|
||||
},
|
||||
geo: null,
|
||||
host: {
|
||||
name: ['siem-general'],
|
||||
name: ['siem-kibana'],
|
||||
ip: null,
|
||||
id: ['aa7ca589f1b8220002f2fc61c64cfbf1'],
|
||||
},
|
||||
message: ['A Network Trojan was detected'],
|
||||
source: {
|
||||
ip: ['10.142.0.6'],
|
||||
port: [9200],
|
||||
ip: ['10.47.6.59'],
|
||||
port: [41155],
|
||||
},
|
||||
suricata: null,
|
||||
destination: {
|
||||
ip: ['10.225.222.243'],
|
||||
port: [8080],
|
||||
},
|
||||
suricata: {
|
||||
eve: {
|
||||
proto: null,
|
||||
flow_id: [2102212323729057],
|
||||
alert: {
|
||||
signature: ['ET TROJAN Generic - POST To .php w/Extended ASCII Characters'],
|
||||
signature_id: [2017259],
|
||||
},
|
||||
},
|
||||
},
|
||||
user: null,
|
||||
zeek: null,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
|
@@ -24,8 +24,8 @@ export const EVENT_ACTION = i18n.translate('xpack.siem.eventsTable.eventTypeActi
|
|||
defaultMessage: 'Event Action',
|
||||
});
|
||||
|
||||
export const EVENT_TYPE = i18n.translate('xpack.siem.eventsTable.eventTypeTitle', {
|
||||
defaultMessage: 'Event type',
|
||||
export const EVENT_CATEGORY = i18n.translate('xpack.siem.eventsTable.eventCategoryTitle', {
|
||||
defaultMessage: 'Event Category',
|
||||
});
|
||||
|
||||
export const SOURCE = i18n.translate('xpack.siem.eventsTable.sourceTitle', {
|
||||
|
@@ -36,8 +36,16 @@ export const DESTINATION = i18n.translate('xpack.siem.eventsTable.destinationTit
|
|||
defaultMessage: 'Destination',
|
||||
});
|
||||
|
||||
export const LOCATION = i18n.translate('xpack.siem.eventsTable.locationTitle', {
|
||||
defaultMessage: 'Location',
|
||||
export const MESSAGE = i18n.translate('xpack.siem.eventsTable.messageTitle', {
|
||||
defaultMessage: 'Message',
|
||||
});
|
||||
|
||||
export const EVENT_MODULE_DATASET = i18n.translate('xpack.siem.eventsTable.moduleDatasetTitle', {
|
||||
defaultMessage: 'Module/Dataset',
|
||||
});
|
||||
|
||||
export const USER = i18n.translate('xpack.siem.eventsTable.userTitle', {
|
||||
defaultMessage: 'User',
|
||||
});
|
||||
|
||||
export const ROWS_5 = i18n.translate('xpack.siem.eventsTable.rows', {
|
||||
|
|
|
@@ -37,16 +37,18 @@ export const eventsQuery = gql`
|
|||
timestamp
|
||||
event {
|
||||
action
|
||||
severity
|
||||
module
|
||||
category
|
||||
dataset
|
||||
id
|
||||
module
|
||||
severity
|
||||
}
|
||||
host {
|
||||
name
|
||||
ip
|
||||
id
|
||||
}
|
||||
message
|
||||
source {
|
||||
ip
|
||||
port
|
||||
|
@@ -55,10 +57,6 @@ export const eventsQuery = gql`
|
|||
ip
|
||||
port
|
||||
}
|
||||
geo {
|
||||
region_name
|
||||
country_iso_code
|
||||
}
|
||||
suricata {
|
||||
eve {
|
||||
proto
|
||||
|
@@ -69,6 +67,9 @@ export const eventsQuery = gql`
|
|||
}
|
||||
}
|
||||
}
|
||||
user {
|
||||
name
|
||||
}
|
||||
zeek {
|
||||
session_id
|
||||
}
|
||||
|
|
|
@@ -1935,14 +1935,16 @@ export namespace GetEventsQuery {
|
|||
|
||||
host?: Host | null;
|
||||
|
||||
message?: ToStringArray | null;
|
||||
|
||||
source?: _Source | null;
|
||||
|
||||
destination?: Destination | null;
|
||||
|
||||
geo?: Geo | null;
|
||||
|
||||
suricata?: Suricata | null;
|
||||
|
||||
user?: User | null;
|
||||
|
||||
zeek?: Zeek | null;
|
||||
};
|
||||
|
||||
|
@@ -1951,13 +1953,15 @@ export namespace GetEventsQuery {
|
|||
|
||||
action?: ToStringArray | null;
|
||||
|
||||
severity?: ToNumberArray | null;
|
||||
category?: ToStringArray | null;
|
||||
|
||||
dataset?: ToStringArray | null;
|
||||
|
||||
id?: ToStringArray | null;
|
||||
|
||||
module?: ToStringArray | null;
|
||||
|
||||
category?: ToStringArray | null;
|
||||
|
||||
id?: ToStringArray | null;
|
||||
severity?: ToNumberArray | null;
|
||||
};
|
||||
|
||||
export type Host = {
|
||||
|
@@ -1986,14 +1990,6 @@ export namespace GetEventsQuery {
|
|||
port?: ToNumberArray | null;
|
||||
};
|
||||
|
||||
export type Geo = {
|
||||
__typename?: 'GeoEcsFields';
|
||||
|
||||
region_name?: ToStringArray | null;
|
||||
|
||||
country_iso_code?: ToStringArray | null;
|
||||
};
|
||||
|
||||
export type Suricata = {
|
||||
__typename?: 'SuricataEcsFields';
|
||||
|
||||
|
@@ -2018,6 +2014,12 @@ export namespace GetEventsQuery {
|
|||
signature_id?: ToNumberArray | null;
|
||||
};
|
||||
|
||||
export type User = {
|
||||
__typename?: 'UserEcsFields';
|
||||
|
||||
name?: ToStringArray | null;
|
||||
};
|
||||
|
||||
export type Zeek = {
|
||||
__typename?: 'ZeekEcsFields';
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@@ -1,180 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import { GraphQLResolveInfo } from 'graphql';
|
||||
|
||||
import { Direction, Source } from '../../graphql/types';
|
||||
import { Events } from '../../lib/events';
|
||||
import { EventsAdapter } from '../../lib/events/types';
|
||||
import { FrameworkRequest, internalFrameworkRequest } from '../../lib/framework';
|
||||
import { SourceStatus } from '../../lib/source_status';
|
||||
import { Sources } from '../../lib/sources';
|
||||
import { createSourcesResolvers } from '../sources';
|
||||
import { SourcesResolversDeps } from '../sources/resolvers';
|
||||
import { mockSourcesAdapter, mockSourceStatusAdapter } from '../sources/resolvers.test';
|
||||
|
||||
import {
|
||||
mockEventsData,
|
||||
mockEventsFields,
|
||||
mockLastEventTimeData,
|
||||
mockTimelineData,
|
||||
mockTimelineDetailsData,
|
||||
} from './events.mock';
|
||||
import { createEventsResolvers, EventsResolversDeps } from './resolvers';
|
||||
|
||||
const mockGetFields = jest.fn();
|
||||
mockGetFields.mockResolvedValue({ fieldNodes: [mockEventsFields] });
|
||||
jest.doMock('../../utils/build_query/fields', () => ({
|
||||
getFields: mockGetFields,
|
||||
}));
|
||||
|
||||
const mockGetEvents = jest.fn();
|
||||
mockGetEvents.mockResolvedValue({
|
||||
Events: {
|
||||
...mockEventsData.Events,
|
||||
},
|
||||
});
|
||||
const mockGetTimeline = jest.fn();
|
||||
mockGetTimeline.mockResolvedValue({
|
||||
Timeline: {
|
||||
...mockTimelineData.Timeline,
|
||||
},
|
||||
});
|
||||
const mockGetLastEventTime = jest.fn();
|
||||
mockGetLastEventTime.mockResolvedValue({
|
||||
LastEventTime: {
|
||||
...mockLastEventTimeData.LastEventTime,
|
||||
},
|
||||
});
|
||||
const mockGetTimelineDetails = jest.fn();
|
||||
mockGetTimelineDetails.mockResolvedValue({
|
||||
TimelineDetails: {
|
||||
...mockTimelineDetailsData.TimelineDetails,
|
||||
},
|
||||
});
|
||||
const mockEventsAdapter: EventsAdapter = {
|
||||
getEvents: mockGetEvents,
|
||||
getTimelineDetails: mockGetTimelineDetails,
|
||||
getTimelineData: mockGetTimeline,
|
||||
getLastEventTimeData: mockGetLastEventTime,
|
||||
};
|
||||
|
||||
const mockEventsLibs: EventsResolversDeps = {
|
||||
events: new Events(mockEventsAdapter),
|
||||
};
|
||||
|
||||
const mockSrcLibs: SourcesResolversDeps = {
|
||||
sources: new Sources(mockSourcesAdapter),
|
||||
sourceStatus: new SourceStatus(mockSourceStatusAdapter, new Sources(mockSourcesAdapter)),
|
||||
};
|
||||
|
||||
const req: FrameworkRequest = {
|
||||
[internalFrameworkRequest]: {
|
||||
params: {},
|
||||
query: {},
|
||||
payload: {
|
||||
operationName: 'test',
|
||||
},
|
||||
},
|
||||
params: {},
|
||||
query: {},
|
||||
payload: {
|
||||
operationName: 'test',
|
||||
},
|
||||
};
|
||||
|
||||
const context = { req };
|
||||
|
||||
describe('Test Source Resolvers', () => {
|
||||
test('Make sure that getEvents have been called', async () => {
|
||||
context.req.payload.operationName = 'events';
|
||||
const source = await createSourcesResolvers(mockSrcLibs).Query.source(
|
||||
{},
|
||||
{ id: 'default' },
|
||||
context,
|
||||
{} as GraphQLResolveInfo
|
||||
);
|
||||
const data = await createEventsResolvers(mockEventsLibs).Source.Events(
|
||||
source as Source,
|
||||
{
|
||||
timerange: {
|
||||
interval: '12h',
|
||||
to: 1514782800000,
|
||||
from: 1546318799999,
|
||||
},
|
||||
pagination: {
|
||||
limit: 2,
|
||||
cursor: null,
|
||||
},
|
||||
sortField: {
|
||||
sortFieldId: 'timestamp',
|
||||
direction: Direction.desc,
|
||||
},
|
||||
},
|
||||
context,
|
||||
{} as GraphQLResolveInfo
|
||||
);
|
||||
expect(mockEventsAdapter.getEvents).toHaveBeenCalled();
|
||||
expect(data).toEqual({
|
||||
Events: {
|
||||
...mockEventsData.Events,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('Make sure that getTimelineData have been called', async () => {
|
||||
context.req.payload.operationName = 'timeline';
|
||||
const source = await createSourcesResolvers(mockSrcLibs).Query.source(
|
||||
{},
|
||||
{ id: 'default' },
|
||||
context,
|
||||
{} as GraphQLResolveInfo
|
||||
);
|
||||
const data = await createEventsResolvers(mockEventsLibs).Source.Timeline(
|
||||
source as Source,
|
||||
{
|
||||
timerange: {
|
||||
interval: '12h',
|
||||
to: 1514782800000,
|
||||
from: 1546318799999,
|
||||
},
|
||||
pagination: {
|
||||
limit: 2,
|
||||
cursor: null,
|
||||
},
|
||||
sortField: {
|
||||
sortFieldId: 'timestamp',
|
||||
direction: Direction.desc,
|
||||
},
|
||||
fieldRequested: ['@timestamp', 'host.name'],
|
||||
},
|
||||
context,
|
||||
{} as GraphQLResolveInfo
|
||||
);
|
||||
expect(mockEventsAdapter.getTimelineData).toHaveBeenCalled();
|
||||
expect(data).toEqual({ Timeline: { ...mockTimelineData.Timeline } });
|
||||
});
|
||||
|
||||
test('Make sure that getTimelineDetails have been called', async () => {
|
||||
context.req.payload.operationName = 'details';
|
||||
const source = await createSourcesResolvers(mockSrcLibs).Query.source(
|
||||
{},
|
||||
{ id: 'default' },
|
||||
context,
|
||||
{} as GraphQLResolveInfo
|
||||
);
|
||||
const data = await createEventsResolvers(mockEventsLibs).Source.TimelineDetails(
|
||||
source as Source,
|
||||
{
|
||||
indexName: 'filebeat-7.0.0-iot-2019.06',
|
||||
eventId: 'QRhG1WgBqd-n62SwZYDT',
|
||||
},
|
||||
context,
|
||||
{} as GraphQLResolveInfo
|
||||
);
|
||||
expect(mockEventsAdapter.getTimelineDetails).toHaveBeenCalled();
|
||||
expect(data).toEqual({ TimelineDetails: { ...mockTimelineDetailsData.TimelineDetails } });
|
||||
});
|
||||
});
|
|
@@ -1,330 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { graphql } from 'graphql';
|
||||
import { addMockFunctionsToSchema, makeExecutableSchema } from 'graphql-tools';
|
||||
|
||||
import { rootSchema } from '../../../common/graphql/root/schema.gql';
|
||||
import { sharedSchema } from '../../../common/graphql/shared';
|
||||
import { Logger } from '../../utils/logger';
|
||||
import { ecsSchema } from '../ecs';
|
||||
import { dateSchema } from '../scalar_date';
|
||||
import { toBooleanSchema } from '../scalar_to_boolean_array';
|
||||
import { toDateSchema } from '../scalar_to_date_array';
|
||||
import { toNumberSchema } from '../scalar_to_number_array';
|
||||
import { sourceStatusSchema } from '../source_status/schema.gql';
|
||||
import { sourcesSchema } from '../sources/schema.gql';
|
||||
|
||||
import {
|
||||
getEventsQueryMock,
|
||||
mockEventsData,
|
||||
mockTimelineData,
|
||||
mockTimelineDetailsData,
|
||||
} from './events.mock';
|
||||
import { eventsSchema } from './schema.gql';
|
||||
|
||||
const cases = [
|
||||
{
|
||||
id: 'Test case to query Events',
|
||||
query: `
|
||||
query GetEventsQuery(
|
||||
$timerange: TimerangeInput!
|
||||
$pagination: PaginationInput!
|
||||
$sortField: SortField!
|
||||
$filterQuery: String
|
||||
) {
|
||||
source(id: "default") {
|
||||
Events(
|
||||
timerange: $timerange
|
||||
pagination: $pagination
|
||||
sortField: $sortField
|
||||
filterQuery: $filterQuery
|
||||
) {
|
||||
totalCount
|
||||
pageInfo {
|
||||
endCursor {
|
||||
value
|
||||
tiebreaker
|
||||
}
|
||||
hasNextPage
|
||||
}
|
||||
edges {
|
||||
cursor{
|
||||
value
|
||||
tiebreaker
|
||||
}
|
||||
node {
|
||||
_id
|
||||
_index
|
||||
timestamp
|
||||
event {
|
||||
type
|
||||
severity
|
||||
module
|
||||
category
|
||||
id
|
||||
}
|
||||
host {
|
||||
name
|
||||
ip
|
||||
}
|
||||
source {
|
||||
ip
|
||||
port
|
||||
}
|
||||
destination {
|
||||
ip
|
||||
port
|
||||
}
|
||||
geo {
|
||||
region_name
|
||||
country_iso_code
|
||||
}
|
||||
suricata {
|
||||
eve {
|
||||
proto
|
||||
flow_id
|
||||
alert {
|
||||
signature
|
||||
signature_id
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
kpiEventType {
|
||||
value
|
||||
count
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
variables: {
|
||||
timerange: {
|
||||
interval: '12h',
|
||||
to: 1514782800000,
|
||||
from: 1546318799999,
|
||||
},
|
||||
pagination: {
|
||||
limit: 2,
|
||||
cursor: null,
|
||||
},
|
||||
sortField: {
|
||||
sortFieldId: '@timestamp',
|
||||
direction: 'desc',
|
||||
},
|
||||
},
|
||||
context: {
|
||||
req: {
|
||||
payload: {
|
||||
operationName: 'events',
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
data: {
|
||||
source: {
|
||||
...mockEventsData,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'Test case to query Timeline',
|
||||
query: `
|
||||
query GetTimelineQuery(
|
||||
$timerange: TimerangeInput!
|
||||
$pagination: PaginationInput!
|
||||
$sortField: SortField!
|
||||
$filterQuery: String
|
||||
$fieldRequested: [String!]!
|
||||
) {
|
||||
source(id: "default") {
|
||||
Timeline(
|
||||
timerange: $timerange
|
||||
pagination: $pagination
|
||||
sortField: $sortField
|
||||
filterQuery: $filterQuery
|
||||
fieldRequested: $fieldRequested
|
||||
) {
|
||||
totalCount
|
||||
pageInfo {
|
||||
endCursor {
|
||||
value
|
||||
tiebreaker
|
||||
}
|
||||
hasNextPage
|
||||
}
|
||||
edges {
|
||||
cursor{
|
||||
value
|
||||
tiebreaker
|
||||
}
|
||||
node {
|
||||
_id
|
||||
_index
|
||||
data {
|
||||
field
|
||||
value
|
||||
}
|
||||
ecs {
|
||||
timestamp
|
||||
_id
|
||||
_index
|
||||
event {
|
||||
type
|
||||
severity
|
||||
module
|
||||
category
|
||||
id
|
||||
}
|
||||
host {
|
||||
name
|
||||
ip
|
||||
}
|
||||
source {
|
||||
ip
|
||||
port
|
||||
}
|
||||
destination {
|
||||
ip
|
||||
port
|
||||
}
|
||||
geo {
|
||||
region_name
|
||||
country_iso_code
|
||||
}
|
||||
suricata {
|
||||
eve {
|
||||
proto
|
||||
flow_id
|
||||
alert {
|
||||
signature
|
||||
signature_id
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
variables: {
|
||||
timerange: {
|
||||
interval: '12h',
|
||||
to: 1514782800000,
|
||||
from: 1546318799999,
|
||||
},
|
||||
pagination: {
|
||||
limit: 2,
|
||||
cursor: null,
|
||||
},
|
||||
sortField: {
|
||||
sortFieldId: '@timestamp',
|
||||
direction: 'desc',
|
||||
},
|
||||
fieldRequested: ['@timestamp', 'host.name'],
|
||||
},
|
||||
context: {
|
||||
req: {
|
||||
payload: {
|
||||
operationName: 'timeline',
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
data: {
|
||||
source: {
|
||||
...mockTimelineData,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'Test case to query Timeline Details',
|
||||
query: `
|
||||
query GetTimelineDetailsQuery($eventId: String!, $indexName: String!) {
|
||||
source(id: "default") {
|
||||
TimelineDetails(eventId: $eventId, indexName: $indexName) {
|
||||
data {
|
||||
category
|
||||
description
|
||||
example
|
||||
field
|
||||
type
|
||||
values
|
||||
originalValue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
variables: {
|
||||
eventId: 'QRhG1WgBqd-n62SwZYDT',
|
||||
indexName: 'filebeat-7.0.0-iot-2019.06',
|
||||
},
|
||||
context: {
|
||||
req: {
|
||||
payload: {
|
||||
operationName: 'details',
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: {
|
||||
data: {
|
||||
source: {
|
||||
...mockTimelineDetailsData,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
describe('Test Source Schema', () => {
|
||||
const typeDefs = [
|
||||
rootSchema,
|
||||
sharedSchema,
|
||||
sourcesSchema,
|
||||
sourceStatusSchema,
|
||||
ecsSchema,
|
||||
eventsSchema,
|
||||
dateSchema,
|
||||
toNumberSchema,
|
||||
toDateSchema,
|
||||
toBooleanSchema,
|
||||
];
|
||||
const mockSchema = makeExecutableSchema({ typeDefs });
|
||||
|
||||
// Here we specify the return payloads of mocked types
|
||||
const logger: Logger = {
|
||||
debug: jest.fn(),
|
||||
info: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
error: jest.fn(),
|
||||
};
|
||||
const mocks = {
|
||||
Query: () => ({
|
||||
...getEventsQueryMock(logger),
|
||||
}),
|
||||
};
|
||||
|
||||
addMockFunctionsToSchema({
|
||||
schema: mockSchema,
|
||||
mocks,
|
||||
});
|
||||
|
||||
cases.forEach(obj => {
|
||||
const { id, query, variables, context, expected } = obj;
|
||||
|
||||
test(`${id}`, async () => {
|
||||
const result = await graphql(mockSchema, query, null, context, variables);
|
||||
return await expect(result).toEqual(expected);
|
||||
});
|
||||
});
|
||||
});
|