mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
[Security Solution] Resolver in Timeline (#69728)
Display Resolver in Security Solution's Timeline.
This commit is contained in:
parent
d3a0ab3db0
commit
8ffdd4568b
40 changed files with 2030 additions and 1356 deletions
|
@ -71,3 +71,19 @@ export const validateChildren = {
|
|||
legacyEndpointID: schema.maybe(schema.string()),
|
||||
}),
|
||||
};
|
||||
|
||||
/**
|
||||
* Used to validate GET requests for 'entities'
|
||||
*/
|
||||
export const validateEntities = {
|
||||
query: schema.object({
|
||||
/**
|
||||
* Return the process entities related to the document w/ the matching `_id`.
|
||||
*/
|
||||
_id: schema.string(),
|
||||
/**
|
||||
* Indices to search in.
|
||||
*/
|
||||
indices: schema.arrayOf(schema.string()),
|
||||
}),
|
||||
};
|
||||
|
|
|
@ -511,6 +511,11 @@ export interface EndpointEvent {
|
|||
|
||||
export type ResolverEvent = EndpointEvent | LegacyEndpointEvent;
|
||||
|
||||
/**
|
||||
* The response body for the resolver '/entity' index API
|
||||
*/
|
||||
export type ResolverEntityIndex = Array<{ entity_id: string }>;
|
||||
|
||||
/**
|
||||
* Takes a @kbn/config-schema 'schema' type and returns a type that represents valid inputs.
|
||||
* Similar to `TypeOf`, but allows strings as input for `schema.number()` (which is inline
|
||||
|
|
|
@ -1,96 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import * as reactTestingLibrary from '@testing-library/react';
|
||||
import { MemoryHistory } from 'history';
|
||||
import { Store } from 'redux';
|
||||
|
||||
import { mockAlertDetailsResult } from '../store/mock_alert_result_list';
|
||||
import { alertPageTestRender } from './test_helpers/render_alert_page';
|
||||
import { AppAction } from '../../common/store/actions';
|
||||
import { State } from '../../common/store/types';
|
||||
|
||||
describe('when the alert details flyout is open', () => {
|
||||
let render: () => reactTestingLibrary.RenderResult;
|
||||
let history: MemoryHistory<never>;
|
||||
let store: Store<State>;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Creates the render elements for the tests to use
|
||||
({ render, history, store } = alertPageTestRender());
|
||||
});
|
||||
describe('when the alerts details flyout is open', () => {
|
||||
beforeEach(() => {
|
||||
reactTestingLibrary.act(() => {
|
||||
history.push({
|
||||
search: '?selected_alert=1',
|
||||
});
|
||||
});
|
||||
});
|
||||
describe('when the data loads', () => {
|
||||
beforeEach(() => {
|
||||
reactTestingLibrary.act(() => {
|
||||
const action: AppAction = {
|
||||
type: 'serverReturnedAlertDetailsData',
|
||||
payload: mockAlertDetailsResult(),
|
||||
};
|
||||
store.dispatch(action);
|
||||
});
|
||||
});
|
||||
it('should display take action button', async () => {
|
||||
await render().findByTestId('alertDetailTakeActionDropdownButton');
|
||||
});
|
||||
describe('when the user clicks the take action button on the flyout', () => {
|
||||
let renderResult: reactTestingLibrary.RenderResult;
|
||||
beforeEach(async () => {
|
||||
renderResult = render();
|
||||
const takeActionButton = await renderResult.findByTestId(
|
||||
'alertDetailTakeActionDropdownButton'
|
||||
);
|
||||
if (takeActionButton) {
|
||||
reactTestingLibrary.fireEvent.click(takeActionButton);
|
||||
}
|
||||
});
|
||||
it('should display the correct fields in the dropdown', async () => {
|
||||
await renderResult.findByTestId('alertDetailTakeActionCloseAlertButton');
|
||||
await renderResult.findByTestId('alertDetailTakeActionWhitelistButton');
|
||||
});
|
||||
});
|
||||
describe('when the user navigates to the resolver tab', () => {
|
||||
beforeEach(() => {
|
||||
reactTestingLibrary.act(() => {
|
||||
history.push({
|
||||
...history.location,
|
||||
search: '?selected_alert=1&active_details_tab=overviewResolver',
|
||||
});
|
||||
});
|
||||
});
|
||||
it('should show the resolver view', async () => {
|
||||
const resolver = await render().findByTestId('alertResolver');
|
||||
expect(resolver).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
describe('when the user navigates to the overview tab', () => {
|
||||
let renderResult: reactTestingLibrary.RenderResult;
|
||||
beforeEach(async () => {
|
||||
renderResult = render();
|
||||
const overviewTab = await renderResult.findByTestId('overviewMetadata');
|
||||
if (overviewTab) {
|
||||
reactTestingLibrary.fireEvent.click(overviewTab);
|
||||
}
|
||||
});
|
||||
it('should render all accordion panels', async () => {
|
||||
await renderResult.findAllByTestId('alertDetailsAlertAccordion');
|
||||
await renderResult.findAllByTestId('alertDetailsHostAccordion');
|
||||
await renderResult.findAllByTestId('alertDetailsFileAccordion');
|
||||
await renderResult.findAllByTestId('alertDetailsHashAccordion');
|
||||
await renderResult.findAllByTestId('alertDetailsSourceProcessAccordion');
|
||||
await renderResult.findAllByTestId('alertDetailsSourceProcessTokenAccordion');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -20,8 +20,6 @@ import { useAlertListSelector } from '../../hooks/use_alerts_selector';
|
|||
import * as selectors from '../../../store/selectors';
|
||||
import { MetadataPanel } from './metadata_panel';
|
||||
import { FormattedDate } from '../../formatted_date';
|
||||
import { AlertDetailResolver } from '../../resolver';
|
||||
import { ResolverEvent } from '../../../../../common/endpoint/types';
|
||||
import { TakeActionDropdown } from './take_action_dropdown';
|
||||
import { urlFromQueryParams } from '../../url_from_query_params';
|
||||
|
||||
|
@ -65,12 +63,11 @@ const AlertDetailsOverviewComponent = memo(() => {
|
|||
content: (
|
||||
<>
|
||||
<EuiSpacer />
|
||||
<AlertDetailResolver selectedEvent={(alertDetailsData as unknown) as ResolverEvent} />
|
||||
</>
|
||||
),
|
||||
},
|
||||
];
|
||||
}, [alertDetailsData]);
|
||||
}, []);
|
||||
|
||||
/* eslint-disable-next-line react-hooks/rules-of-hooks */
|
||||
const activeTab = useMemo(
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
import styled from 'styled-components';
|
||||
import { Provider } from 'react-redux';
|
||||
import { useKibana } from '../../../../../../src/plugins/kibana_react/public';
|
||||
import { ResolverEvent } from '../../../common/endpoint/types';
|
||||
import { StartServices } from '../../types';
|
||||
import { storeFactory } from '../../resolver/store';
|
||||
import { Resolver } from '../../resolver/view';
|
||||
|
||||
const AlertDetailResolverComponents = React.memo(
|
||||
({ className, selectedEvent }: { className?: string; selectedEvent?: ResolverEvent }) => {
|
||||
const context = useKibana<StartServices>();
|
||||
const { store } = storeFactory(context);
|
||||
|
||||
return (
|
||||
<div className={className} data-test-subj="alertResolver">
|
||||
<Provider store={store}>
|
||||
<Resolver selectedEvent={selectedEvent} />
|
||||
</Provider>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
AlertDetailResolverComponents.displayName = 'AlertDetailResolver';
|
||||
|
||||
export const AlertDetailResolver = styled(AlertDetailResolverComponents)`
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
min-height: calc(100vh - 505px);
|
||||
`;
|
|
@ -56,42 +56,57 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
0,
|
||||
-229.43553924069099,
|
||||
],
|
||||
Array [
|
||||
395.9797974644666,
|
||||
-0.8164965809277259,
|
||||
],
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"metadata": Object {
|
||||
"uniqueId": "",
|
||||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
0,
|
||||
-229.43553924069099,
|
||||
],
|
||||
Array [
|
||||
197.9898987322333,
|
||||
-343.7450605705726,
|
||||
],
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"metadata": Object {
|
||||
"uniqueId": "",
|
||||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
395.9797974644666,
|
||||
-0.8164965809277259,
|
||||
-98.99494936611666,
|
||||
-286.5902999056318,
|
||||
],
|
||||
Array [
|
||||
593.9696961966999,
|
||||
-115.12601791080935,
|
||||
113.49302474895391,
|
||||
],
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"metadata": Object {
|
||||
"uniqueId": "",
|
||||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
-98.99494936611666,
|
||||
-286.5902999056318,
|
||||
],
|
||||
Array [
|
||||
98.99494936611666,
|
||||
-400.8998212355134,
|
||||
],
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"metadata": Object {
|
||||
"uniqueId": "",
|
||||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
296.98484809834997,
|
||||
-57.97125724586854,
|
||||
],
|
||||
Array [
|
||||
494.9747468305833,
|
||||
-172.28077857575016,
|
||||
],
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"metadata": Object {
|
||||
"uniqueId": "",
|
||||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
593.9696961966999,
|
||||
113.49302474895391,
|
||||
],
|
||||
Array [
|
||||
791.9595949289333,
|
||||
-0.8164965809277259,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -101,12 +116,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
197.9898987322333,
|
||||
-343.7450605705726,
|
||||
98.99494936611666,
|
||||
-400.8998212355134,
|
||||
],
|
||||
Array [
|
||||
395.9797974644666,
|
||||
-458.05458190045425,
|
||||
296.98484809834997,
|
||||
-515.2093425653951,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -116,12 +131,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
296.98484809834997,
|
||||
-515.2093425653951,
|
||||
197.9898987322333,
|
||||
-572.3641032303359,
|
||||
],
|
||||
Array [
|
||||
494.9747468305833,
|
||||
-400.8998212355134,
|
||||
395.9797974644666,
|
||||
-458.05458190045425,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -131,12 +146,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
296.98484809834997,
|
||||
-515.2093425653951,
|
||||
197.9898987322333,
|
||||
-572.3641032303359,
|
||||
],
|
||||
Array [
|
||||
494.9747468305833,
|
||||
-629.5188638952767,
|
||||
395.9797974644666,
|
||||
-686.6736245602175,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -146,12 +161,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
494.9747468305833,
|
||||
-400.8998212355134,
|
||||
395.9797974644666,
|
||||
-458.05458190045425,
|
||||
],
|
||||
Array [
|
||||
692.9646455628166,
|
||||
-515.2093425653951,
|
||||
593.9696961966999,
|
||||
-572.3641032303359,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -161,12 +176,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
593.9696961966999,
|
||||
-115.12601791080935,
|
||||
494.9747468305833,
|
||||
-172.28077857575016,
|
||||
],
|
||||
Array [
|
||||
791.9595949289333,
|
||||
-229.43553924069096,
|
||||
692.9646455628166,
|
||||
-286.5902999056318,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -176,12 +191,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
692.9646455628166,
|
||||
-286.5902999056318,
|
||||
593.9696961966999,
|
||||
-343.7450605705726,
|
||||
],
|
||||
Array [
|
||||
890.9545442950499,
|
||||
-172.28077857575016,
|
||||
791.9595949289333,
|
||||
-229.43553924069096,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -191,12 +206,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
692.9646455628166,
|
||||
-286.5902999056318,
|
||||
593.9696961966999,
|
||||
-343.7450605705726,
|
||||
],
|
||||
Array [
|
||||
890.9545442950499,
|
||||
-400.89982123551346,
|
||||
791.9595949289333,
|
||||
-458.05458190045425,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -206,12 +221,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
890.9545442950499,
|
||||
-172.28077857575016,
|
||||
791.9595949289333,
|
||||
-229.43553924069096,
|
||||
],
|
||||
Array [
|
||||
1088.9444430272833,
|
||||
-286.5902999056318,
|
||||
989.9494936611666,
|
||||
-343.7450605705726,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -221,12 +236,12 @@ Object {
|
|||
},
|
||||
"points": Array [
|
||||
Array [
|
||||
1088.9444430272833,
|
||||
-286.5902999056318,
|
||||
989.9494936611666,
|
||||
-343.7450605705726,
|
||||
],
|
||||
Array [
|
||||
1286.9343417595164,
|
||||
-400.89982123551346,
|
||||
1187.9393923933999,
|
||||
-458.05458190045425,
|
||||
],
|
||||
],
|
||||
},
|
||||
|
@ -263,8 +278,8 @@ Object {
|
|||
"unique_ppid": 0,
|
||||
},
|
||||
} => Array [
|
||||
197.9898987322333,
|
||||
-343.7450605705726,
|
||||
98.99494936611666,
|
||||
-400.8998212355134,
|
||||
],
|
||||
Object {
|
||||
"@timestamp": 1582233383000,
|
||||
|
@ -280,8 +295,25 @@ Object {
|
|||
"unique_ppid": 0,
|
||||
},
|
||||
} => Array [
|
||||
593.9696961966999,
|
||||
-115.12601791080935,
|
||||
494.9747468305833,
|
||||
-172.28077857575016,
|
||||
],
|
||||
Object {
|
||||
"@timestamp": 1582233383000,
|
||||
"agent": Object {
|
||||
"id": "",
|
||||
"type": "",
|
||||
"version": "",
|
||||
},
|
||||
"endgame": Object {
|
||||
"event_subtype_full": "termination_event",
|
||||
"event_type_full": "process_event",
|
||||
"unique_pid": 8,
|
||||
"unique_ppid": 0,
|
||||
},
|
||||
} => Array [
|
||||
791.9595949289333,
|
||||
-0.8164965809277259,
|
||||
],
|
||||
Object {
|
||||
"@timestamp": 1582233383000,
|
||||
|
@ -297,8 +329,8 @@ Object {
|
|||
"unique_ppid": 1,
|
||||
},
|
||||
} => Array [
|
||||
494.9747468305833,
|
||||
-629.5188638952767,
|
||||
395.9797974644666,
|
||||
-686.6736245602175,
|
||||
],
|
||||
Object {
|
||||
"@timestamp": 1582233383000,
|
||||
|
@ -314,8 +346,8 @@ Object {
|
|||
"unique_ppid": 1,
|
||||
},
|
||||
} => Array [
|
||||
692.9646455628166,
|
||||
-515.2093425653951,
|
||||
593.9696961966999,
|
||||
-572.3641032303359,
|
||||
],
|
||||
Object {
|
||||
"@timestamp": 1582233383000,
|
||||
|
@ -331,8 +363,8 @@ Object {
|
|||
"unique_ppid": 2,
|
||||
},
|
||||
} => Array [
|
||||
890.9545442950499,
|
||||
-400.89982123551346,
|
||||
791.9595949289333,
|
||||
-458.05458190045425,
|
||||
],
|
||||
Object {
|
||||
"@timestamp": 1582233383000,
|
||||
|
@ -348,8 +380,8 @@ Object {
|
|||
"unique_ppid": 2,
|
||||
},
|
||||
} => Array [
|
||||
1088.9444430272833,
|
||||
-286.5902999056318,
|
||||
989.9494936611666,
|
||||
-343.7450605705726,
|
||||
],
|
||||
Object {
|
||||
"@timestamp": 1582233383000,
|
||||
|
@ -365,8 +397,8 @@ Object {
|
|||
"unique_ppid": 6,
|
||||
},
|
||||
} => Array [
|
||||
1286.9343417595164,
|
||||
-400.89982123551346,
|
||||
1187.9393923933999,
|
||||
-458.05458190045425,
|
||||
],
|
||||
},
|
||||
}
|
|
@ -4,10 +4,10 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { uniquePidForProcess, uniqueParentPidForProcess } from './process_event';
|
||||
import { IndexedProcessTree, AdjacentProcessMap } from '../types';
|
||||
import { ResolverEvent } from '../../../common/endpoint/types';
|
||||
import { levelOrder as baseLevelOrder } from '../lib/tree_sequencers';
|
||||
import { uniquePidForProcess, uniqueParentPidForProcess } from '../process_event';
|
||||
import { IndexedProcessTree, AdjacentProcessMap } from '../../types';
|
||||
import { ResolverEvent } from '../../../../common/endpoint/types';
|
||||
import { levelOrder as baseLevelOrder } from '../../lib/tree_sequencers';
|
||||
|
||||
/**
|
||||
* Create a new IndexedProcessTree from an array of ProcessEvents
|
|
@ -0,0 +1,152 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import { IsometricTaxiLayout } from '../../types';
|
||||
import { LegacyEndpointEvent } from '../../../../common/endpoint/types';
|
||||
import { isometricTaxiLayout } from './isometric_taxi_layout';
|
||||
import { mockProcessEvent } from '../../models/process_event_test_helpers';
|
||||
import { factory } from './index';
|
||||
|
||||
describe('resolver graph layout', () => {
|
||||
let processA: LegacyEndpointEvent;
|
||||
let processB: LegacyEndpointEvent;
|
||||
let processC: LegacyEndpointEvent;
|
||||
let processD: LegacyEndpointEvent;
|
||||
let processE: LegacyEndpointEvent;
|
||||
let processF: LegacyEndpointEvent;
|
||||
let processG: LegacyEndpointEvent;
|
||||
let processH: LegacyEndpointEvent;
|
||||
let processI: LegacyEndpointEvent;
|
||||
let events: LegacyEndpointEvent[];
|
||||
let layout: () => IsometricTaxiLayout;
|
||||
|
||||
beforeEach(() => {
|
||||
/*
|
||||
* A
|
||||
* ____|____
|
||||
* | |
|
||||
* B C
|
||||
* ___|___ ___|___
|
||||
* | | | |
|
||||
* D E F G
|
||||
* |
|
||||
* H
|
||||
*
|
||||
*/
|
||||
processA = mockProcessEvent({
|
||||
endgame: {
|
||||
process_name: '',
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 0,
|
||||
},
|
||||
});
|
||||
processB = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'already_running',
|
||||
unique_pid: 1,
|
||||
unique_ppid: 0,
|
||||
},
|
||||
});
|
||||
processC = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 2,
|
||||
unique_ppid: 0,
|
||||
},
|
||||
});
|
||||
processD = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 3,
|
||||
unique_ppid: 1,
|
||||
},
|
||||
});
|
||||
processE = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 4,
|
||||
unique_ppid: 1,
|
||||
},
|
||||
});
|
||||
processF = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 5,
|
||||
unique_ppid: 2,
|
||||
},
|
||||
});
|
||||
processG = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 6,
|
||||
unique_ppid: 2,
|
||||
},
|
||||
});
|
||||
processH = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 7,
|
||||
unique_ppid: 6,
|
||||
},
|
||||
});
|
||||
processI = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'termination_event',
|
||||
unique_pid: 8,
|
||||
unique_ppid: 0,
|
||||
},
|
||||
});
|
||||
layout = () => isometricTaxiLayout(factory(events));
|
||||
events = [];
|
||||
});
|
||||
describe('when rendering no nodes', () => {
|
||||
it('renders right', () => {
|
||||
expect(layout()).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
describe('when rendering one node', () => {
|
||||
beforeEach(() => {
|
||||
events = [processA];
|
||||
});
|
||||
it('renders right', () => {
|
||||
expect(layout()).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
describe('when rendering two nodes, one being the parent of the other', () => {
|
||||
beforeEach(() => {
|
||||
events = [processA, processB];
|
||||
});
|
||||
it('renders right', () => {
|
||||
expect(layout()).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
describe('when rendering two forks, and one fork has an extra long tine', () => {
|
||||
beforeEach(() => {
|
||||
events = [
|
||||
processA,
|
||||
processB,
|
||||
processC,
|
||||
processD,
|
||||
processE,
|
||||
processF,
|
||||
processG,
|
||||
processH,
|
||||
processI,
|
||||
];
|
||||
});
|
||||
it('renders right', () => {
|
||||
expect(layout()).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,453 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import * as vector2 from '../../lib/vector2';
|
||||
import {
|
||||
IndexedProcessTree,
|
||||
Vector2,
|
||||
EdgeLineSegment,
|
||||
ProcessWidths,
|
||||
ProcessPositions,
|
||||
EdgeLineMetadata,
|
||||
ProcessWithWidthMetadata,
|
||||
Matrix3,
|
||||
IsometricTaxiLayout,
|
||||
} from '../../types';
|
||||
import * as event from '../../../../common/endpoint/models/event';
|
||||
import { ResolverEvent } from '../../../../common/endpoint/types';
|
||||
import * as model from './index';
|
||||
import { getFriendlyElapsedTime as elapsedTime } from '../../lib/date';
|
||||
|
||||
/**
|
||||
* Graph the process tree
|
||||
*/
|
||||
export function isometricTaxiLayout(indexedProcessTree: IndexedProcessTree): IsometricTaxiLayout {
|
||||
/**
|
||||
* Walk the tree in reverse level order, calculating the 'width' of subtrees.
|
||||
*/
|
||||
const widths = widthsOfProcessSubtrees(indexedProcessTree);
|
||||
|
||||
/**
|
||||
* Walk the tree in level order. Using the precalculated widths, calculate the position of nodes.
|
||||
* Nodes are positioned relative to their parents and preceding siblings.
|
||||
*/
|
||||
const positions = processPositions(indexedProcessTree, widths);
|
||||
|
||||
/**
|
||||
* With the widths and positions precalculated, we calculate edge line segments (arrays of vector2s)
|
||||
* which connect them in a 'pitchfork' design.
|
||||
*/
|
||||
const edgeLineSegments = processEdgeLineSegments(indexedProcessTree, widths, positions);
|
||||
|
||||
/**
|
||||
* Transform the positions of nodes and edges so they seem like they are on an isometric grid.
|
||||
*/
|
||||
const transformedEdgeLineSegments: EdgeLineSegment[] = [];
|
||||
const transformedPositions = new Map<ResolverEvent, Vector2>();
|
||||
|
||||
for (const [processEvent, position] of positions) {
|
||||
transformedPositions.set(
|
||||
processEvent,
|
||||
vector2.applyMatrix3(position, isometricTransformMatrix)
|
||||
);
|
||||
}
|
||||
|
||||
for (const edgeLineSegment of edgeLineSegments) {
|
||||
const {
|
||||
points: [startPoint, endPoint],
|
||||
} = edgeLineSegment;
|
||||
|
||||
const transformedSegment: EdgeLineSegment = {
|
||||
...edgeLineSegment,
|
||||
points: [
|
||||
vector2.applyMatrix3(startPoint, isometricTransformMatrix),
|
||||
vector2.applyMatrix3(endPoint, isometricTransformMatrix),
|
||||
],
|
||||
};
|
||||
|
||||
transformedEdgeLineSegments.push(transformedSegment);
|
||||
}
|
||||
|
||||
return {
|
||||
processNodePositions: transformedPositions,
|
||||
edgeLineSegments: transformedEdgeLineSegments,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* In laying out the graph, we precalculate the 'width' of each subtree. The 'width' of the subtree is determined by its
|
||||
* descedants and the rule that each process node must be at least 1 unit apart. Enforcing that all nodes are at least
|
||||
* 1 unit apart on the x axis makes it easy to prevent the UI components from overlapping. There will always be space.
|
||||
*
|
||||
* Example widths:
|
||||
*
|
||||
* A and B each have a width of 0
|
||||
*
|
||||
* A
|
||||
* |
|
||||
* B
|
||||
*
|
||||
* A has a width of 1. B and C have a width of 0.
|
||||
* B and C must be 1 unit apart, so the A subtree has a width of 1.
|
||||
*
|
||||
* A
|
||||
* ____|____
|
||||
* | |
|
||||
* B C
|
||||
*
|
||||
*
|
||||
* D, E, F, G, H all have a width of 0.
|
||||
* B has a width of 1 since D->E must be 1 unit apart.
|
||||
* Similarly, C has a width of 1 since F->G must be 1 unit apart.
|
||||
* A has width of 3, since B has a width of 1, and C has a width of 1, and E->F must be at least
|
||||
* 1 unit apart.
|
||||
* A
|
||||
* ____|____
|
||||
* | |
|
||||
* B C
|
||||
* ___|___ ___|___
|
||||
* | | | |
|
||||
* D E F G
|
||||
* |
|
||||
* H
|
||||
*
|
||||
*/
|
||||
function widthsOfProcessSubtrees(indexedProcessTree: IndexedProcessTree): ProcessWidths {
|
||||
const widths = new Map<ResolverEvent, number>();
|
||||
|
||||
if (model.size(indexedProcessTree) === 0) {
|
||||
return widths;
|
||||
}
|
||||
|
||||
const processesInReverseLevelOrder = [...model.levelOrder(indexedProcessTree)].reverse();
|
||||
|
||||
for (const process of processesInReverseLevelOrder) {
|
||||
const children = model.children(indexedProcessTree, process);
|
||||
|
||||
const sumOfWidthOfChildren = function sumOfWidthOfChildren() {
|
||||
return children.reduce(function sum(currentValue, child) {
|
||||
/**
|
||||
* `widths.get` will always return a number in this case.
|
||||
* This loop sequences a tree in reverse level order. Width values are set for each node.
|
||||
* Therefore a parent can always find a width for its children, since all of its children
|
||||
* will have been handled already.
|
||||
*/
|
||||
return currentValue + widths.get(child)!;
|
||||
}, 0);
|
||||
};
|
||||
|
||||
const width = sumOfWidthOfChildren() + Math.max(0, children.length - 1) * distanceBetweenNodes;
|
||||
widths.set(process, width);
|
||||
}
|
||||
|
||||
return widths;
|
||||
}
|
||||
|
||||
function processEdgeLineSegments(
|
||||
indexedProcessTree: IndexedProcessTree,
|
||||
widths: ProcessWidths,
|
||||
positions: ProcessPositions
|
||||
): EdgeLineSegment[] {
|
||||
const edgeLineSegments: EdgeLineSegment[] = [];
|
||||
for (const metadata of levelOrderWithWidths(indexedProcessTree, widths)) {
|
||||
const edgeLineMetadata: EdgeLineMetadata = { uniqueId: '' };
|
||||
/**
|
||||
* We only handle children, drawing lines back to their parents. The root has no parent, so we skip it
|
||||
*/
|
||||
if (metadata.parent === null) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
const { process, parent, parentWidth } = metadata;
|
||||
const position = positions.get(process);
|
||||
const parentPosition = positions.get(parent);
|
||||
const parentId = event.entityId(parent);
|
||||
const processEntityId = event.entityId(process);
|
||||
const edgeLineId = parentId ? parentId + processEntityId : parentId;
|
||||
|
||||
if (position === undefined || parentPosition === undefined) {
|
||||
/**
|
||||
* All positions have been precalculated, so if any are missing, it's an error. This will never happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
|
||||
const parentTime = event.eventTimestamp(parent);
|
||||
const processTime = event.eventTimestamp(process);
|
||||
if (parentTime && processTime) {
|
||||
edgeLineMetadata.elapsedTime = elapsedTime(parentTime, processTime) ?? undefined;
|
||||
}
|
||||
edgeLineMetadata.uniqueId = edgeLineId;
|
||||
|
||||
/**
|
||||
* The point halfway between the parent and child on the y axis, we sometimes have a hard angle here in the edge line
|
||||
*/
|
||||
const midwayY = parentPosition[1] + (position[1] - parentPosition[1]) / 2;
|
||||
|
||||
/**
|
||||
* When drawing edge lines between a parent and children (when there are multiple children) we draw a pitchfork type
|
||||
* design. The 'midway' line, runs along the x axis and joins all the children with a single descendant line from the parent.
|
||||
* See the ascii diagram below. The underscore characters would be the midway line.
|
||||
*
|
||||
* A
|
||||
* ____|____
|
||||
* | |
|
||||
* B C
|
||||
*/
|
||||
const lineFromProcessToMidwayLine: EdgeLineSegment = {
|
||||
points: [[position[0], midwayY], position],
|
||||
metadata: edgeLineMetadata,
|
||||
};
|
||||
|
||||
const siblings = model.children(indexedProcessTree, parent);
|
||||
const isFirstChild = process === siblings[0];
|
||||
|
||||
if (metadata.isOnlyChild) {
|
||||
// add a single line segment directly from parent to child. We don't do the 'pitchfork' in this case.
|
||||
edgeLineSegments.push({ points: [parentPosition, position], metadata: edgeLineMetadata });
|
||||
} else if (isFirstChild) {
|
||||
/**
|
||||
* If the parent has multiple children, we draw the 'midway' line, and the line from the
|
||||
* parent to the midway line, while handling the first child.
|
||||
*
|
||||
* Consider A the parent, and B the first child. We would draw somemthing like what's in the below diagram. The line from the
|
||||
* midway line to C would be drawn when we handle C.
|
||||
*
|
||||
* A
|
||||
* ____|____
|
||||
* |
|
||||
* B C
|
||||
*/
|
||||
const { firstChildWidth, lastChildWidth } = metadata;
|
||||
|
||||
const lineFromParentToMidwayLine: EdgeLineSegment = {
|
||||
points: [parentPosition, [parentPosition[0], midwayY]],
|
||||
metadata: { uniqueId: `parentToMid${edgeLineId}` },
|
||||
};
|
||||
|
||||
const widthOfMidline = parentWidth - firstChildWidth / 2 - lastChildWidth / 2;
|
||||
|
||||
const minX = parentWidth / -2 + firstChildWidth / 2;
|
||||
const maxX = minX + widthOfMidline;
|
||||
|
||||
const midwayLine: EdgeLineSegment = {
|
||||
points: [
|
||||
[
|
||||
// Position line relative to the parent's x component
|
||||
parentPosition[0] + minX,
|
||||
midwayY,
|
||||
],
|
||||
[
|
||||
// Position line relative to the parent's x component
|
||||
parentPosition[0] + maxX,
|
||||
midwayY,
|
||||
],
|
||||
],
|
||||
metadata: { uniqueId: `midway${edgeLineId}` },
|
||||
};
|
||||
|
||||
edgeLineSegments.push(
|
||||
/* line from parent to midway line */
|
||||
lineFromParentToMidwayLine,
|
||||
midwayLine,
|
||||
lineFromProcessToMidwayLine
|
||||
);
|
||||
} else {
|
||||
// If this isn't the first child, it must have siblings (the first of which drew the midway line and line
|
||||
// from the parent to the midway line
|
||||
edgeLineSegments.push(lineFromProcessToMidwayLine);
|
||||
}
|
||||
}
|
||||
return edgeLineSegments;
|
||||
}
|
||||
|
||||
function processPositions(
|
||||
indexedProcessTree: IndexedProcessTree,
|
||||
widths: ProcessWidths
|
||||
): ProcessPositions {
|
||||
const positions = new Map<ResolverEvent, Vector2>();
|
||||
/**
|
||||
* This algorithm iterates the tree in level order. It keeps counters that are reset for each parent.
|
||||
* By keeping track of the last parent node, we can know when we are dealing with a new set of siblings and
|
||||
* reset the counters.
|
||||
*/
|
||||
let lastProcessedParentNode: ResolverEvent | undefined;
|
||||
/**
|
||||
* Nodes are positioned relative to their siblings. We walk this in level order, so we handle
|
||||
* children left -> right.
|
||||
*
|
||||
* The width of preceding siblings is used to left align the node.
|
||||
* The number of preceding siblings is important because each sibling must be 1 unit apart
|
||||
* on the x axis.
|
||||
*/
|
||||
let numberOfPrecedingSiblings = 0;
|
||||
let runningWidthOfPrecedingSiblings = 0;
|
||||
|
||||
for (const metadata of levelOrderWithWidths(indexedProcessTree, widths)) {
|
||||
// Handle root node
|
||||
if (metadata.parent === null) {
|
||||
const { process } = metadata;
|
||||
/**
|
||||
* Place the root node at (0, 0) for now.
|
||||
*/
|
||||
positions.set(process, [0, 0]);
|
||||
} else {
|
||||
const { process, parent, isOnlyChild, width, parentWidth } = metadata;
|
||||
|
||||
// Reinit counters when parent changes
|
||||
if (lastProcessedParentNode !== parent) {
|
||||
numberOfPrecedingSiblings = 0;
|
||||
runningWidthOfPrecedingSiblings = 0;
|
||||
|
||||
// keep track of this so we know when to reinitialize
|
||||
lastProcessedParentNode = parent;
|
||||
}
|
||||
|
||||
const parentPosition = positions.get(parent);
|
||||
|
||||
if (parentPosition === undefined) {
|
||||
/**
|
||||
* Since this algorithm populates the `positions` map in level order,
|
||||
* the parent node will have been processed already and the parent position
|
||||
* will always be available.
|
||||
*
|
||||
* This will never happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
|
||||
/**
|
||||
* The x 'offset' is added to the x value of the parent to determine the position of the node.
|
||||
* We add `parentWidth / -2` in order to align the left side of this node with the left side of its parent.
|
||||
* We add `numberOfPrecedingSiblings * distanceBetweenNodes` in order to keep each node 1 apart on the x axis.
|
||||
* We add `runningWidthOfPrecedingSiblings` so that we don't overlap with our preceding siblings. We stack em up.
|
||||
* We add `width / 2` so that we center the node horizontally (in case it has non-0 width.)
|
||||
*/
|
||||
const xOffset =
|
||||
parentWidth / -2 +
|
||||
numberOfPrecedingSiblings * distanceBetweenNodes +
|
||||
runningWidthOfPrecedingSiblings +
|
||||
width / 2;
|
||||
|
||||
/**
|
||||
* The y axis gains `-distanceBetweenNodes` as we move down the screen 1 unit at a time.
|
||||
*/
|
||||
let yDistanceBetweenNodes = -distanceBetweenNodes;
|
||||
|
||||
if (!isOnlyChild) {
|
||||
// Make space on leaves to show elapsed time
|
||||
yDistanceBetweenNodes *= 2;
|
||||
}
|
||||
|
||||
const position = vector2.add([xOffset, yDistanceBetweenNodes], parentPosition);
|
||||
|
||||
positions.set(process, position);
|
||||
|
||||
numberOfPrecedingSiblings += 1;
|
||||
runningWidthOfPrecedingSiblings += width;
|
||||
}
|
||||
}
|
||||
|
||||
return positions;
|
||||
}
|
||||
function* levelOrderWithWidths(
|
||||
tree: IndexedProcessTree,
|
||||
widths: ProcessWidths
|
||||
): Iterable<ProcessWithWidthMetadata> {
|
||||
for (const process of model.levelOrder(tree)) {
|
||||
const parent = model.parent(tree, process);
|
||||
const width = widths.get(process);
|
||||
|
||||
if (width === undefined) {
|
||||
/**
|
||||
* All widths have been precalcluated, so this will not happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
|
||||
/** If the parent is undefined, we are processing the root. */
|
||||
if (parent === undefined) {
|
||||
yield {
|
||||
process,
|
||||
width,
|
||||
parent: null,
|
||||
parentWidth: null,
|
||||
isOnlyChild: null,
|
||||
firstChildWidth: null,
|
||||
lastChildWidth: null,
|
||||
};
|
||||
} else {
|
||||
const parentWidth = widths.get(parent);
|
||||
|
||||
if (parentWidth === undefined) {
|
||||
/**
|
||||
* All widths have been precalcluated, so this will not happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
|
||||
const metadata: Partial<ProcessWithWidthMetadata> = {
|
||||
process,
|
||||
width,
|
||||
parent,
|
||||
parentWidth,
|
||||
};
|
||||
|
||||
const siblings = model.children(tree, parent);
|
||||
if (siblings.length === 1) {
|
||||
metadata.isOnlyChild = true;
|
||||
metadata.lastChildWidth = width;
|
||||
metadata.firstChildWidth = width;
|
||||
} else {
|
||||
const firstChildWidth = widths.get(siblings[0]);
|
||||
const lastChildWidth = widths.get(siblings[siblings.length - 1]);
|
||||
if (firstChildWidth === undefined || lastChildWidth === undefined) {
|
||||
/**
|
||||
* All widths have been precalcluated, so this will not happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
metadata.isOnlyChild = false;
|
||||
metadata.firstChildWidth = firstChildWidth;
|
||||
metadata.lastChildWidth = lastChildWidth;
|
||||
}
|
||||
|
||||
yield metadata as ProcessWithWidthMetadata;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * An isometric projection is a method for representing three dimensional objects in 2 dimensions.
 * More information about isometric projections can be found here https://en.wikipedia.org/wiki/Isometric_projection.
 * In our case, we obtain the isometric projection by rotating the objects 45 degrees in the plane of the screen
 * and arctan(1/sqrt(2)) (~35.3 degrees) through the horizontal axis.
 *
 * A rotation by 45 degrees in the plane of the screen is given by:
 * [ sqrt(2)/2   -sqrt(2)/2   0
 *   sqrt(2)/2    sqrt(2)/2   0
 *   0            0           1]
 *
 * A rotation by arctan(1/sqrt(2)) through the horizontal axis is given by:
 * [ 1      0            0
 *   0      sqrt(3)/3    -sqrt(6)/3
 *   0      sqrt(6)/3    sqrt(3)/3]
 *
 * We can multiply both of these matrices to get the final transformation below.
 *
 * NOTE(review): the third row below is the affine identity [0, 0, 1] rather than the
 * third row of the full 3-D product — presumably the z component is discarded because
 * this Matrix3 is used as a 2-D homogeneous (affine) transform. Confirm against the
 * Matrix3 helpers before changing.
 */
/* prettier-ignore */
const isometricTransformMatrix: Matrix3 = [
  Math.sqrt(2) / 2, -(Math.sqrt(2) / 2), 0,
  Math.sqrt(6) / 6, Math.sqrt(6) / 6, -(Math.sqrt(6) / 3),
  0, 0, 1,
]

// The base unit, in pixels (at scale 1), that node spacing is expressed in.
const unit = 140;
// How many `unit`s apart adjacent nodes are placed.
const distanceBetweenNodesInUnits = 2;

/**
 * The distance in pixels (at scale 1) between nodes. Change this to space out nodes more
 */
const distanceBetweenNodes = distanceBetweenNodesInUnits * unit;
|
|
@ -0,0 +1,125 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import {
|
||||
ResolverTree,
|
||||
ResolverEvent,
|
||||
ResolverNodeStats,
|
||||
ResolverLifecycleNode,
|
||||
} from '../../../common/endpoint/types';
|
||||
import { uniquePidForProcess } from './process_event';
|
||||
|
||||
/**
|
||||
* ResolverTree is a type returned by the server.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This returns the 'LifecycleNodes' of the tree. These nodes have
|
||||
* the entityID and stats for a process. Used by `relatedEventsStats`.
|
||||
*/
|
||||
function lifecycleNodes(tree: ResolverTree): ResolverLifecycleNode[] {
|
||||
return [tree, ...tree.children.childNodes, ...tree.ancestry.ancestors];
|
||||
}
|
||||
|
||||
/**
|
||||
* All the process events
|
||||
*/
|
||||
export function lifecycleEvents(tree: ResolverTree) {
|
||||
const events: ResolverEvent[] = [...tree.lifecycle];
|
||||
for (const { lifecycle } of tree.children.childNodes) {
|
||||
events.push(...lifecycle);
|
||||
}
|
||||
for (const { lifecycle } of tree.ancestry.ancestors) {
|
||||
events.push(...lifecycle);
|
||||
}
|
||||
return events;
|
||||
}
|
||||
|
||||
/**
|
||||
* This returns a map of entity_ids to stats for the related events and alerts.
|
||||
*/
|
||||
export function relatedEventsStats(tree: ResolverTree): Map<string, ResolverNodeStats> {
|
||||
const nodeStats: Map<string, ResolverNodeStats> = new Map();
|
||||
for (const node of lifecycleNodes(tree)) {
|
||||
if (node.stats) {
|
||||
nodeStats.set(node.entityID, node.stats);
|
||||
}
|
||||
}
|
||||
return nodeStats;
|
||||
}
|
||||
|
||||
/**
|
||||
* ResolverTree type is returned by the server. It organizes events into a complex structure. The
|
||||
* organization of events in the tree is done to associate metadata with the events. The client does not
|
||||
* use this metadata. Instead, the client flattens the tree into an array. Therefore we can safely
|
||||
* make a malformed ResolverTree for the purposes of the tests, so long as it is flattened in a predictable way.
|
||||
*/
|
||||
export function mock({
|
||||
events,
|
||||
cursors = { childrenNextChild: null, ancestryNextAncestor: null },
|
||||
}: {
|
||||
/**
|
||||
* Events represented by the ResolverTree.
|
||||
*/
|
||||
events: ResolverEvent[];
|
||||
/**
|
||||
* Optionally provide cursors for the 'children' and 'ancestry' edges.
|
||||
*/
|
||||
cursors?: { childrenNextChild: string | null; ancestryNextAncestor: string | null };
|
||||
}): ResolverTree | null {
|
||||
if (events.length === 0) {
|
||||
return null;
|
||||
}
|
||||
const first = events[0];
|
||||
return {
|
||||
entityID: uniquePidForProcess(first),
|
||||
// Required
|
||||
children: {
|
||||
childNodes: [],
|
||||
nextChild: cursors.childrenNextChild,
|
||||
},
|
||||
// Required
|
||||
relatedEvents: {
|
||||
events: [],
|
||||
nextEvent: null,
|
||||
},
|
||||
// Required
|
||||
relatedAlerts: {
|
||||
alerts: [],
|
||||
nextAlert: null,
|
||||
},
|
||||
// Required
|
||||
ancestry: {
|
||||
ancestors: [],
|
||||
nextAncestor: cursors.ancestryNextAncestor,
|
||||
},
|
||||
// Normally, this would have only certain events, but for testing purposes, it will have all events, since
|
||||
// the position of events in the ResolverTree is irrelevant.
|
||||
lifecycle: events,
|
||||
// Required
|
||||
stats: {
|
||||
events: {
|
||||
total: 0,
|
||||
byCategory: {},
|
||||
},
|
||||
totalAlerts: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* `true` if there are more children to fetch.
|
||||
*/
|
||||
export function hasMoreChildren(resolverTree: ResolverTree): boolean {
|
||||
return resolverTree.children.nextChild !== null;
|
||||
}
|
||||
|
||||
/**
|
||||
* `true` if there are more ancestors to fetch.
|
||||
*/
|
||||
export function hasMoreAncestors(resolverTree: ResolverTree): boolean {
|
||||
return resolverTree.ancestry.nextAncestor !== null;
|
||||
}
|
|
@ -4,8 +4,8 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import { CameraAction } from './camera';
|
||||
import { DataAction } from './data';
|
||||
import { ResolverEvent } from '../../../common/endpoint/types';
|
||||
import { DataAction } from './data/action';
|
||||
|
||||
/**
|
||||
* When the user wants to bring a process node front-and-center on the map.
|
||||
|
@ -53,26 +53,6 @@ interface AppDetectedNewIdFromQueryParams {
|
|||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Used when the alert list selects an alert and the flyout shows resolver.
|
||||
*/
|
||||
interface UserChangedSelectedEvent {
|
||||
readonly type: 'userChangedSelectedEvent';
|
||||
readonly payload: {
|
||||
/**
|
||||
* Optional because they could have unselected the event.
|
||||
*/
|
||||
readonly selectedEvent?: ResolverEvent;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggered by middleware when the data for resolver needs to be loaded. Used to set state in redux to 'loading'.
|
||||
*/
|
||||
interface AppRequestedResolverData {
|
||||
readonly type: 'appRequestedResolverData';
|
||||
}
|
||||
|
||||
/**
|
||||
* The action dispatched when the app requests related event data for one
|
||||
* subject (whose entity_id should be included as `payload`)
|
||||
|
@ -145,8 +125,6 @@ export type ResolverAction =
|
|||
| CameraAction
|
||||
| DataAction
|
||||
| UserBroughtProcessIntoView
|
||||
| UserChangedSelectedEvent
|
||||
| AppRequestedResolverData
|
||||
| UserFocusedOnResolverNode
|
||||
| UserSelectedResolverNode
|
||||
| UserRequestedRelatedEventData
|
||||
|
|
|
@ -6,10 +6,11 @@
|
|||
|
||||
import { createStore, Store, Reducer } from 'redux';
|
||||
import { cameraReducer, cameraInitialState } from './reducer';
|
||||
import { CameraState, Vector2, ResolverAction } from '../../types';
|
||||
import { CameraState, Vector2 } from '../../types';
|
||||
import * as selectors from './selectors';
|
||||
import { animatePanning } from './methods';
|
||||
import { lerp } from '../../lib/math';
|
||||
import { ResolverAction } from '../actions';
|
||||
|
||||
type TestAction =
|
||||
| ResolverAction
|
||||
|
|
|
@ -11,8 +11,9 @@ import * as vector2 from '../../lib/vector2';
|
|||
import * as selectors from './selectors';
|
||||
import { clamp } from '../../lib/math';
|
||||
|
||||
import { CameraState, ResolverAction, Vector2 } from '../../types';
|
||||
import { CameraState, Vector2 } from '../../types';
|
||||
import { scaleToZoom } from './scale_to_zoom';
|
||||
import { ResolverAction } from '../actions';
|
||||
|
||||
/**
|
||||
* Used in tests.
|
||||
|
|
|
@ -4,23 +4,44 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import {
|
||||
ResolverEvent,
|
||||
ResolverNodeStats,
|
||||
ResolverRelatedEvents,
|
||||
} from '../../../../common/endpoint/types';
|
||||
import { ResolverRelatedEvents, ResolverTree } from '../../../../common/endpoint/types';
|
||||
|
||||
interface ServerReturnedResolverData {
|
||||
readonly type: 'serverReturnedResolverData';
|
||||
readonly payload: {
|
||||
readonly events: Readonly<ResolverEvent[]>;
|
||||
readonly stats: Readonly<Map<string, ResolverNodeStats>>;
|
||||
readonly lineageLimits: { readonly children: string | null; readonly ancestors: string | null };
|
||||
/**
|
||||
* The result of fetching data
|
||||
*/
|
||||
result: ResolverTree;
|
||||
/**
|
||||
* The database document ID that was used to fetch the resolver tree
|
||||
*/
|
||||
databaseDocumentID: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface AppRequestedResolverData {
|
||||
readonly type: 'appRequestedResolverData';
|
||||
/**
|
||||
* entity ID used to make the request.
|
||||
*/
|
||||
readonly payload: string;
|
||||
}
|
||||
|
||||
interface ServerFailedToReturnResolverData {
|
||||
readonly type: 'serverFailedToReturnResolverData';
|
||||
/**
|
||||
* entity ID used to make the failed request
|
||||
*/
|
||||
readonly payload: string;
|
||||
}
|
||||
|
||||
interface AppAbortedResolverDataRequest {
|
||||
readonly type: 'appAbortedResolverDataRequest';
|
||||
/**
|
||||
* entity ID used to make the aborted request
|
||||
*/
|
||||
readonly payload: string;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -39,8 +60,29 @@ interface ServerReturnedRelatedEventData {
|
|||
readonly payload: ResolverRelatedEvents;
|
||||
}
|
||||
|
||||
/**
|
||||
* Used by `useStateSyncingActions` hook.
|
||||
* This is dispatched when external sources provide new parameters for Resolver.
|
||||
* When the component receives a new 'databaseDocumentID' prop, this is fired.
|
||||
*/
|
||||
interface AppReceivedNewExternalProperties {
|
||||
type: 'appReceivedNewExternalProperties';
|
||||
/**
|
||||
* Defines the externally provided properties that Resolver acknowledges.
|
||||
*/
|
||||
payload: {
|
||||
/**
|
||||
* the `_id` of an ES document. This defines the origin of the Resolver graph.
|
||||
*/
|
||||
databaseDocumentID?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export type DataAction =
|
||||
| ServerReturnedResolverData
|
||||
| ServerFailedToReturnResolverData
|
||||
| ServerFailedToReturnRelatedEventData
|
||||
| ServerReturnedRelatedEventData;
|
||||
| ServerReturnedRelatedEventData
|
||||
| AppReceivedNewExternalProperties
|
||||
| AppRequestedResolverData
|
||||
| AppAbortedResolverDataRequest;
|
||||
|
|
|
@ -1,251 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { Store, createStore } from 'redux';
|
||||
import { DataAction } from './action';
|
||||
import { dataReducer } from './reducer';
|
||||
import { DataState } from '../../types';
|
||||
import { LegacyEndpointEvent, ResolverEvent } from '../../../../common/endpoint/types';
|
||||
import {
|
||||
graphableProcesses,
|
||||
processNodePositionsAndEdgeLineSegments,
|
||||
limitsReached,
|
||||
} from './selectors';
|
||||
import { mockProcessEvent } from '../../models/process_event_test_helpers';
|
||||
import { EndpointDocGenerator } from '../../../../common/endpoint/generate_data';
|
||||
|
||||
describe('resolver graph layout', () => {
|
||||
let processA: LegacyEndpointEvent;
|
||||
let processB: LegacyEndpointEvent;
|
||||
let processC: LegacyEndpointEvent;
|
||||
let processD: LegacyEndpointEvent;
|
||||
let processE: LegacyEndpointEvent;
|
||||
let processF: LegacyEndpointEvent;
|
||||
let processG: LegacyEndpointEvent;
|
||||
let processH: LegacyEndpointEvent;
|
||||
let processI: LegacyEndpointEvent;
|
||||
let store: Store<DataState, DataAction>;
|
||||
|
||||
beforeEach(() => {
|
||||
/*
|
||||
* A
|
||||
* ____|____
|
||||
* | |
|
||||
* B C
|
||||
* ___|___ ___|___
|
||||
* | | | |
|
||||
* D E F G
|
||||
* |
|
||||
* H
|
||||
*
|
||||
*/
|
||||
processA = mockProcessEvent({
|
||||
endgame: {
|
||||
process_name: '',
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 0,
|
||||
},
|
||||
});
|
||||
processB = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'already_running',
|
||||
unique_pid: 1,
|
||||
unique_ppid: 0,
|
||||
},
|
||||
});
|
||||
processC = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 2,
|
||||
unique_ppid: 0,
|
||||
},
|
||||
});
|
||||
processD = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 3,
|
||||
unique_ppid: 1,
|
||||
},
|
||||
});
|
||||
processE = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 4,
|
||||
unique_ppid: 1,
|
||||
},
|
||||
});
|
||||
processF = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 5,
|
||||
unique_ppid: 2,
|
||||
},
|
||||
});
|
||||
processG = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 6,
|
||||
unique_ppid: 2,
|
||||
},
|
||||
});
|
||||
processH = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'creation_event',
|
||||
unique_pid: 7,
|
||||
unique_ppid: 6,
|
||||
},
|
||||
});
|
||||
processI = mockProcessEvent({
|
||||
endgame: {
|
||||
event_type_full: 'process_event',
|
||||
event_subtype_full: 'termination_event',
|
||||
unique_pid: 8,
|
||||
unique_ppid: 0,
|
||||
},
|
||||
});
|
||||
store = createStore(dataReducer, undefined);
|
||||
});
|
||||
describe('when rendering no nodes', () => {
|
||||
beforeEach(() => {
|
||||
const events: ResolverEvent[] = [];
|
||||
const action: DataAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: { events, stats: new Map(), lineageLimits: { children: null, ancestors: null } },
|
||||
};
|
||||
store.dispatch(action);
|
||||
});
|
||||
it('the graphableProcesses list should only include nothing', () => {
|
||||
const actual = graphableProcesses(store.getState());
|
||||
expect(actual).toEqual([]);
|
||||
});
|
||||
it('renders right', () => {
|
||||
expect(processNodePositionsAndEdgeLineSegments(store.getState())).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
describe('when rendering one node', () => {
|
||||
beforeEach(() => {
|
||||
const events = [processA];
|
||||
const action: DataAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: { events, stats: new Map(), lineageLimits: { children: null, ancestors: null } },
|
||||
};
|
||||
store.dispatch(action);
|
||||
});
|
||||
it('the graphableProcesses list should only include nothing', () => {
|
||||
const actual = graphableProcesses(store.getState());
|
||||
expect(actual).toEqual([processA]);
|
||||
});
|
||||
it('renders right', () => {
|
||||
expect(processNodePositionsAndEdgeLineSegments(store.getState())).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
describe('when rendering two nodes, one being the parent of the other', () => {
|
||||
beforeEach(() => {
|
||||
const events = [processA, processB];
|
||||
const action: DataAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: { events, stats: new Map(), lineageLimits: { children: null, ancestors: null } },
|
||||
};
|
||||
store.dispatch(action);
|
||||
});
|
||||
it('the graphableProcesses list should only include nothing', () => {
|
||||
const actual = graphableProcesses(store.getState());
|
||||
expect(actual).toEqual([processA, processB]);
|
||||
});
|
||||
it('renders right', () => {
|
||||
expect(processNodePositionsAndEdgeLineSegments(store.getState())).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
describe('when rendering two forks, and one fork has an extra long tine', () => {
|
||||
beforeEach(() => {
|
||||
const events = [
|
||||
processA,
|
||||
processB,
|
||||
processC,
|
||||
processD,
|
||||
processE,
|
||||
processF,
|
||||
processG,
|
||||
processH,
|
||||
processI,
|
||||
];
|
||||
const action: DataAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: { events, stats: new Map(), lineageLimits: { children: null, ancestors: null } },
|
||||
};
|
||||
store.dispatch(action);
|
||||
});
|
||||
it("the graphableProcesses list should only include events with 'processCreated' an 'processRan' eventType", () => {
|
||||
const actual = graphableProcesses(store.getState());
|
||||
expect(actual).toEqual([
|
||||
processA,
|
||||
processB,
|
||||
processC,
|
||||
processD,
|
||||
processE,
|
||||
processF,
|
||||
processG,
|
||||
processH,
|
||||
]);
|
||||
});
|
||||
it('renders right', () => {
|
||||
expect(processNodePositionsAndEdgeLineSegments(store.getState())).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolver graph with too much lineage', () => {
|
||||
let generator: EndpointDocGenerator;
|
||||
let store: Store<DataState, DataAction>;
|
||||
let allEvents: ResolverEvent[];
|
||||
let childrenCursor: string;
|
||||
let ancestorCursor: string;
|
||||
|
||||
beforeEach(() => {
|
||||
generator = new EndpointDocGenerator('seed');
|
||||
allEvents = generator.generateTree({ ancestors: 1, generations: 2, children: 2 }).allEvents;
|
||||
childrenCursor = 'aValidChildursor';
|
||||
ancestorCursor = 'aValidAncestorCursor';
|
||||
store = createStore(dataReducer, undefined);
|
||||
});
|
||||
|
||||
describe('should select from state properly', () => {
|
||||
it('should indicate there are too many ancestors', () => {
|
||||
const action: DataAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: {
|
||||
events: allEvents,
|
||||
stats: new Map(),
|
||||
lineageLimits: { children: childrenCursor, ancestors: ancestorCursor },
|
||||
},
|
||||
};
|
||||
store.dispatch(action);
|
||||
const { ancestors } = limitsReached(store.getState());
|
||||
expect(ancestors).toEqual(true);
|
||||
});
|
||||
it('should indicate there are too many children', () => {
|
||||
const action: DataAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: {
|
||||
events: allEvents,
|
||||
stats: new Map(),
|
||||
lineageLimits: { children: childrenCursor, ancestors: ancestorCursor },
|
||||
},
|
||||
};
|
||||
store.dispatch(action);
|
||||
const { children } = limitsReached(store.getState());
|
||||
expect(children).toEqual(true);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,8 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
export { dataReducer } from './reducer';
|
||||
export { DataAction } from './action';
|
|
@ -0,0 +1,52 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import { createStore, Store } from 'redux';
|
||||
import { EndpointDocGenerator } from '../../../../common/endpoint/generate_data';
|
||||
import { mock as mockResolverTree } from '../../models/resolver_tree';
|
||||
import { dataReducer } from './reducer';
|
||||
import * as selectors from './selectors';
|
||||
import { DataState } from '../../types';
|
||||
import { DataAction } from './action';
|
||||
|
||||
/**
|
||||
* Test the data reducer and selector.
|
||||
*/
|
||||
describe('Resolver Data Middleware', () => {
  // A store wired with only the data reducer; tests dispatch actions directly
  // (no middleware), so this exercises the reducer and selectors in isolation.
  let store: Store<DataState, DataAction>;

  beforeEach(() => {
    // Fresh store per test so state cannot leak between tests.
    store = createStore(dataReducer, undefined);
  });

  describe('when data was received and the ancestry and children edges had cursors', () => {
    beforeEach(() => {
      // Seeded generator makes the fixture deterministic across runs.
      const generator = new EndpointDocGenerator('seed');
      // Non-null cursors mean "the server has more on this edge".
      // (The typo in the child cursor value is harmless — any non-null string works.)
      const tree = mockResolverTree({
        events: generator.generateTree({ ancestors: 1, generations: 2, children: 2 }).allEvents,
        cursors: {
          childrenNextChild: 'aValidChildursor',
          ancestryNextAncestor: 'aValidAncestorCursor',
        },
      });
      // mockResolverTree returns null for an empty event list; the generated
      // tree is non-empty, so this guard only narrows `ResolverTree | null`.
      if (tree) {
        const action: DataAction = {
          type: 'serverReturnedResolverData',
          payload: {
            result: tree,
            databaseDocumentID: '',
          },
        };
        store.dispatch(action);
      }
    });
    it('should indicate there are additional ancestor', () => {
      expect(selectors.hasMoreAncestors(store.getState())).toBe(true);
    });
    it('should indicate there are additional children', () => {
      expect(selectors.hasMoreChildren(store.getState())).toBe(true);
    });
  });
});
|
|
@ -5,41 +5,72 @@
|
|||
*/
|
||||
|
||||
import { Reducer } from 'redux';
|
||||
import { DataState, ResolverAction } from '../../types';
|
||||
import { DataState } from '../../types';
|
||||
import { ResolverAction } from '../actions';
|
||||
|
||||
function initialState(): DataState {
|
||||
return {
|
||||
results: [],
|
||||
relatedEventsStats: new Map(),
|
||||
relatedEvents: new Map(),
|
||||
relatedEventsReady: new Map(),
|
||||
lineageLimits: { children: null, ancestors: null },
|
||||
isLoading: false,
|
||||
hasError: false,
|
||||
};
|
||||
}
|
||||
const initialState: DataState = {
|
||||
relatedEventsStats: new Map(),
|
||||
relatedEvents: new Map(),
|
||||
relatedEventsReady: new Map(),
|
||||
};
|
||||
|
||||
export const dataReducer: Reducer<DataState, ResolverAction> = (state = initialState(), action) => {
|
||||
if (action.type === 'serverReturnedResolverData') {
|
||||
return {
|
||||
export const dataReducer: Reducer<DataState, ResolverAction> = (state = initialState, action) => {
|
||||
if (action.type === 'appReceivedNewExternalProperties') {
|
||||
const nextState: DataState = {
|
||||
...state,
|
||||
results: action.payload.events,
|
||||
relatedEventsStats: action.payload.stats,
|
||||
lineageLimits: action.payload.lineageLimits,
|
||||
isLoading: false,
|
||||
hasError: false,
|
||||
databaseDocumentID: action.payload.databaseDocumentID,
|
||||
};
|
||||
return nextState;
|
||||
} else if (action.type === 'appRequestedResolverData') {
|
||||
// keep track of what we're requesting, this way we know when to request and when not to.
|
||||
return {
|
||||
...state,
|
||||
isLoading: true,
|
||||
hasError: false,
|
||||
pendingRequestDatabaseDocumentID: action.payload,
|
||||
};
|
||||
} else if (action.type === 'appAbortedResolverDataRequest') {
|
||||
if (action.payload === state.pendingRequestDatabaseDocumentID) {
|
||||
// the request we were awaiting was aborted
|
||||
return {
|
||||
...state,
|
||||
pendingRequestDatabaseDocumentID: undefined,
|
||||
};
|
||||
} else {
|
||||
return state;
|
||||
}
|
||||
} else if (action.type === 'serverReturnedResolverData') {
|
||||
/** Only handle this if we are expecting a response */
|
||||
const nextState: DataState = {
|
||||
...state,
|
||||
|
||||
/**
|
||||
* Store the last received data, as well as the databaseDocumentID it relates to.
|
||||
*/
|
||||
lastResponse: {
|
||||
result: action.payload.result,
|
||||
databaseDocumentID: action.payload.databaseDocumentID,
|
||||
successful: true,
|
||||
},
|
||||
|
||||
// This assumes that if we just received something, there is no longer a pending request.
|
||||
// This cannot model multiple in-flight requests
|
||||
pendingRequestDatabaseDocumentID: undefined,
|
||||
};
|
||||
return nextState;
|
||||
} else if (action.type === 'serverFailedToReturnResolverData') {
|
||||
return {
|
||||
...state,
|
||||
hasError: true,
|
||||
};
|
||||
/** Only handle this if we are expecting a response */
|
||||
if (state.pendingRequestDatabaseDocumentID !== undefined) {
|
||||
const nextState: DataState = {
|
||||
...state,
|
||||
pendingRequestDatabaseDocumentID: undefined,
|
||||
lastResponse: {
|
||||
databaseDocumentID: state.pendingRequestDatabaseDocumentID,
|
||||
successful: false,
|
||||
},
|
||||
};
|
||||
return nextState;
|
||||
} else {
|
||||
return state;
|
||||
}
|
||||
} else if (
|
||||
action.type === 'userRequestedRelatedEventData' ||
|
||||
action.type === 'appDetectedMissingEventData'
|
||||
|
|
|
@ -0,0 +1,253 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import * as selectors from './selectors';
|
||||
import { DataState } from '../../types';
|
||||
import { dataReducer } from './reducer';
|
||||
import { DataAction } from './action';
|
||||
import { createStore } from 'redux';
|
||||
describe('data state', () => {
|
||||
let actions: DataAction[] = [];
|
||||
|
||||
/**
|
||||
* Get state, given an ordered collection of actions.
|
||||
*/
|
||||
const state: () => DataState = () => {
|
||||
const store = createStore(dataReducer);
|
||||
for (const action of actions) {
|
||||
store.dispatch(action);
|
||||
}
|
||||
return store.getState();
|
||||
};
|
||||
|
||||
/**
|
||||
* This prints out all of the properties of the data state.
|
||||
* This way we can see the overall behavior of the selector easily.
|
||||
*/
|
||||
const viewAsAString = (dataState: DataState) => {
|
||||
return [
|
||||
['is loading', selectors.isLoading(dataState)],
|
||||
['has an error', selectors.hasError(dataState)],
|
||||
['has more children', selectors.hasMoreChildren(dataState)],
|
||||
['has more ancestors', selectors.hasMoreAncestors(dataState)],
|
||||
['document to fetch', selectors.databaseDocumentIDToFetch(dataState)],
|
||||
['requires a pending request to be aborted', selectors.databaseDocumentIDToAbort(dataState)],
|
||||
]
|
||||
.map(([message, value]) => `${message}: ${JSON.stringify(value)}`)
|
||||
.join('\n');
|
||||
};
|
||||
|
||||
it(`shouldn't initially be loading, or have an error, or have more children or ancestors, or have a document to fetch, or have a pending request that needs to be aborted.`, () => {
|
||||
expect(viewAsAString(state())).toMatchInlineSnapshot(`
|
||||
"is loading: false
|
||||
has an error: false
|
||||
has more children: false
|
||||
has more ancestors: false
|
||||
document to fetch: null
|
||||
requires a pending request to be aborted: null"
|
||||
`);
|
||||
});
|
||||
|
||||
describe('when there is a databaseDocumentID but no pending request', () => {
|
||||
const databaseDocumentID = 'databaseDocumentID';
|
||||
beforeEach(() => {
|
||||
actions = [
|
||||
{
|
||||
type: 'appReceivedNewExternalProperties',
|
||||
payload: { databaseDocumentID },
|
||||
},
|
||||
];
|
||||
});
|
||||
it('should need to fetch the databaseDocumentID', () => {
|
||||
expect(selectors.databaseDocumentIDToFetch(state())).toBe(databaseDocumentID);
|
||||
});
|
||||
it('should not be loading, have an error, have more children or ancestors, or have a pending request that needs to be aborted.', () => {
|
||||
expect(viewAsAString(state())).toMatchInlineSnapshot(`
|
||||
"is loading: false
|
||||
has an error: false
|
||||
has more children: false
|
||||
has more ancestors: false
|
||||
document to fetch: \\"databaseDocumentID\\"
|
||||
requires a pending request to be aborted: null"
|
||||
`);
|
||||
});
|
||||
});
|
||||
describe('when there is a pending request but no databaseDocumentID', () => {
|
||||
const databaseDocumentID = 'databaseDocumentID';
|
||||
beforeEach(() => {
|
||||
actions = [
|
||||
{
|
||||
type: 'appRequestedResolverData',
|
||||
payload: databaseDocumentID,
|
||||
},
|
||||
];
|
||||
});
|
||||
it('should be loading', () => {
|
||||
expect(selectors.isLoading(state())).toBe(true);
|
||||
});
|
||||
it('should have a request to abort', () => {
|
||||
expect(selectors.databaseDocumentIDToAbort(state())).toBe(databaseDocumentID);
|
||||
});
|
||||
it('should not have an error, more children, more ancestors, or a document to fetch.', () => {
|
||||
expect(viewAsAString(state())).toMatchInlineSnapshot(`
|
||||
"is loading: true
|
||||
has an error: false
|
||||
has more children: false
|
||||
has more ancestors: false
|
||||
document to fetch: null
|
||||
requires a pending request to be aborted: \\"databaseDocumentID\\""
|
||||
`);
|
||||
});
|
||||
});
|
||||
describe('when there is a pending request for the current databaseDocumentID', () => {
|
||||
const databaseDocumentID = 'databaseDocumentID';
|
||||
beforeEach(() => {
|
||||
actions = [
|
||||
{
|
||||
type: 'appReceivedNewExternalProperties',
|
||||
payload: { databaseDocumentID },
|
||||
},
|
||||
{
|
||||
type: 'appRequestedResolverData',
|
||||
payload: databaseDocumentID,
|
||||
},
|
||||
];
|
||||
});
|
||||
it('should be loading', () => {
|
||||
expect(selectors.isLoading(state())).toBe(true);
|
||||
});
|
||||
it('should not have a request to abort', () => {
|
||||
expect(selectors.databaseDocumentIDToAbort(state())).toBe(null);
|
||||
});
|
||||
it('should not have an error, more children, more ancestors, a document to begin fetching, or a pending request that should be aborted.', () => {
|
||||
expect(viewAsAString(state())).toMatchInlineSnapshot(`
|
||||
"is loading: true
|
||||
has an error: false
|
||||
has more children: false
|
||||
has more ancestors: false
|
||||
document to fetch: null
|
||||
requires a pending request to be aborted: null"
|
||||
`);
|
||||
});
|
||||
describe('when the pending request fails', () => {
|
||||
beforeEach(() => {
|
||||
actions.push({
|
||||
type: 'serverFailedToReturnResolverData',
|
||||
payload: databaseDocumentID,
|
||||
});
|
||||
});
|
||||
it('should not be loading', () => {
|
||||
expect(selectors.isLoading(state())).toBe(false);
|
||||
});
|
||||
it('should have an error', () => {
|
||||
expect(selectors.hasError(state())).toBe(true);
|
||||
});
|
||||
it('should not be loading, have more children, have more ancestors, have a document to fetch, or have a pending request that needs to be aborted.', () => {
|
||||
expect(viewAsAString(state())).toMatchInlineSnapshot(`
|
||||
"is loading: false
|
||||
has an error: true
|
||||
has more children: false
|
||||
has more ancestors: false
|
||||
document to fetch: null
|
||||
requires a pending request to be aborted: null"
|
||||
`);
|
||||
});
|
||||
});
|
||||
});
|
||||
describe('when there is a pending request for a different databaseDocumentID than the current one', () => {
|
||||
const firstDatabaseDocumentID = 'first databaseDocumentID';
|
||||
const secondDatabaseDocumentID = 'second databaseDocumentID';
|
||||
beforeEach(() => {
|
||||
actions = [
|
||||
// receive the document ID, this would cause the middleware to starts the request
|
||||
{
|
||||
type: 'appReceivedNewExternalProperties',
|
||||
payload: { databaseDocumentID: firstDatabaseDocumentID },
|
||||
},
|
||||
// this happens when the middleware starts the request
|
||||
{
|
||||
type: 'appRequestedResolverData',
|
||||
payload: firstDatabaseDocumentID,
|
||||
},
|
||||
// receive a different databaseDocumentID. this should cause the middleware to abort the existing request and start a new one
|
||||
{
|
||||
type: 'appReceivedNewExternalProperties',
|
||||
payload: { databaseDocumentID: secondDatabaseDocumentID },
|
||||
},
|
||||
];
|
||||
});
|
||||
it('should be loading', () => {
|
||||
expect(selectors.isLoading(state())).toBe(true);
|
||||
});
|
||||
it('should need to fetch the second databaseDocumentID', () => {
|
||||
expect(selectors.databaseDocumentIDToFetch(state())).toBe(secondDatabaseDocumentID);
|
||||
});
|
||||
it('should need to abort the request for the databaseDocumentID', () => {
|
||||
expect(selectors.databaseDocumentIDToFetch(state())).toBe(secondDatabaseDocumentID);
|
||||
});
|
||||
it('should not have an error, more children, or more ancestors.', () => {
|
||||
expect(viewAsAString(state())).toMatchInlineSnapshot(`
|
||||
"is loading: true
|
||||
has an error: false
|
||||
has more children: false
|
||||
has more ancestors: false
|
||||
document to fetch: \\"second databaseDocumentID\\"
|
||||
requires a pending request to be aborted: \\"first databaseDocumentID\\""
|
||||
`);
|
||||
});
|
||||
describe('and when the old request was aborted', () => {
|
||||
beforeEach(() => {
|
||||
actions.push({
|
||||
type: 'appAbortedResolverDataRequest',
|
||||
payload: firstDatabaseDocumentID,
|
||||
});
|
||||
});
|
||||
it('should not require a pending request to be aborted', () => {
|
||||
expect(selectors.databaseDocumentIDToAbort(state())).toBe(null);
|
||||
});
|
||||
it('should have a document to fetch', () => {
|
||||
expect(selectors.databaseDocumentIDToFetch(state())).toBe(secondDatabaseDocumentID);
|
||||
});
|
||||
it('should not be loading', () => {
|
||||
expect(selectors.isLoading(state())).toBe(false);
|
||||
});
|
||||
it('should not have an error, more children, or more ancestors.', () => {
|
||||
expect(viewAsAString(state())).toMatchInlineSnapshot(`
|
||||
"is loading: false
|
||||
has an error: false
|
||||
has more children: false
|
||||
has more ancestors: false
|
||||
document to fetch: \\"second databaseDocumentID\\"
|
||||
requires a pending request to be aborted: null"
|
||||
`);
|
||||
});
|
||||
describe('and when the next request starts', () => {
|
||||
beforeEach(() => {
|
||||
actions.push({
|
||||
type: 'appRequestedResolverData',
|
||||
payload: secondDatabaseDocumentID,
|
||||
});
|
||||
});
|
||||
it('should not have a document ID to fetch', () => {
|
||||
expect(selectors.databaseDocumentIDToFetch(state())).toBe(null);
|
||||
});
|
||||
it('should be loading', () => {
|
||||
expect(selectors.isLoading(state())).toBe(true);
|
||||
});
|
||||
it('should not have an error, more children, more ancestors, or a pending request that needs to be aborted.', () => {
|
||||
expect(viewAsAString(state())).toMatchInlineSnapshot(`
|
||||
"is loading: true
|
||||
has an error: false
|
||||
has more children: false
|
||||
has more ancestors: false
|
||||
document to fetch: null
|
||||
requires a pending request to be aborted: null"
|
||||
`);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -8,449 +8,92 @@ import rbush from 'rbush';
|
|||
import { createSelector } from 'reselect';
|
||||
import {
|
||||
DataState,
|
||||
IndexedProcessTree,
|
||||
ProcessWidths,
|
||||
ProcessPositions,
|
||||
EdgeLineSegment,
|
||||
ProcessWithWidthMetadata,
|
||||
Matrix3,
|
||||
AdjacentProcessMap,
|
||||
Vector2,
|
||||
EdgeLineMetadata,
|
||||
IndexedEntity,
|
||||
IndexedEdgeLineSegment,
|
||||
IndexedProcessNode,
|
||||
AABB,
|
||||
VisibleEntites,
|
||||
} from '../../types';
|
||||
import { ResolverEvent } from '../../../../common/endpoint/types';
|
||||
import * as event from '../../../../common/endpoint/models/event';
|
||||
import { add as vector2Add, applyMatrix3 } from '../../lib/vector2';
|
||||
import {
|
||||
isGraphableProcess,
|
||||
isTerminatedProcess,
|
||||
uniquePidForProcess,
|
||||
} from '../../models/process_event';
|
||||
import {
|
||||
factory as indexedProcessTreeFactory,
|
||||
children as indexedProcessTreeChildren,
|
||||
parent as indexedProcessTreeParent,
|
||||
size,
|
||||
levelOrder,
|
||||
} from '../../models/indexed_process_tree';
|
||||
import { getFriendlyElapsedTime } from '../../lib/date';
|
||||
import { factory as indexedProcessTreeFactory } from '../../models/indexed_process_tree';
|
||||
import { isEqual } from '../../lib/aabb';
|
||||
|
||||
const unit = 140;
|
||||
const distanceBetweenNodesInUnits = 2;
|
||||
import {
|
||||
ResolverEvent,
|
||||
ResolverTree,
|
||||
ResolverNodeStats,
|
||||
ResolverRelatedEvents,
|
||||
} from '../../../../common/endpoint/types';
|
||||
import * as resolverTreeModel from '../../models/resolver_tree';
|
||||
import { isometricTaxiLayout } from '../../models/indexed_process_tree/isometric_taxi_layout';
|
||||
|
||||
export function isLoading(state: DataState) {
|
||||
return state.isLoading;
|
||||
}
|
||||
|
||||
export function hasError(state: DataState) {
|
||||
return state.hasError;
|
||||
/**
|
||||
* If there is currently a request.
|
||||
*/
|
||||
export function isLoading(state: DataState): boolean {
|
||||
return state.pendingRequestDatabaseDocumentID !== undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* An isometric projection is a method for representing three dimensional objects in 2 dimensions.
|
||||
* More information about isometric projections can be found here https://en.wikipedia.org/wiki/Isometric_projection.
|
||||
* In our case, we obtain the isometric projection by rotating the objects 45 degrees in the plane of the screen
|
||||
* and arctan(1/sqrt(2)) (~35.3 degrees) through the horizontal axis.
|
||||
*
|
||||
* A rotation by 45 degrees in the plane of the screen is given by:
|
||||
* [ sqrt(2)/2 -sqrt(2)/2 0
|
||||
* sqrt(2)/2 sqrt(2)/2 0
|
||||
* 0 0 1]
|
||||
*
|
||||
* A rotation by arctan(1/sqrt(2)) through the horizantal axis is given by:
|
||||
* [ 1 0 0
|
||||
* 0 sqrt(3)/3 -sqrt(6)/3
|
||||
* 0 sqrt(6)/3 sqrt(3)/3]
|
||||
*
|
||||
* We can multiply both of these matrices to get the final transformation below.
|
||||
* If a request was made and it threw an error or returned a failure response code.
|
||||
*/
|
||||
/* prettier-ignore */
|
||||
const isometricTransformMatrix: Matrix3 = [
|
||||
Math.sqrt(2) / 2, -(Math.sqrt(2) / 2), 0,
|
||||
Math.sqrt(6) / 6, Math.sqrt(6) / 6, -(Math.sqrt(6) / 3),
|
||||
0, 0, 1,
|
||||
]
|
||||
|
||||
/**
|
||||
* The distance in pixels (at scale 1) between nodes. Change this to space out nodes more
|
||||
*/
|
||||
const distanceBetweenNodes = distanceBetweenNodesInUnits * unit;
|
||||
|
||||
/**
|
||||
* Process events that will be graphed.
|
||||
*/
|
||||
export const graphableProcesses = createSelector(
|
||||
({ results }: DataState) => results,
|
||||
function (results: DataState['results']) {
|
||||
return results.filter(isGraphableProcess);
|
||||
export function hasError(state: DataState): boolean {
|
||||
if (state.lastResponse && state.lastResponse.successful === false) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* The last ResolverTree we received, if any. It may be stale (it might not be for the same databaseDocumentID that
|
||||
* we're currently interested in.
|
||||
*/
|
||||
const resolverTree = (state: DataState): ResolverTree | undefined => {
|
||||
if (state.lastResponse && state.lastResponse.successful) {
|
||||
return state.lastResponse.result;
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Process events that will be displayed as terminated.
|
||||
*/
|
||||
export const terminatedProcesses = createSelector(
|
||||
({ results }: DataState) => results,
|
||||
function (results: DataState['results']) {
|
||||
return new Set(
|
||||
results.filter(isTerminatedProcess).map((terminatedEvent) => {
|
||||
export const terminatedProcesses = createSelector(resolverTree, function (tree?: ResolverTree) {
|
||||
if (!tree) {
|
||||
return new Set();
|
||||
}
|
||||
return new Set(
|
||||
resolverTreeModel
|
||||
.lifecycleEvents(tree)
|
||||
.filter(isTerminatedProcess)
|
||||
.map((terminatedEvent) => {
|
||||
return uniquePidForProcess(terminatedEvent);
|
||||
})
|
||||
);
|
||||
}
|
||||
);
|
||||
);
|
||||
});
|
||||
|
||||
/**
|
||||
* In laying out the graph, we precalculate the 'width' of each subtree. The 'width' of the subtree is determined by its
|
||||
* descedants and the rule that each process node must be at least 1 unit apart. Enforcing that all nodes are at least
|
||||
* 1 unit apart on the x axis makes it easy to prevent the UI components from overlapping. There will always be space.
|
||||
*
|
||||
* Example widths:
|
||||
*
|
||||
* A and B each have a width of 0
|
||||
*
|
||||
* A
|
||||
* |
|
||||
* B
|
||||
*
|
||||
* A has a width of 1. B and C have a width of 0.
|
||||
* B and C must be 1 unit apart, so the A subtree has a width of 1.
|
||||
*
|
||||
* A
|
||||
* ____|____
|
||||
* | |
|
||||
* B C
|
||||
*
|
||||
*
|
||||
* D, E, F, G, H all have a width of 0.
|
||||
* B has a width of 1 since D->E must be 1 unit apart.
|
||||
* Similarly, C has a width of 1 since F->G must be 1 unit apart.
|
||||
* A has width of 3, since B has a width of 1, and C has a width of 1, and E->F must be at least
|
||||
* 1 unit apart.
|
||||
* A
|
||||
* ____|____
|
||||
* | |
|
||||
* B C
|
||||
* ___|___ ___|___
|
||||
* | | | |
|
||||
* D E F G
|
||||
* |
|
||||
* H
|
||||
*
|
||||
* Process events that will be graphed.
|
||||
*/
|
||||
function widthsOfProcessSubtrees(indexedProcessTree: IndexedProcessTree): ProcessWidths {
|
||||
const widths = new Map<ResolverEvent, number>();
|
||||
|
||||
if (size(indexedProcessTree) === 0) {
|
||||
return widths;
|
||||
export const graphableProcesses = createSelector(resolverTree, function (tree?) {
|
||||
if (tree) {
|
||||
return resolverTreeModel.lifecycleEvents(tree).filter(isGraphableProcess);
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
});
|
||||
|
||||
const processesInReverseLevelOrder = [...levelOrder(indexedProcessTree)].reverse();
|
||||
|
||||
for (const process of processesInReverseLevelOrder) {
|
||||
const children = indexedProcessTreeChildren(indexedProcessTree, process);
|
||||
|
||||
const sumOfWidthOfChildren = function sumOfWidthOfChildren() {
|
||||
return children.reduce(function sum(currentValue, child) {
|
||||
/**
|
||||
* `widths.get` will always return a number in this case.
|
||||
* This loop sequences a tree in reverse level order. Width values are set for each node.
|
||||
* Therefore a parent can always find a width for its children, since all of its children
|
||||
* will have been handled already.
|
||||
*/
|
||||
return currentValue + widths.get(child)!;
|
||||
}, 0);
|
||||
};
|
||||
|
||||
const width = sumOfWidthOfChildren() + Math.max(0, children.length - 1) * distanceBetweenNodes;
|
||||
widths.set(process, width);
|
||||
}
|
||||
|
||||
return widths;
|
||||
}
|
||||
|
||||
function processEdgeLineSegments(
|
||||
indexedProcessTree: IndexedProcessTree,
|
||||
widths: ProcessWidths,
|
||||
positions: ProcessPositions
|
||||
): EdgeLineSegment[] {
|
||||
const edgeLineSegments: EdgeLineSegment[] = [];
|
||||
for (const metadata of levelOrderWithWidths(indexedProcessTree, widths)) {
|
||||
const edgeLineMetadata: EdgeLineMetadata = { uniqueId: '' };
|
||||
/**
|
||||
* We only handle children, drawing lines back to their parents. The root has no parent, so we skip it
|
||||
*/
|
||||
if (metadata.parent === null) {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
const { process, parent, parentWidth } = metadata;
|
||||
const position = positions.get(process);
|
||||
const parentPosition = positions.get(parent);
|
||||
const parentId = event.entityId(parent);
|
||||
const processEntityId = event.entityId(process);
|
||||
const edgeLineId = parentId ? parentId + processEntityId : parentId;
|
||||
|
||||
if (position === undefined || parentPosition === undefined) {
|
||||
/**
|
||||
* All positions have been precalculated, so if any are missing, it's an error. This will never happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
|
||||
const parentTime = event.eventTimestamp(parent);
|
||||
const processTime = event.eventTimestamp(process);
|
||||
if (parentTime && processTime) {
|
||||
const elapsedTime = getFriendlyElapsedTime(parentTime, processTime);
|
||||
if (elapsedTime) edgeLineMetadata.elapsedTime = elapsedTime;
|
||||
}
|
||||
edgeLineMetadata.uniqueId = edgeLineId;
|
||||
|
||||
/**
|
||||
* The point halfway between the parent and child on the y axis, we sometimes have a hard angle here in the edge line
|
||||
*/
|
||||
const midwayY = parentPosition[1] + (position[1] - parentPosition[1]) / 2;
|
||||
|
||||
/**
|
||||
* When drawing edge lines between a parent and children (when there are multiple children) we draw a pitchfork type
|
||||
* design. The 'midway' line, runs along the x axis and joins all the children with a single descendant line from the parent.
|
||||
* See the ascii diagram below. The underscore characters would be the midway line.
|
||||
*
|
||||
* A
|
||||
* ____|____
|
||||
* | |
|
||||
* B C
|
||||
*/
|
||||
const lineFromProcessToMidwayLine: EdgeLineSegment = {
|
||||
points: [[position[0], midwayY], position],
|
||||
metadata: edgeLineMetadata,
|
||||
};
|
||||
|
||||
const siblings = indexedProcessTreeChildren(indexedProcessTree, parent);
|
||||
const isFirstChild = process === siblings[0];
|
||||
|
||||
if (metadata.isOnlyChild) {
|
||||
// add a single line segment directly from parent to child. We don't do the 'pitchfork' in this case.
|
||||
edgeLineSegments.push({ points: [parentPosition, position], metadata: edgeLineMetadata });
|
||||
} else if (isFirstChild) {
|
||||
/**
|
||||
* If the parent has multiple children, we draw the 'midway' line, and the line from the
|
||||
* parent to the midway line, while handling the first child.
|
||||
*
|
||||
* Consider A the parent, and B the first child. We would draw somemthing like what's in the below diagram. The line from the
|
||||
* midway line to C would be drawn when we handle C.
|
||||
*
|
||||
* A
|
||||
* ____|____
|
||||
* |
|
||||
* B C
|
||||
*/
|
||||
const { firstChildWidth, lastChildWidth } = metadata;
|
||||
|
||||
const lineFromParentToMidwayLine: EdgeLineSegment = {
|
||||
points: [parentPosition, [parentPosition[0], midwayY]],
|
||||
metadata: { uniqueId: `parentToMid${edgeLineId}` },
|
||||
};
|
||||
|
||||
const widthOfMidline = parentWidth - firstChildWidth / 2 - lastChildWidth / 2;
|
||||
|
||||
const minX = parentWidth / -2 + firstChildWidth / 2;
|
||||
const maxX = minX + widthOfMidline;
|
||||
|
||||
const midwayLine: EdgeLineSegment = {
|
||||
points: [
|
||||
[
|
||||
// Position line relative to the parent's x component
|
||||
parentPosition[0] + minX,
|
||||
midwayY,
|
||||
],
|
||||
[
|
||||
// Position line relative to the parent's x component
|
||||
parentPosition[0] + maxX,
|
||||
midwayY,
|
||||
],
|
||||
],
|
||||
metadata: { uniqueId: `midway${edgeLineId}` },
|
||||
};
|
||||
|
||||
edgeLineSegments.push(
|
||||
/* line from parent to midway line */
|
||||
lineFromParentToMidwayLine,
|
||||
midwayLine,
|
||||
lineFromProcessToMidwayLine
|
||||
);
|
||||
} else {
|
||||
// If this isn't the first child, it must have siblings (the first of which drew the midway line and line
|
||||
// from the parent to the midway line
|
||||
edgeLineSegments.push(lineFromProcessToMidwayLine);
|
||||
}
|
||||
}
|
||||
return edgeLineSegments;
|
||||
}
|
||||
|
||||
function* levelOrderWithWidths(
|
||||
tree: IndexedProcessTree,
|
||||
widths: ProcessWidths
|
||||
): Iterable<ProcessWithWidthMetadata> {
|
||||
for (const process of levelOrder(tree)) {
|
||||
const parent = indexedProcessTreeParent(tree, process);
|
||||
const width = widths.get(process);
|
||||
|
||||
if (width === undefined) {
|
||||
/**
|
||||
* All widths have been precalcluated, so this will not happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
|
||||
/** If the parent is undefined, we are processing the root. */
|
||||
if (parent === undefined) {
|
||||
yield {
|
||||
process,
|
||||
width,
|
||||
parent: null,
|
||||
parentWidth: null,
|
||||
isOnlyChild: null,
|
||||
firstChildWidth: null,
|
||||
lastChildWidth: null,
|
||||
};
|
||||
} else {
|
||||
const parentWidth = widths.get(parent);
|
||||
|
||||
if (parentWidth === undefined) {
|
||||
/**
|
||||
* All widths have been precalcluated, so this will not happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
|
||||
const metadata: Partial<ProcessWithWidthMetadata> = {
|
||||
process,
|
||||
width,
|
||||
parent,
|
||||
parentWidth,
|
||||
};
|
||||
|
||||
const siblings = indexedProcessTreeChildren(tree, parent);
|
||||
if (siblings.length === 1) {
|
||||
metadata.isOnlyChild = true;
|
||||
metadata.lastChildWidth = width;
|
||||
metadata.firstChildWidth = width;
|
||||
} else {
|
||||
const firstChildWidth = widths.get(siblings[0]);
|
||||
const lastChildWidth = widths.get(siblings[siblings.length - 1]);
|
||||
if (firstChildWidth === undefined || lastChildWidth === undefined) {
|
||||
/**
|
||||
* All widths have been precalcluated, so this will not happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
metadata.isOnlyChild = false;
|
||||
metadata.firstChildWidth = firstChildWidth;
|
||||
metadata.lastChildWidth = lastChildWidth;
|
||||
}
|
||||
|
||||
yield metadata as ProcessWithWidthMetadata;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function processPositions(
|
||||
indexedProcessTree: IndexedProcessTree,
|
||||
widths: ProcessWidths
|
||||
): ProcessPositions {
|
||||
const positions = new Map<ResolverEvent, Vector2>();
|
||||
/**
|
||||
* This algorithm iterates the tree in level order. It keeps counters that are reset for each parent.
|
||||
* By keeping track of the last parent node, we can know when we are dealing with a new set of siblings and
|
||||
* reset the counters.
|
||||
*/
|
||||
let lastProcessedParentNode: ResolverEvent | undefined;
|
||||
/**
|
||||
* Nodes are positioned relative to their siblings. We walk this in level order, so we handle
|
||||
* children left -> right.
|
||||
*
|
||||
* The width of preceding siblings is used to left align the node.
|
||||
* The number of preceding siblings is important because each sibling must be 1 unit apart
|
||||
* on the x axis.
|
||||
*/
|
||||
let numberOfPrecedingSiblings = 0;
|
||||
let runningWidthOfPrecedingSiblings = 0;
|
||||
|
||||
for (const metadata of levelOrderWithWidths(indexedProcessTree, widths)) {
|
||||
// Handle root node
|
||||
if (metadata.parent === null) {
|
||||
const { process } = metadata;
|
||||
/**
|
||||
* Place the root node at (0, 0) for now.
|
||||
*/
|
||||
positions.set(process, [0, 0]);
|
||||
} else {
|
||||
const { process, parent, isOnlyChild, width, parentWidth } = metadata;
|
||||
|
||||
// Reinit counters when parent changes
|
||||
if (lastProcessedParentNode !== parent) {
|
||||
numberOfPrecedingSiblings = 0;
|
||||
runningWidthOfPrecedingSiblings = 0;
|
||||
|
||||
// keep track of this so we know when to reinitialize
|
||||
lastProcessedParentNode = parent;
|
||||
}
|
||||
|
||||
const parentPosition = positions.get(parent);
|
||||
|
||||
if (parentPosition === undefined) {
|
||||
/**
|
||||
* Since this algorithm populates the `positions` map in level order,
|
||||
* the parent node will have been processed already and the parent position
|
||||
* will always be available.
|
||||
*
|
||||
* This will never happen.
|
||||
*/
|
||||
throw new Error();
|
||||
}
|
||||
|
||||
/**
|
||||
* The x 'offset' is added to the x value of the parent to determine the position of the node.
|
||||
* We add `parentWidth / -2` in order to align the left side of this node with the left side of its parent.
|
||||
* We add `numberOfPrecedingSiblings * distanceBetweenNodes` in order to keep each node 1 apart on the x axis.
|
||||
* We add `runningWidthOfPrecedingSiblings` so that we don't overlap with our preceding siblings. We stack em up.
|
||||
* We add `width / 2` so that we center the node horizontally (in case it has non-0 width.)
|
||||
*/
|
||||
const xOffset =
|
||||
parentWidth / -2 +
|
||||
numberOfPrecedingSiblings * distanceBetweenNodes +
|
||||
runningWidthOfPrecedingSiblings +
|
||||
width / 2;
|
||||
|
||||
/**
|
||||
* The y axis gains `-distanceBetweenNodes` as we move down the screen 1 unit at a time.
|
||||
*/
|
||||
let yDistanceBetweenNodes = -distanceBetweenNodes;
|
||||
|
||||
if (!isOnlyChild) {
|
||||
// Make space on leaves to show elapsed time
|
||||
yDistanceBetweenNodes *= 2;
|
||||
}
|
||||
|
||||
const position = vector2Add([xOffset, yDistanceBetweenNodes], parentPosition);
|
||||
|
||||
positions.set(process, position);
|
||||
|
||||
numberOfPrecedingSiblings += 1;
|
||||
runningWidthOfPrecedingSiblings += width;
|
||||
}
|
||||
}
|
||||
|
||||
return positions;
|
||||
}
|
||||
|
||||
/**
|
||||
* The 'indexed process tree' contains the tree data, indexed in helpful ways. Used for O(1) access to stuff during graph layout.
|
||||
*/
|
||||
export const indexedProcessTree = createSelector(graphableProcesses, function indexedTree(
|
||||
/* eslint-disable no-shadow */
|
||||
graphableProcesses
|
||||
|
@ -462,22 +105,28 @@ export const indexedProcessTree = createSelector(graphableProcesses, function in
|
|||
/**
|
||||
* This returns a map of entity_ids to stats about the related events and alerts.
|
||||
*/
|
||||
export function relatedEventsStats(data: DataState) {
|
||||
return data.relatedEventsStats;
|
||||
}
|
||||
export const relatedEventsStats: (
|
||||
state: DataState
|
||||
) => Map<string, ResolverNodeStats> | null = createSelector(resolverTree, (tree?: ResolverTree) => {
|
||||
if (tree) {
|
||||
return resolverTreeModel.relatedEventsStats(tree);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* returns {Map<string, ResolverRelatedEvents>} a map of entity_ids to related event data.
|
||||
* returns a map of entity_ids to related event data.
|
||||
*/
|
||||
export function relatedEventsByEntityId(data: DataState) {
|
||||
export function relatedEventsByEntityId(data: DataState): Map<string, ResolverRelatedEvents> {
|
||||
return data.relatedEvents;
|
||||
}
|
||||
|
||||
/**
|
||||
* returns {Map<string, boolean>} a map of entity_ids to booleans indicating if it is waiting on related event
|
||||
* returns a map of entity_ids to booleans indicating if it is waiting on related event
|
||||
* A value of `undefined` can be interpreted as `not yet requested`
|
||||
*/
|
||||
export function relatedEventsReady(data: DataState) {
|
||||
export function relatedEventsReady(data: DataState): Map<string, boolean> {
|
||||
return data.relatedEventsReady;
|
||||
}
|
||||
|
||||
|
@ -502,6 +151,39 @@ export const processAdjacencies = createSelector(
|
|||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* `true` if there were more children than we got in the last request.
|
||||
*/
|
||||
export function hasMoreChildren(state: DataState): boolean {
|
||||
const tree = resolverTree(state);
|
||||
return tree ? resolverTreeModel.hasMoreChildren(tree) : false;
|
||||
}
|
||||
|
||||
/**
|
||||
* `true` if there were more ancestors than we got in the last request.
|
||||
*/
|
||||
export function hasMoreAncestors(state: DataState): boolean {
|
||||
const tree = resolverTree(state);
|
||||
return tree ? resolverTreeModel.hasMoreAncestors(tree) : false;
|
||||
}
|
||||
|
||||
/**
|
||||
* If we need to fetch, this is the ID to fetch.
|
||||
*/
|
||||
export function databaseDocumentIDToFetch(state: DataState): string | null {
|
||||
// If there is an ID, it must match either the last received version, or the pending version.
|
||||
// Otherwise, we need to fetch it
|
||||
// NB: this technique will not allow for refreshing of data.
|
||||
if (
|
||||
state.databaseDocumentID !== undefined &&
|
||||
state.databaseDocumentID !== state.pendingRequestDatabaseDocumentID &&
|
||||
state.databaseDocumentID !== state.lastResponse?.databaseDocumentID
|
||||
) {
|
||||
return state.databaseDocumentID;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
export const processNodePositionsAndEdgeLineSegments = createSelector(
|
||||
indexedProcessTree,
|
||||
function processNodePositionsAndEdgeLineSegments(
|
||||
|
@ -509,53 +191,7 @@ export const processNodePositionsAndEdgeLineSegments = createSelector(
|
|||
indexedProcessTree
|
||||
/* eslint-enable no-shadow */
|
||||
) {
|
||||
/**
|
||||
* Walk the tree in reverse level order, calculating the 'width' of subtrees.
|
||||
*/
|
||||
const widths = widthsOfProcessSubtrees(indexedProcessTree);
|
||||
|
||||
/**
|
||||
* Walk the tree in level order. Using the precalculated widths, calculate the position of nodes.
|
||||
* Nodes are positioned relative to their parents and preceding siblings.
|
||||
*/
|
||||
const positions = processPositions(indexedProcessTree, widths);
|
||||
|
||||
/**
|
||||
* With the widths and positions precalculated, we calculate edge line segments (arrays of vector2s)
|
||||
* which connect them in a 'pitchfork' design.
|
||||
*/
|
||||
const edgeLineSegments = processEdgeLineSegments(indexedProcessTree, widths, positions);
|
||||
|
||||
/**
|
||||
* Transform the positions of nodes and edges so they seem like they are on an isometric grid.
|
||||
*/
|
||||
const transformedEdgeLineSegments: EdgeLineSegment[] = [];
|
||||
const transformedPositions = new Map<ResolverEvent, Vector2>();
|
||||
|
||||
for (const [processEvent, position] of positions) {
|
||||
transformedPositions.set(processEvent, applyMatrix3(position, isometricTransformMatrix));
|
||||
}
|
||||
|
||||
for (const edgeLineSegment of edgeLineSegments) {
|
||||
const {
|
||||
points: [startPoint, endPoint],
|
||||
} = edgeLineSegment;
|
||||
|
||||
const transformedSegment: EdgeLineSegment = {
|
||||
...edgeLineSegment,
|
||||
points: [
|
||||
applyMatrix3(startPoint, isometricTransformMatrix),
|
||||
applyMatrix3(endPoint, isometricTransformMatrix),
|
||||
],
|
||||
};
|
||||
|
||||
transformedEdgeLineSegments.push(transformedSegment);
|
||||
}
|
||||
|
||||
return {
|
||||
processNodePositions: transformedPositions,
|
||||
edgeLineSegments: transformedEdgeLineSegments,
|
||||
};
|
||||
return isometricTaxiLayout(indexedProcessTree);
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -650,13 +286,18 @@ export const visibleProcessNodePositionsAndEdgeLineSegments = createSelector(
|
|||
}
|
||||
);
|
||||
/**
|
||||
* Returns the `children` and `ancestors` limits for the current graph, if any.
|
||||
*
|
||||
* @param state {DataState} the DataState from the reducer
|
||||
* If there is a pending request that's for a entity ID that doesn't matche the `entityID`, then we should cancel it.
|
||||
*/
|
||||
export const limitsReached = (state: DataState): { children: boolean; ancestors: boolean } => {
|
||||
return {
|
||||
children: state.lineageLimits.children !== null,
|
||||
ancestors: state.lineageLimits.ancestors !== null,
|
||||
};
|
||||
};
|
||||
export function databaseDocumentIDToAbort(state: DataState): string | null {
|
||||
/**
|
||||
* If there is a pending request, and its not for the current databaseDocumentID (even, if the current databaseDocumentID is undefined) then we should abort the request.
|
||||
*/
|
||||
if (
|
||||
state.pendingRequestDatabaseDocumentID !== undefined &&
|
||||
state.pendingRequestDatabaseDocumentID !== state.databaseDocumentID
|
||||
) {
|
||||
return state.pendingRequestDatabaseDocumentID;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@ import { ResolverState } from '../../types';
|
|||
import { LegacyEndpointEvent, ResolverEvent } from '../../../../common/endpoint/types';
|
||||
import { visibleProcessNodePositionsAndEdgeLineSegments } from '../selectors';
|
||||
import { mockProcessEvent } from '../../models/process_event_test_helpers';
|
||||
import { mock as mockResolverTree } from '../../models/resolver_tree';
|
||||
|
||||
describe('resolver visible entities', () => {
|
||||
let processA: LegacyEndpointEvent;
|
||||
|
@ -111,7 +112,7 @@ describe('resolver visible entities', () => {
|
|||
];
|
||||
const action: ResolverAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: { events, stats: new Map(), lineageLimits: { children: '', ancestors: '' } },
|
||||
payload: { result: mockResolverTree({ events })!, databaseDocumentID: '' },
|
||||
};
|
||||
const cameraAction: ResolverAction = { type: 'userSetRasterSize', payload: [300, 200] };
|
||||
store.dispatch(action);
|
||||
|
@ -143,7 +144,7 @@ describe('resolver visible entities', () => {
|
|||
];
|
||||
const action: ResolverAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: { events, stats: new Map(), lineageLimits: { children: '', ancestors: '' } },
|
||||
payload: { result: mockResolverTree({ events })!, databaseDocumentID: '' },
|
||||
};
|
||||
const cameraAction: ResolverAction = { type: 'userSetRasterSize', payload: [2000, 2000] };
|
||||
store.dispatch(action);
|
||||
|
|
|
@ -7,14 +7,15 @@
|
|||
import { createStore, applyMiddleware, Store } from 'redux';
|
||||
import { composeWithDevTools } from 'redux-devtools-extension/developmentOnly';
|
||||
import { KibanaReactContextValue } from '../../../../../../src/plugins/kibana_react/public';
|
||||
import { ResolverAction, ResolverState } from '../types';
|
||||
import { ResolverState } from '../types';
|
||||
import { StartServices } from '../../types';
|
||||
import { resolverReducer } from './reducer';
|
||||
import { resolverMiddlewareFactory } from './middleware';
|
||||
import { ResolverAction } from './actions';
|
||||
|
||||
export const storeFactory = (
|
||||
context?: KibanaReactContextValue<StartServices>
|
||||
): { store: Store<ResolverState, ResolverAction> } => {
|
||||
): Store<ResolverState, ResolverAction> => {
|
||||
const actionsBlacklist: Array<ResolverAction['type']> = ['userMovedPointer'];
|
||||
const composeEnhancers = composeWithDevTools({
|
||||
name: 'Resolver',
|
||||
|
@ -22,8 +23,5 @@ export const storeFactory = (
|
|||
});
|
||||
const middlewareEnhancer = applyMiddleware(resolverMiddlewareFactory(context));
|
||||
|
||||
const store = createStore(resolverReducer, composeEnhancers(middlewareEnhancer));
|
||||
return {
|
||||
store,
|
||||
};
|
||||
return createStore(resolverReducer, composeEnhancers(middlewareEnhancer));
|
||||
};
|
||||
|
|
|
@ -1,127 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { Dispatch, MiddlewareAPI } from 'redux';
|
||||
import { KibanaReactContextValue } from '../../../../../../src/plugins/kibana_react/public';
|
||||
import { StartServices } from '../../types';
|
||||
import { ResolverState, ResolverAction } from '../types';
|
||||
import {
|
||||
ResolverEvent,
|
||||
ResolverChildren,
|
||||
ResolverAncestry,
|
||||
ResolverLifecycleNode,
|
||||
ResolverNodeStats,
|
||||
ResolverRelatedEvents,
|
||||
} from '../../../common/endpoint/types';
|
||||
import * as event from '../../../common/endpoint/models/event';
|
||||
|
||||
type MiddlewareFactory<S = ResolverState> = (
|
||||
context?: KibanaReactContextValue<StartServices>
|
||||
) => (
|
||||
api: MiddlewareAPI<Dispatch<ResolverAction>, S>
|
||||
) => (next: Dispatch<ResolverAction>) => (action: ResolverAction) => unknown;
|
||||
|
||||
function getLifecycleEventsAndStats(
|
||||
nodes: ResolverLifecycleNode[],
|
||||
stats: Map<string, ResolverNodeStats>
|
||||
): ResolverEvent[] {
|
||||
return nodes.reduce((flattenedEvents: ResolverEvent[], currentNode: ResolverLifecycleNode) => {
|
||||
if (currentNode.lifecycle && currentNode.lifecycle.length > 0) {
|
||||
flattenedEvents.push(...currentNode.lifecycle);
|
||||
}
|
||||
|
||||
if (currentNode.stats) {
|
||||
stats.set(currentNode.entityID, currentNode.stats);
|
||||
}
|
||||
|
||||
return flattenedEvents;
|
||||
}, []);
|
||||
}
|
||||
|
||||
export const resolverMiddlewareFactory: MiddlewareFactory = (context) => {
|
||||
return (api) => (next) => async (action: ResolverAction) => {
|
||||
next(action);
|
||||
if (action.type === 'userChangedSelectedEvent') {
|
||||
/**
|
||||
* concurrently fetches a process's details, its ancestors, and its related events.
|
||||
*/
|
||||
if (context?.services.http && action.payload.selectedEvent) {
|
||||
api.dispatch({ type: 'appRequestedResolverData' });
|
||||
try {
|
||||
let lifecycle: ResolverEvent[];
|
||||
let children: ResolverChildren;
|
||||
let ancestry: ResolverAncestry;
|
||||
let entityId: string;
|
||||
let stats: ResolverNodeStats;
|
||||
if (event.isLegacyEvent(action.payload.selectedEvent)) {
|
||||
entityId = action.payload.selectedEvent?.endgame?.unique_pid.toString();
|
||||
const legacyEndpointID = action.payload.selectedEvent?.agent?.id;
|
||||
[{ lifecycle, children, ancestry, stats }] = await Promise.all([
|
||||
context.services.http.get(`/api/endpoint/resolver/${entityId}`, {
|
||||
query: { legacyEndpointID, children: 5, ancestors: 5 },
|
||||
}),
|
||||
]);
|
||||
} else {
|
||||
entityId = action.payload.selectedEvent.process.entity_id;
|
||||
[{ lifecycle, children, ancestry, stats }] = await Promise.all([
|
||||
context.services.http.get(`/api/endpoint/resolver/${entityId}`, {
|
||||
query: {
|
||||
children: 5,
|
||||
ancestors: 5,
|
||||
},
|
||||
}),
|
||||
]);
|
||||
}
|
||||
const nodeStats: Map<string, ResolverNodeStats> = new Map();
|
||||
nodeStats.set(entityId, stats);
|
||||
const lineageLimits = { children: children.nextChild, ancestors: ancestry.nextAncestor };
|
||||
|
||||
const events = [
|
||||
...lifecycle,
|
||||
...getLifecycleEventsAndStats(children.childNodes, nodeStats),
|
||||
...getLifecycleEventsAndStats(ancestry.ancestors, nodeStats),
|
||||
];
|
||||
api.dispatch({
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: {
|
||||
events,
|
||||
stats: nodeStats,
|
||||
lineageLimits,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
api.dispatch({
|
||||
type: 'serverFailedToReturnResolverData',
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
(action.type === 'userRequestedRelatedEventData' ||
|
||||
action.type === 'appDetectedMissingEventData') &&
|
||||
context
|
||||
) {
|
||||
const entityIdToFetchFor = action.payload;
|
||||
let result: ResolverRelatedEvents;
|
||||
try {
|
||||
result = await context.services.http.get(
|
||||
`/api/endpoint/resolver/${entityIdToFetchFor}/events`,
|
||||
{
|
||||
query: { events: 100 },
|
||||
}
|
||||
);
|
||||
api.dispatch({
|
||||
type: 'serverReturnedRelatedEventData',
|
||||
payload: result,
|
||||
});
|
||||
} catch (e) {
|
||||
api.dispatch({
|
||||
type: 'serverFailedToReturnRelatedEventData',
|
||||
payload: action.payload,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
|
@ -0,0 +1,68 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { Dispatch, MiddlewareAPI } from 'redux';
|
||||
import { KibanaReactContextValue } from '../../../../../../../src/plugins/kibana_react/public';
|
||||
import { StartServices } from '../../../types';
|
||||
import { ResolverState } from '../../types';
|
||||
import { ResolverRelatedEvents } from '../../../../common/endpoint/types';
|
||||
import { ResolverTreeFetcher } from './resolver_tree_fetcher';
|
||||
import { ResolverAction } from '../actions';
|
||||
|
||||
type MiddlewareFactory<S = ResolverState> = (
|
||||
context?: KibanaReactContextValue<StartServices>
|
||||
) => (
|
||||
api: MiddlewareAPI<Dispatch<ResolverAction>, S>
|
||||
) => (next: Dispatch<ResolverAction>) => (action: ResolverAction) => unknown;
|
||||
|
||||
/**
|
||||
* The redux middleware that the app uses to trigger side effects.
|
||||
* All data fetching should be done here.
|
||||
* For actions that the app triggers directly, use `app` as a prefix for the type.
|
||||
* For actions that are triggered as a result of server interaction, use `server` as a prefix for the type.
|
||||
*/
|
||||
export const resolverMiddlewareFactory: MiddlewareFactory = (context) => {
|
||||
return (api) => (next) => {
|
||||
// This cannot work w/o `context`.
|
||||
if (!context) {
|
||||
return async (action: ResolverAction) => {
|
||||
next(action);
|
||||
};
|
||||
}
|
||||
const resolverTreeFetcher = ResolverTreeFetcher(context, api);
|
||||
return async (action: ResolverAction) => {
|
||||
next(action);
|
||||
|
||||
resolverTreeFetcher();
|
||||
|
||||
if (
|
||||
action.type === 'userRequestedRelatedEventData' ||
|
||||
action.type === 'appDetectedMissingEventData'
|
||||
) {
|
||||
const entityIdToFetchFor = action.payload;
|
||||
let result: ResolverRelatedEvents;
|
||||
try {
|
||||
result = await context.services.http.get(
|
||||
`/api/endpoint/resolver/${entityIdToFetchFor}/events`,
|
||||
{
|
||||
query: { events: 100 },
|
||||
}
|
||||
);
|
||||
|
||||
api.dispatch({
|
||||
type: 'serverReturnedRelatedEventData',
|
||||
payload: result,
|
||||
});
|
||||
} catch (e) {
|
||||
api.dispatch({
|
||||
type: 'serverFailedToReturnRelatedEventData',
|
||||
payload: action.payload,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
};
|
|
@ -0,0 +1,103 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
/* eslint-disable no-duplicate-imports */
|
||||
|
||||
import { Dispatch, MiddlewareAPI } from 'redux';
|
||||
import { ResolverTree, ResolverEntityIndex } from '../../../../common/endpoint/types';
|
||||
|
||||
import { KibanaReactContextValue } from '../../../../../../../src/plugins/kibana_react/public';
|
||||
import { ResolverState } from '../../types';
|
||||
import * as selectors from '../selectors';
|
||||
import { StartServices } from '../../../types';
|
||||
import { DEFAULT_INDEX_KEY as defaultIndexKey } from '../../../../common/constants';
|
||||
import { ResolverAction } from '../actions';
|
||||
/**
|
||||
* A function that handles syncing ResolverTree data w/ the current entity ID.
|
||||
* This will make a request anytime the entityID changes (to something other than undefined.)
|
||||
* If the entity ID changes while a request is in progress, the in-progress request will be cancelled.
|
||||
* Call the returned function after each state transition.
|
||||
* This is a factory because it is stateful and keeps that state in closure.
|
||||
*/
|
||||
export function ResolverTreeFetcher(
|
||||
context: KibanaReactContextValue<StartServices>,
|
||||
api: MiddlewareAPI<Dispatch<ResolverAction>, ResolverState>
|
||||
): () => void {
|
||||
let lastRequestAbortController: AbortController | undefined;
|
||||
|
||||
// Call this after each state change.
|
||||
// This fetches the ResolverTree for the current entityID
|
||||
// if the entityID changes while
|
||||
return async () => {
|
||||
const state = api.getState();
|
||||
const databaseDocumentIDToFetch = selectors.databaseDocumentIDToFetch(state);
|
||||
|
||||
if (selectors.databaseDocumentIDToAbort(state) && lastRequestAbortController) {
|
||||
lastRequestAbortController.abort();
|
||||
// calling abort will cause an action to be fired
|
||||
} else if (databaseDocumentIDToFetch !== null) {
|
||||
lastRequestAbortController = new AbortController();
|
||||
let result: ResolverTree | undefined;
|
||||
// Inform the state that we've made the request. Without this, the middleware will try to make the request again
|
||||
// immediately.
|
||||
api.dispatch({
|
||||
type: 'appRequestedResolverData',
|
||||
payload: databaseDocumentIDToFetch,
|
||||
});
|
||||
try {
|
||||
const indices: string[] = context.services.uiSettings.get(defaultIndexKey);
|
||||
const matchingEntities: ResolverEntityIndex = await context.services.http.get(
|
||||
'/api/endpoint/resolver/entity',
|
||||
{
|
||||
signal: lastRequestAbortController.signal,
|
||||
query: {
|
||||
_id: databaseDocumentIDToFetch,
|
||||
indices,
|
||||
},
|
||||
}
|
||||
);
|
||||
if (matchingEntities.length < 1) {
|
||||
// If no entity_id could be found for the _id, bail out with a failure.
|
||||
api.dispatch({
|
||||
type: 'serverFailedToReturnResolverData',
|
||||
payload: databaseDocumentIDToFetch,
|
||||
});
|
||||
return;
|
||||
}
|
||||
const entityIDToFetch = matchingEntities[0].entity_id;
|
||||
result = await context.services.http.get(`/api/endpoint/resolver/${entityIDToFetch}`, {
|
||||
signal: lastRequestAbortController.signal,
|
||||
query: {
|
||||
children: 5,
|
||||
ancestors: 5,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/DOMException#exception-AbortError
|
||||
if (error instanceof DOMException && error.name === 'AbortError') {
|
||||
api.dispatch({
|
||||
type: 'appAbortedResolverDataRequest',
|
||||
payload: databaseDocumentIDToFetch,
|
||||
});
|
||||
} else {
|
||||
api.dispatch({
|
||||
type: 'serverFailedToReturnResolverData',
|
||||
payload: databaseDocumentIDToFetch,
|
||||
});
|
||||
}
|
||||
}
|
||||
if (result !== undefined) {
|
||||
api.dispatch({
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: {
|
||||
result,
|
||||
databaseDocumentID: databaseDocumentIDToFetch,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
|
@ -8,7 +8,8 @@ import { htmlIdGenerator } from '@elastic/eui';
|
|||
import { animateProcessIntoView } from './methods';
|
||||
import { cameraReducer } from './camera/reducer';
|
||||
import { dataReducer } from './data/reducer';
|
||||
import { ResolverState, ResolverAction, ResolverUIState } from '../types';
|
||||
import { ResolverAction } from './actions';
|
||||
import { ResolverState, ResolverUIState } from '../types';
|
||||
import { uniquePidForProcess } from '../models/process_event';
|
||||
|
||||
/**
|
||||
|
|
|
@ -56,6 +56,19 @@ export const processNodePositionsAndEdgeLineSegments = composeSelectors(
|
|||
dataSelectors.processNodePositionsAndEdgeLineSegments
|
||||
);
|
||||
|
||||
/**
|
||||
* If we need to fetch, this is the entity ID to fetch.
|
||||
*/
|
||||
export const databaseDocumentIDToFetch = composeSelectors(
|
||||
dataStateSelector,
|
||||
dataSelectors.databaseDocumentIDToFetch
|
||||
);
|
||||
|
||||
export const databaseDocumentIDToAbort = composeSelectors(
|
||||
dataStateSelector,
|
||||
dataSelectors.databaseDocumentIDToAbort
|
||||
);
|
||||
|
||||
export const processAdjacencies = composeSelectors(
|
||||
dataStateSelector,
|
||||
dataSelectors.processAdjacencies
|
||||
|
@ -158,15 +171,6 @@ export const graphableProcesses = composeSelectors(
|
|||
dataSelectors.graphableProcesses
|
||||
);
|
||||
|
||||
/**
|
||||
* Select the `ancestors` and `children` limits that were reached or exceeded
|
||||
* during the request for the current tree.
|
||||
*/
|
||||
export const lineageLimitsReached = composeSelectors(
|
||||
dataStateSelector,
|
||||
dataSelectors.limitsReached
|
||||
);
|
||||
|
||||
/**
|
||||
* Calls the `secondSelector` with the result of the `selector`. Use this when re-exporting a
|
||||
* concern-specific selector. `selector` should return the concern-specific state.
|
||||
|
|
|
@ -7,11 +7,11 @@
|
|||
import { Store } from 'redux';
|
||||
import { BBox } from 'rbush';
|
||||
import { ResolverAction } from './store/actions';
|
||||
export { ResolverAction } from './store/actions';
|
||||
import {
|
||||
ResolverEvent,
|
||||
ResolverNodeStats,
|
||||
ResolverRelatedEvents,
|
||||
ResolverTree,
|
||||
} from '../../common/endpoint/types';
|
||||
|
||||
/**
|
||||
|
@ -176,15 +176,49 @@ export interface VisibleEntites {
|
|||
* State for `data` reducer which handles receiving Resolver data from the backend.
|
||||
*/
|
||||
export interface DataState {
|
||||
readonly results: readonly ResolverEvent[];
|
||||
readonly relatedEventsStats: Readonly<Map<string, ResolverNodeStats>>;
|
||||
readonly relatedEventsStats: Map<string, ResolverNodeStats>;
|
||||
readonly relatedEvents: Map<string, ResolverRelatedEvents>;
|
||||
readonly relatedEventsReady: Map<string, boolean>;
|
||||
readonly lineageLimits: Readonly<{ children: string | null; ancestors: string | null }>;
|
||||
isLoading: boolean;
|
||||
hasError: boolean;
|
||||
/**
|
||||
* The `_id` for an ES document. Used to select a process that we'll show the graph for.
|
||||
*/
|
||||
readonly databaseDocumentID?: string;
|
||||
/**
|
||||
* The id used for the pending request, if there is one.
|
||||
*/
|
||||
readonly pendingRequestDatabaseDocumentID?: string;
|
||||
|
||||
/**
|
||||
* The parameters and response from the last successful request.
|
||||
*/
|
||||
readonly lastResponse?: {
|
||||
/**
|
||||
* The id used in the request.
|
||||
*/
|
||||
readonly databaseDocumentID: string;
|
||||
} & (
|
||||
| {
|
||||
/**
|
||||
* If a response with a success code was received, this is `true`.
|
||||
*/
|
||||
readonly successful: true;
|
||||
/**
|
||||
* The ResolverTree parsed from the response.
|
||||
*/
|
||||
readonly result: ResolverTree;
|
||||
}
|
||||
| {
|
||||
/**
|
||||
* If the request threw an exception or the response had a failure code, this will be false.
|
||||
*/
|
||||
readonly successful: false;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents an ordered pair. Used for x-y coordinates and the like.
|
||||
*/
|
||||
export type Vector2 = readonly [number, number];
|
||||
|
||||
/**
|
||||
|
@ -416,3 +450,17 @@ export type ResolverProcessType =
|
|||
| 'unknownEvent';
|
||||
|
||||
export type ResolverStore = Store<ResolverState, ResolverAction>;
|
||||
|
||||
/**
|
||||
* Describes the basic Resolver graph layout.
|
||||
*/
|
||||
export interface IsometricTaxiLayout {
|
||||
/**
|
||||
* A map of events to position. each event represents its own node.
|
||||
*/
|
||||
processNodePositions: Map<ResolverEvent, Vector2>;
|
||||
/**
|
||||
* A map of edgline segments, which graphically connect nodes.
|
||||
*/
|
||||
edgeLineSegments: EdgeLineSegment[];
|
||||
}
|
||||
|
|
|
@ -11,9 +11,10 @@ import styled from 'styled-components';
|
|||
import { EuiRange, EuiPanel, EuiIcon } from '@elastic/eui';
|
||||
import { useSelector, useDispatch } from 'react-redux';
|
||||
import { SideEffectContext } from './side_effect_context';
|
||||
import { ResolverAction, Vector2 } from '../types';
|
||||
import { Vector2 } from '../types';
|
||||
import * as selectors from '../store/selectors';
|
||||
import { useResolverTheme } from './assets';
|
||||
import { ResolverAction } from '../store/actions';
|
||||
|
||||
interface StyledGraphControls {
|
||||
graphControlsBackground: string;
|
||||
|
|
|
@ -3,164 +3,44 @@
|
|||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
/* eslint-disable react/display-name */
|
||||
|
||||
import React, { useLayoutEffect, useContext } from 'react';
|
||||
import { useSelector, useDispatch } from 'react-redux';
|
||||
import styled from 'styled-components';
|
||||
import { EuiLoadingSpinner } from '@elastic/eui';
|
||||
import { FormattedMessage } from '@kbn/i18n/react';
|
||||
import * as selectors from '../store/selectors';
|
||||
import { EdgeLine } from './edge_line';
|
||||
import { Panel } from './panel';
|
||||
import { GraphControls } from './graph_controls';
|
||||
import { ProcessEventDot } from './process_event_dot';
|
||||
import { useCamera } from './use_camera';
|
||||
import { SymbolDefinitions, useResolverTheme } from './assets';
|
||||
import { entityId } from '../../../common/endpoint/models/event';
|
||||
import { ResolverAction } from '../types';
|
||||
import { ResolverEvent } from '../../../common/endpoint/types';
|
||||
import { SideEffectContext } from './side_effect_context';
|
||||
import React, { useMemo } from 'react';
|
||||
import { Provider } from 'react-redux';
|
||||
import { ResolverMap } from './map';
|
||||
import { storeFactory } from '../store';
|
||||
import { StartServices } from '../../types';
|
||||
import { useKibana } from '../../../../../../src/plugins/kibana_react/public';
|
||||
|
||||
interface StyledResolver {
|
||||
backgroundColor: string;
|
||||
}
|
||||
|
||||
const StyledResolver = styled.div<StyledResolver>`
|
||||
/**
|
||||
* Take up all availble space
|
||||
*/
|
||||
&,
|
||||
.resolver-graph {
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
}
|
||||
.loading-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-grow: 1;
|
||||
}
|
||||
/**
|
||||
* The placeholder components use absolute positioning.
|
||||
*/
|
||||
position: relative;
|
||||
/**
|
||||
* Prevent partially visible components from showing up outside the bounds of Resolver.
|
||||
*/
|
||||
overflow: hidden;
|
||||
contain: strict;
|
||||
background-color: ${(props) => props.backgroundColor};
|
||||
`;
|
||||
|
||||
const StyledPanel = styled(Panel)`
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
overflow: auto;
|
||||
width: 25em;
|
||||
max-width: 50%;
|
||||
`;
|
||||
|
||||
const StyledResolverContainer = styled.div`
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
contain: layout;
|
||||
`;
|
||||
|
||||
export const Resolver = React.memo(function Resolver({
|
||||
/**
|
||||
* The top level, unconnected, Resolver component.
|
||||
*/
|
||||
export const Resolver = React.memo(function ({
|
||||
className,
|
||||
selectedEvent,
|
||||
databaseDocumentID,
|
||||
}: {
|
||||
/**
|
||||
* Used by `styled-components`.
|
||||
*/
|
||||
className?: string;
|
||||
selectedEvent?: ResolverEvent;
|
||||
/**
|
||||
* The `_id` value of an event in ES.
|
||||
* Used as the origin of the Resolver graph.
|
||||
*/
|
||||
databaseDocumentID?: string;
|
||||
}) {
|
||||
const { timestamp } = useContext(SideEffectContext);
|
||||
|
||||
const { processNodePositions, connectingEdgeLineSegments } = useSelector(
|
||||
selectors.visibleProcessNodePositionsAndEdgeLineSegments
|
||||
)(timestamp());
|
||||
|
||||
const dispatch: (action: ResolverAction) => unknown = useDispatch();
|
||||
const { processToAdjacencyMap } = useSelector(selectors.processAdjacencies);
|
||||
const { projectionMatrix, ref, onMouseDown } = useCamera();
|
||||
const isLoading = useSelector(selectors.isLoading);
|
||||
const hasError = useSelector(selectors.hasError);
|
||||
const relatedEventsStats = useSelector(selectors.relatedEventsStats);
|
||||
const activeDescendantId = useSelector(selectors.uiActiveDescendantId);
|
||||
const terminatedProcesses = useSelector(selectors.terminatedProcesses);
|
||||
const { colorMap } = useResolverTheme();
|
||||
|
||||
useLayoutEffect(() => {
|
||||
dispatch({
|
||||
type: 'userChangedSelectedEvent',
|
||||
payload: { selectedEvent },
|
||||
});
|
||||
}, [dispatch, selectedEvent]);
|
||||
const context = useKibana<StartServices>();
|
||||
const store = useMemo(() => {
|
||||
return storeFactory(context);
|
||||
}, [context]);
|
||||
|
||||
/**
|
||||
* Setup the store and use `Provider` here. This allows the ResolverMap component to
|
||||
* dispatch actions and read from state.
|
||||
*/
|
||||
return (
|
||||
<StyledResolver
|
||||
data-test-subj="resolverEmbeddable"
|
||||
className={className}
|
||||
backgroundColor={colorMap.resolverBackground}
|
||||
>
|
||||
{isLoading ? (
|
||||
<div className="loading-container">
|
||||
<EuiLoadingSpinner size="xl" />
|
||||
</div>
|
||||
) : hasError ? (
|
||||
<div className="loading-container">
|
||||
<div>
|
||||
{' '}
|
||||
<FormattedMessage
|
||||
id="xpack.securitySolution.endpoint.resolver.loadingError"
|
||||
defaultMessage="Error loading data."
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<StyledResolverContainer
|
||||
className="resolver-graph kbn-resetFocusState"
|
||||
onMouseDown={onMouseDown}
|
||||
ref={ref}
|
||||
role="tree"
|
||||
tabIndex={0}
|
||||
aria-activedescendant={activeDescendantId || undefined}
|
||||
>
|
||||
{connectingEdgeLineSegments.map(({ points: [startPosition, endPosition], metadata }) => (
|
||||
<EdgeLine
|
||||
edgeLineMetadata={metadata}
|
||||
key={metadata.uniqueId}
|
||||
startPosition={startPosition}
|
||||
endPosition={endPosition}
|
||||
projectionMatrix={projectionMatrix}
|
||||
/>
|
||||
))}
|
||||
{[...processNodePositions].map(([processEvent, position]) => {
|
||||
const adjacentNodeMap = processToAdjacencyMap.get(processEvent);
|
||||
const processEntityId = entityId(processEvent);
|
||||
if (!adjacentNodeMap) {
|
||||
// This should never happen
|
||||
throw new Error('Issue calculating adjacency node map.');
|
||||
}
|
||||
return (
|
||||
<ProcessEventDot
|
||||
key={processEntityId}
|
||||
position={position}
|
||||
projectionMatrix={projectionMatrix}
|
||||
event={processEvent}
|
||||
adjacentNodeMap={adjacentNodeMap}
|
||||
relatedEventsStats={relatedEventsStats.get(entityId(processEvent))}
|
||||
isProcessTerminated={terminatedProcesses.has(processEntityId)}
|
||||
isProcessOrigin={false}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</StyledResolverContainer>
|
||||
)}
|
||||
<StyledPanel />
|
||||
<GraphControls />
|
||||
<SymbolDefinitions />
|
||||
</StyledResolver>
|
||||
<Provider store={store}>
|
||||
<ResolverMap className={className} databaseDocumentID={databaseDocumentID} />
|
||||
</Provider>
|
||||
);
|
||||
});
|
||||
|
|
125
x-pack/plugins/security_solution/public/resolver/view/map.tsx
Normal file
125
x-pack/plugins/security_solution/public/resolver/view/map.tsx
Normal file
|
@ -0,0 +1,125 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
/* eslint-disable no-duplicate-imports */
|
||||
|
||||
/* eslint-disable react/display-name */
|
||||
|
||||
import React, { useContext } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { EuiLoadingSpinner } from '@elastic/eui';
|
||||
import { FormattedMessage } from '@kbn/i18n/react';
|
||||
import * as selectors from '../store/selectors';
|
||||
import { EdgeLine } from './edge_line';
|
||||
import { GraphControls } from './graph_controls';
|
||||
import { ProcessEventDot } from './process_event_dot';
|
||||
import { useCamera } from './use_camera';
|
||||
import { SymbolDefinitions, useResolverTheme } from './assets';
|
||||
import { useStateSyncingActions } from './use_state_syncing_actions';
|
||||
import { StyledMapContainer, StyledPanel, GraphContainer } from './styles';
|
||||
import { entityId } from '../../../common/endpoint/models/event';
|
||||
import { SideEffectContext } from './side_effect_context';
|
||||
|
||||
/**
|
||||
* The highest level connected Resolver component. Needs a `Provider` in its ancestry to work.
|
||||
*/
|
||||
export const ResolverMap = React.memo(function ({
|
||||
className,
|
||||
databaseDocumentID,
|
||||
}: {
|
||||
/**
|
||||
* Used by `styled-components`.
|
||||
*/
|
||||
className?: string;
|
||||
/**
|
||||
* The `_id` value of an event in ES.
|
||||
* Used as the origin of the Resolver graph.
|
||||
*/
|
||||
databaseDocumentID?: string;
|
||||
}) {
|
||||
/**
|
||||
* This is responsible for dispatching actions that include any external data.
|
||||
* `databaseDocumentID`
|
||||
*/
|
||||
useStateSyncingActions({ databaseDocumentID });
|
||||
|
||||
const { timestamp } = useContext(SideEffectContext);
|
||||
const { processNodePositions, connectingEdgeLineSegments } = useSelector(
|
||||
selectors.visibleProcessNodePositionsAndEdgeLineSegments
|
||||
)(timestamp());
|
||||
const { processToAdjacencyMap } = useSelector(selectors.processAdjacencies);
|
||||
const relatedEventsStats = useSelector(selectors.relatedEventsStats);
|
||||
const terminatedProcesses = useSelector(selectors.terminatedProcesses);
|
||||
const { projectionMatrix, ref, onMouseDown } = useCamera();
|
||||
const isLoading = useSelector(selectors.isLoading);
|
||||
const hasError = useSelector(selectors.hasError);
|
||||
const activeDescendantId = useSelector(selectors.uiActiveDescendantId);
|
||||
const { colorMap } = useResolverTheme();
|
||||
|
||||
return (
|
||||
<StyledMapContainer className={className} backgroundColor={colorMap.resolverBackground}>
|
||||
{isLoading ? (
|
||||
<div className="loading-container">
|
||||
<EuiLoadingSpinner size="xl" />
|
||||
</div>
|
||||
) : hasError ? (
|
||||
<div className="loading-container">
|
||||
<div>
|
||||
{' '}
|
||||
<FormattedMessage
|
||||
id="xpack.securitySolution.endpoint.resolver.loadingError"
|
||||
defaultMessage="Error loading data."
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<GraphContainer
|
||||
className="resolver-graph kbn-resetFocusState"
|
||||
onMouseDown={onMouseDown}
|
||||
ref={ref}
|
||||
role="tree"
|
||||
tabIndex={0}
|
||||
aria-activedescendant={activeDescendantId || undefined}
|
||||
>
|
||||
{connectingEdgeLineSegments.map(({ points: [startPosition, endPosition], metadata }) => (
|
||||
<EdgeLine
|
||||
edgeLineMetadata={metadata}
|
||||
key={metadata.uniqueId}
|
||||
startPosition={startPosition}
|
||||
endPosition={endPosition}
|
||||
projectionMatrix={projectionMatrix}
|
||||
/>
|
||||
))}
|
||||
{[...processNodePositions].map(([processEvent, position]) => {
|
||||
const adjacentNodeMap = processToAdjacencyMap.get(processEvent);
|
||||
const processEntityId = entityId(processEvent);
|
||||
if (!adjacentNodeMap) {
|
||||
// This should never happen
|
||||
throw new Error('Issue calculating adjacency node map.');
|
||||
}
|
||||
return (
|
||||
<ProcessEventDot
|
||||
key={processEntityId}
|
||||
position={position}
|
||||
projectionMatrix={projectionMatrix}
|
||||
event={processEvent}
|
||||
adjacentNodeMap={adjacentNodeMap}
|
||||
relatedEventsStats={
|
||||
relatedEventsStats ? relatedEventsStats.get(entityId(processEvent)) : undefined
|
||||
}
|
||||
isProcessTerminated={terminatedProcesses.has(processEntityId)}
|
||||
isProcessOrigin={false}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</GraphContainer>
|
||||
)}
|
||||
<StyledPanel />
|
||||
<GraphControls />
|
||||
<SymbolDefinitions />
|
||||
</StyledMapContainer>
|
||||
);
|
||||
});
|
|
@ -22,7 +22,7 @@ import { displayNameRecord } from './process_event_dot';
|
|||
import * as selectors from '../store/selectors';
|
||||
import { useResolverDispatch } from './use_resolver_dispatch';
|
||||
import * as event from '../../../common/endpoint/models/event';
|
||||
import { ResolverEvent } from '../../../common/endpoint/types';
|
||||
import { ResolverEvent, ResolverNodeStats } from '../../../common/endpoint/types';
|
||||
import { SideEffectContext } from './side_effect_context';
|
||||
import { ProcessEventListNarrowedByType } from './panels/panel_content_related_list';
|
||||
import { EventCountsForProcess } from './panels/panel_content_related_counts';
|
||||
|
@ -141,15 +141,10 @@ const PanelContent = memo(function PanelContent() {
|
|||
[history, urlSearch]
|
||||
);
|
||||
|
||||
// GO JONNY GO
|
||||
const relatedEventStats = useSelector(selectors.relatedEventsStats);
|
||||
const { crumbId, crumbEvent } = queryParams;
|
||||
const relatedStatsForIdFromParams = useMemo(() => {
|
||||
if (idFromParams) {
|
||||
return relatedEventStats.get(idFromParams);
|
||||
}
|
||||
return undefined;
|
||||
}, [relatedEventStats, idFromParams]);
|
||||
const relatedStatsForIdFromParams: ResolverNodeStats | undefined =
|
||||
idFromParams && relatedEventStats ? relatedEventStats.get(idFromParams) : undefined;
|
||||
|
||||
/**
|
||||
* Determine which set of breadcrumbs to display based on the query parameters
|
||||
|
|
|
@ -0,0 +1,60 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import styled from 'styled-components';
|
||||
import { Panel } from './panel';
|
||||
|
||||
/**
|
||||
* The top level DOM element for Resolver
|
||||
* NB: `styled-components` may be used to wrap this.
|
||||
*/
|
||||
export const StyledMapContainer = styled.div<{ backgroundColor: string }>`
|
||||
/**
|
||||
* Take up all availble space
|
||||
*/
|
||||
&,
|
||||
.resolver-graph {
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
}
|
||||
.loading-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
flex-grow: 1;
|
||||
}
|
||||
/**
|
||||
* The placeholder components use absolute positioning.
|
||||
*/
|
||||
position: relative;
|
||||
/**
|
||||
* Prevent partially visible components from showing up outside the bounds of Resolver.
|
||||
*/
|
||||
overflow: hidden;
|
||||
contain: strict;
|
||||
background-color: ${(props) => props.backgroundColor};
|
||||
`;
|
||||
|
||||
/**
|
||||
* The Panel, styled for use in `ResolverMap`.
|
||||
*/
|
||||
export const StyledPanel = styled(Panel)`
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
overflow: auto;
|
||||
width: 25em;
|
||||
max-width: 50%;
|
||||
`;
|
||||
|
||||
/**
|
||||
* Used by ResolverMap to contain the lines and nodes.
|
||||
*/
|
||||
export const GraphContainer = styled.div`
|
||||
display: flex;
|
||||
flex-grow: 1;
|
||||
contain: layout;
|
||||
`;
|
|
@ -11,12 +11,14 @@ import { useCamera, useAutoUpdatingClientRect } from './use_camera';
|
|||
import { Provider } from 'react-redux';
|
||||
import * as selectors from '../store/selectors';
|
||||
import { storeFactory } from '../store';
|
||||
import { Matrix3, ResolverAction, ResolverStore, SideEffectSimulator } from '../types';
|
||||
import { Matrix3, ResolverStore, SideEffectSimulator } from '../types';
|
||||
import { ResolverEvent } from '../../../common/endpoint/types';
|
||||
import { SideEffectContext } from './side_effect_context';
|
||||
import { applyMatrix3 } from '../lib/vector2';
|
||||
import { sideEffectSimulator } from './side_effect_simulator';
|
||||
import { mockProcessEvent } from '../models/process_event_test_helpers';
|
||||
import { mock as mockResolverTree } from '../models/resolver_tree';
|
||||
import { ResolverAction } from '../store/actions';
|
||||
|
||||
describe('useCamera on an unpainted element', () => {
|
||||
let element: HTMLElement;
|
||||
|
@ -27,7 +29,7 @@ describe('useCamera on an unpainted element', () => {
|
|||
let simulator: SideEffectSimulator;
|
||||
|
||||
beforeEach(async () => {
|
||||
({ store } = storeFactory());
|
||||
store = storeFactory();
|
||||
|
||||
const Test = function Test() {
|
||||
const camera = useCamera();
|
||||
|
@ -159,7 +161,7 @@ describe('useCamera on an unpainted element', () => {
|
|||
let process: ResolverEvent;
|
||||
beforeEach(() => {
|
||||
const events: ResolverEvent[] = [];
|
||||
const numberOfEvents: number = Math.floor(Math.random() * 10 + 1);
|
||||
const numberOfEvents: number = 10;
|
||||
|
||||
for (let index = 0; index < numberOfEvents; index++) {
|
||||
const uniquePpid = index === 0 ? undefined : index - 1;
|
||||
|
@ -174,23 +176,27 @@ describe('useCamera on an unpainted element', () => {
|
|||
})
|
||||
);
|
||||
}
|
||||
const serverResponseAction: ResolverAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: {
|
||||
events,
|
||||
stats: new Map(),
|
||||
lineageLimits: { children: null, ancestors: null },
|
||||
},
|
||||
};
|
||||
act(() => {
|
||||
store.dispatch(serverResponseAction);
|
||||
});
|
||||
const tree = mockResolverTree({ events });
|
||||
if (tree !== null) {
|
||||
const serverResponseAction: ResolverAction = {
|
||||
type: 'serverReturnedResolverData',
|
||||
payload: { result: tree, databaseDocumentID: '' },
|
||||
};
|
||||
act(() => {
|
||||
store.dispatch(serverResponseAction);
|
||||
});
|
||||
} else {
|
||||
throw new Error('failed to create tree');
|
||||
}
|
||||
const processes: ResolverEvent[] = [
|
||||
...selectors
|
||||
.processNodePositionsAndEdgeLineSegments(store.getState())
|
||||
.processNodePositions.keys(),
|
||||
];
|
||||
process = processes[processes.length - 1];
|
||||
if (!process) {
|
||||
throw new Error('missing the process to bring into view');
|
||||
}
|
||||
simulator.controls.time = 0;
|
||||
const cameraAction: ResolverAction = {
|
||||
type: 'userBroughtProcessIntoView',
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
|
||||
import { useDispatch } from 'react-redux';
|
||||
import { ResolverAction } from '../types';
|
||||
import { ResolverAction } from '../store/actions';
|
||||
|
||||
/**
|
||||
* Call `useDispatch`, but only accept `ResolverAction` actions.
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { useLayoutEffect } from 'react';
|
||||
import { useResolverDispatch } from './use_resolver_dispatch';
|
||||
|
||||
/**
|
||||
* This is a hook that is meant to be used once at the top level of Resolver.
|
||||
* It dispatches actions that keep the store in sync with external properties.
|
||||
*/
|
||||
export function useStateSyncingActions({
|
||||
databaseDocumentID,
|
||||
}: {
|
||||
/**
|
||||
* The `_id` of an event in ES. Used to determine the origin of the Resolver graph.
|
||||
*/
|
||||
databaseDocumentID?: string;
|
||||
}) {
|
||||
const dispatch = useResolverDispatch();
|
||||
useLayoutEffect(() => {
|
||||
dispatch({
|
||||
type: 'appReceivedNewExternalProperties',
|
||||
payload: { databaseDocumentID },
|
||||
});
|
||||
}, [dispatch, databaseDocumentID]);
|
||||
}
|
|
@ -4,13 +4,7 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import {
|
||||
EuiButtonEmpty,
|
||||
EuiFlexGroup,
|
||||
EuiFlexItem,
|
||||
EuiHorizontalRule,
|
||||
EuiTitle,
|
||||
} from '@elastic/eui';
|
||||
import { EuiButtonEmpty, EuiFlexGroup, EuiFlexItem, EuiHorizontalRule } from '@elastic/eui';
|
||||
import { noop } from 'lodash/fp';
|
||||
import React, { useCallback, useState } from 'react';
|
||||
import { connect, ConnectedProps, useDispatch, useSelector } from 'react-redux';
|
||||
|
@ -31,6 +25,7 @@ import {
|
|||
setInsertTimeline,
|
||||
updateTimelineGraphEventId,
|
||||
} from '../../../timelines/store/timeline/actions';
|
||||
import { Resolver } from '../../../resolver/view';
|
||||
|
||||
import * as i18n from './translations';
|
||||
|
||||
|
@ -39,6 +34,10 @@ const OverlayContainer = styled.div<{ bodyHeight?: number }>`
|
|||
width: 100%;
|
||||
`;
|
||||
|
||||
const StyledResolver = styled(Resolver)`
|
||||
height: 100%;
|
||||
`;
|
||||
|
||||
interface OwnProps {
|
||||
bodyHeight?: number;
|
||||
graphEventId?: string;
|
||||
|
@ -117,9 +116,7 @@ const GraphOverlayComponent = ({
|
|||
</EuiFlexGroup>
|
||||
|
||||
<EuiHorizontalRule margin="none" />
|
||||
<EuiTitle>
|
||||
<>{`Resolver graph for event _id ${graphEventId}`}</>
|
||||
</EuiTitle>
|
||||
<StyledResolver databaseDocumentID={graphEventId} />
|
||||
<AllCasesModal
|
||||
onCloseCaseModal={onCloseCaseModal}
|
||||
showCaseModal={showCaseModal}
|
||||
|
|
|
@ -29,7 +29,7 @@ export const TimelineBody = styled.div.attrs(({ className = '' }) => ({
|
|||
overflow: auto;
|
||||
scrollbar-width: thin;
|
||||
flex: 1;
|
||||
visibility: ${({ visible }) => (visible ? 'visible' : 'hidden')};
|
||||
display: ${({ visible }) => (visible ? 'block' : 'none')};
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
height: ${({ theme }) => theme.eui.euiScrollBar};
|
||||
|
|
|
@ -12,12 +12,14 @@ import {
|
|||
validateChildren,
|
||||
validateAncestry,
|
||||
validateAlerts,
|
||||
validateEntities,
|
||||
} from '../../../common/endpoint/schema/resolver';
|
||||
import { handleEvents } from './resolver/events';
|
||||
import { handleChildren } from './resolver/children';
|
||||
import { handleAncestry } from './resolver/ancestry';
|
||||
import { handleTree } from './resolver/tree';
|
||||
import { handleAlerts } from './resolver/alerts';
|
||||
import { handleEntities } from './resolver/entity';
|
||||
|
||||
export function registerResolverRoutes(router: IRouter, endpointAppContext: EndpointAppContext) {
|
||||
const log = endpointAppContext.logFactory.get('resolver');
|
||||
|
@ -66,4 +68,16 @@ export function registerResolverRoutes(router: IRouter, endpointAppContext: Endp
|
|||
},
|
||||
handleTree(log, endpointAppContext)
|
||||
);
|
||||
|
||||
/**
|
||||
* Used to get details about an entity, aka process.
|
||||
*/
|
||||
router.get(
|
||||
{
|
||||
path: '/api/endpoint/resolver/entity',
|
||||
validate: validateEntities,
|
||||
options: { authRequired: true },
|
||||
},
|
||||
handleEntities()
|
||||
);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,86 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
import { RequestHandler } from 'kibana/server';
|
||||
import { TypeOf } from '@kbn/config-schema';
|
||||
import { validateEntities } from '../../../../common/endpoint/schema/resolver';
|
||||
import { ResolverEntityIndex } from '../../../../common/endpoint/types';
|
||||
|
||||
/**
|
||||
* This is used to get an 'entity_id' which is an internal-to-Resolver concept, from an `_id`, which
|
||||
* is the artificial ID generated by ES for each document.
|
||||
*/
|
||||
export function handleEntities(): RequestHandler<unknown, TypeOf<typeof validateEntities.query>> {
|
||||
return async (context, request, response) => {
|
||||
const {
|
||||
query: { _id, indices },
|
||||
} = request;
|
||||
|
||||
/**
|
||||
* A safe type for the response based on the semantics of the query.
|
||||
* We specify _source, asking for `process.entity_id` and we only
|
||||
* accept documents that have it.
|
||||
* Also, we only request 1 document.
|
||||
*/
|
||||
interface ExpectedQueryResponse {
|
||||
hits: {
|
||||
hits:
|
||||
| []
|
||||
| [
|
||||
{
|
||||
_source: {
|
||||
process: {
|
||||
entity_id: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
}
|
||||
|
||||
const queryResponse: ExpectedQueryResponse = await context.core.elasticsearch.legacy.client.callAsCurrentUser(
|
||||
'search',
|
||||
{
|
||||
index: indices,
|
||||
body: {
|
||||
// only return process.entity_id
|
||||
_source: 'process.entity_id',
|
||||
// only return 1 match at most
|
||||
size: 1,
|
||||
query: {
|
||||
bool: {
|
||||
filter: [
|
||||
{
|
||||
// only return documents with the matching _id
|
||||
ids: {
|
||||
values: _id,
|
||||
},
|
||||
},
|
||||
{
|
||||
exists: {
|
||||
// only return documents that have process.entity_id
|
||||
field: 'process.entity_id',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const responseBody: ResolverEntityIndex = [];
|
||||
for (const {
|
||||
_source: {
|
||||
process: { entity_id },
|
||||
},
|
||||
} of queryResponse.hits.hits) {
|
||||
responseBody.push({
|
||||
entity_id,
|
||||
});
|
||||
}
|
||||
return response.ok({ body: responseBody });
|
||||
};
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue