[Security Solution][Resolver] Fix node data refresh and time range (#86014)

* Pass filters from SIEM to resolver

* Fix test type errors

* Revert loading state change, update snapshots

* Make correct check in nodeData selector

* Fix inverted logic in nodeData selector

* Adding request id to node data

* Working time range filters and node data stale selector (sketched below, after the changed-files summary)

* Fixing random name from generator and panel icons

* Fixing unused import

* Remove hardcoded node id

* Ensure that the actions array is reset before each test

Co-authored-by: Kevin Qualters <kevin.qualters@elastic.co>
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Jonathan Buttner 2020-12-16 09:52:55 -05:00 committed by GitHub
parent 2f5f1481fb
commit fa2db85eac
13 changed files with 506 additions and 199 deletions
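
The thread running through the hunks below: every piece of node data is stamped with the dataRequestID it was fetched under, and a new nodeDataIsStale selector compares that stamp against the store's refreshCount so anything fetched before a refresh gets re-requested. A minimal, hedged TypeScript sketch of that check follows; the interfaces are trimmed to the fields used here and are not the real Resolver types.

// Minimal sketch of the stale-data check this commit introduces.
// Field names mirror the diff below; the types are trimmed for illustration only.
interface NodeData {
  status: 'loading' | 'running' | 'terminated' | 'error';
  dataRequestID: number; // value of refreshCount when this node's data was requested
}

interface DataState {
  refreshCount: number; // incremented each time the user clicks the refresh/update button
  nodeData?: Map<string, NodeData>;
}

// A node's data is stale when nothing has been stored for it yet, or when the user
// has refreshed since it was fetched (refreshCount has moved past its dataRequestID).
function nodeDataIsStale(state: DataState, id: string): boolean {
  const requestID = state.nodeData?.get(id)?.dataRequestID;
  return requestID === undefined || state.refreshCount > requestID;
}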

View file

@ -22,6 +22,7 @@ import {
ancestryArray,
entityIDSafeVersion,
parentEntityIDSafeVersion,
processNameSafeVersion,
timestampSafeVersion,
} from './models/event';
import {
@ -965,6 +966,7 @@ export class EndpointDocGenerator {
eventCategory: ['process'],
eventType: ['end'],
eventsDataStream: opts.eventsDataStream,
processName: processNameSafeVersion(root),
})
);
}
@ -1002,6 +1004,7 @@ export class EndpointDocGenerator {
ancestry: ancestryArray(ancestor),
ancestryArrayLimit: opts.ancestryArraySize,
eventsDataStream: opts.eventsDataStream,
processName: processNameSafeVersion(ancestor),
})
);
}

View file

@ -19,11 +19,11 @@ describe('node data model', () => {
const original: Map<string, NodeData> = new Map();
it('creates a copy when using setRequestedNodes', () => {
expect(setRequestedNodes(original, new Set()) === original).toBeFalsy();
expect(setRequestedNodes(original, new Set(), 0) === original).toBeFalsy();
});
it('creates a copy when using setErrorNodes', () => {
expect(setErrorNodes(original, new Set()) === original).toBeFalsy();
expect(setErrorNodes(original, new Set(), 0) === original).toBeFalsy();
});
it('creates a copy when using setReloadedNodes', () => {
@ -37,40 +37,57 @@ describe('node data model', () => {
receivedEvents: [],
requestedNodes: new Set(),
numberOfRequestedEvents: 1,
dataRequestID: 0,
}) === original
).toBeFalsy();
});
});
it('overwrites the existing entries and creates new ones when calling setRequestedNodes', () => {
const state: Map<string, NodeData> = new Map([
['1', { events: [generator.generateEvent()], status: 'running', eventType: ['start'] }],
const state = new Map<string, NodeData>([
[
'1',
{
events: [generator.generateEvent({ eventType: ['start'] })],
status: 'running',
dataRequestID: 0,
},
],
]);
expect(setRequestedNodes(state, new Set(['1', '2']))).toEqual(
new Map([
['1', { events: [], status: 'loading' }],
['2', { events: [], status: 'loading' }],
expect(setRequestedNodes(state, new Set(['1', '2']), 0)).toEqual(
new Map<string, NodeData>([
['1', { events: [], status: 'loading', dataRequestID: 0 }],
['2', { events: [], status: 'loading', dataRequestID: 0 }],
])
);
});
it('overwrites the existing entries and creates new ones when calling setErrorNodes', () => {
const state: Map<string, NodeData> = new Map([
['1', { events: [generator.generateEvent()], status: 'running', eventType: ['start'] }],
const state = new Map<string, NodeData>([
[
'1',
{
events: [generator.generateEvent({ eventType: ['start'] })],
status: 'running',
dataRequestID: 0,
},
],
]);
expect(setErrorNodes(state, new Set(['1', '2']))).toEqual(
new Map([
['1', { events: [], status: 'error' }],
['2', { events: [], status: 'error' }],
expect(setErrorNodes(state, new Set(['1', '2']), 0)).toEqual(
new Map<string, NodeData>([
['1', { events: [], status: 'error', dataRequestID: 0 }],
['2', { events: [], status: 'error', dataRequestID: 0 }],
])
);
});
describe('setReloadedNodes', () => {
it('removes the id from the map', () => {
const state: Map<string, NodeData> = new Map([['1', { events: [], status: 'error' }]]);
const state = new Map<string, NodeData>([
['1', { events: [], status: 'error', dataRequestID: 0 }],
]);
expect(setReloadedNodes(state, '1')).toEqual(new Map());
});
});
@ -78,9 +95,9 @@ describe('node data model', () => {
describe('updateWithReceivedNodes', () => {
const node1Events = [generator.generateEvent({ entityID: '1', eventType: ['start'] })];
const node2Events = [generator.generateEvent({ entityID: '2', eventType: ['start'] })];
const state: Map<string, NodeData> = new Map([
['1', { events: node1Events, status: 'error' }],
['2', { events: node2Events, status: 'error' }],
const state = new Map<string, NodeData>([
['1', { events: node1Events, status: 'error', dataRequestID: 0 }],
['2', { events: node2Events, status: 'error', dataRequestID: 0 }],
]);
describe('reachedLimit is false', () => {
it('overwrites entries with the received data', () => {
@ -93,11 +110,12 @@ describe('node data model', () => {
requestedNodes: new Set(['1']),
// a number greater than the amount received so the reached limit flag will be false
numberOfRequestedEvents: 10,
dataRequestID: 0,
})
).toEqual(
new Map([
['1', { events: [genNodeEvent], status: 'running' }],
['2', { events: node2Events, status: 'error' }],
new Map<string, NodeData>([
['1', { events: [genNodeEvent], status: 'running', dataRequestID: 0 }],
['2', { events: node2Events, status: 'error', dataRequestID: 0 }],
])
);
});
@ -109,11 +127,12 @@ describe('node data model', () => {
receivedEvents: [],
requestedNodes: new Set(['1', '2']),
numberOfRequestedEvents: 1,
dataRequestID: 0,
})
).toEqual(
new Map([
['1', { events: [], status: 'running' }],
['2', { events: [], status: 'running' }],
new Map<string, NodeData>([
['1', { events: [], status: 'running', dataRequestID: 0 }],
['2', { events: [], status: 'running', dataRequestID: 0 }],
])
);
});
@ -127,8 +146,13 @@ describe('node data model', () => {
receivedEvents: [],
requestedNodes: new Set(['1']),
numberOfRequestedEvents: 0,
dataRequestID: 0,
})
).toEqual(new Map([['2', { events: node2Events, status: 'error' }]]));
).toEqual(
new Map<string, NodeData>([
['2', { events: node2Events, status: 'error', dataRequestID: 0 }],
])
);
});
it('attempts to remove entries from the map even if they do not exist', () => {
@ -138,11 +162,12 @@ describe('node data model', () => {
receivedEvents: [],
requestedNodes: new Set(['10']),
numberOfRequestedEvents: 0,
dataRequestID: 0,
})
).toEqual(
new Map([
['1', { events: node1Events, status: 'error' }],
['2', { events: node2Events, status: 'error' }],
new Map<string, NodeData>([
['1', { events: node1Events, status: 'error', dataRequestID: 0 }],
['2', { events: node2Events, status: 'error', dataRequestID: 0 }],
])
);
});
@ -156,11 +181,12 @@ describe('node data model', () => {
receivedEvents: [genNodeEvent],
requestedNodes: new Set(['1']),
numberOfRequestedEvents: 1,
dataRequestID: 0,
})
).toEqual(
new Map([
['1', { events: [genNodeEvent], status: 'running' }],
['2', { events: node2Events, status: 'error' }],
new Map<string, NodeData>([
['1', { events: [genNodeEvent], status: 'running', dataRequestID: 0 }],
['2', { events: node2Events, status: 'error', dataRequestID: 0 }],
])
);
});

View file

@ -17,14 +17,15 @@ import { isTerminatedProcess } from './process_event';
*/
export function setRequestedNodes(
storedNodeInfo = new Map<string, NodeData>(),
requestedNodes: Set<string>
requestedNodes: Set<string>,
dataRequestID: number
): Map<string, NodeData> {
const requestedNodesArray = Array.from(requestedNodes);
return new Map<string, NodeData>([
...storedNodeInfo,
...requestedNodesArray.map((id: string): [string, NodeData] => [
id,
{ events: [], status: 'loading' },
{ events: [], status: 'loading', dataRequestID },
]),
]);
}
@ -37,14 +38,15 @@ export function setRequestedNodes(
*/
export function setErrorNodes(
storedNodeInfo = new Map<string, NodeData>(),
errorNodes: Set<string>
errorNodes: Set<string>,
dataRequestID: number
): Map<string, NodeData> {
const errorNodesArray = Array.from(errorNodes);
return new Map<string, NodeData>([
...storedNodeInfo,
...errorNodesArray.map((id: string): [string, NodeData] => [
id,
{ events: [], status: 'error' },
{ events: [], status: 'error', dataRequestID },
]),
]);
}
@ -103,11 +105,13 @@ export function updateWithReceivedNodes({
receivedEvents,
requestedNodes,
numberOfRequestedEvents,
dataRequestID,
}: {
storedNodeInfo: Map<string, NodeData> | undefined;
receivedEvents: SafeResolverEvent[];
requestedNodes: Set<string>;
numberOfRequestedEvents: number;
dataRequestID: number;
}): Map<string, NodeData> {
const copiedMap = new Map<string, NodeData>([...storedNodeInfo]);
const reachedLimit = receivedEvents.length >= numberOfRequestedEvents;
@ -123,7 +127,7 @@ export function updateWithReceivedNodes({
} else {
// if we didn't reach the limit but we didn't receive any node data for a particular ID
// then that means Elasticsearch does not have any node data for that ID.
copiedMap.set(id, { events: [], status: 'running' });
copiedMap.set(id, { events: [], status: 'running', dataRequestID });
}
}
}
@ -133,6 +137,7 @@ export function updateWithReceivedNodes({
copiedMap.set(id, {
events: [...info.events],
status: info.terminated ? 'terminated' : 'running',
dataRequestID,
});
}
@ -143,6 +148,9 @@ export function updateWithReceivedNodes({
* This is used for displaying information in the node panel mainly and we should be able to remove it eventually in
* favor of showing all the node data associated with a node in the tree.
*
* NOTE: The events are actually in descending order by the timestamp field because of the `/events` api. So this
* returns the "newest" event.
*
* @param data node data for a specific node ID
* @returns the first event or undefined if the node data passed in was undefined
*/

View file

@ -51,6 +51,77 @@ describe('TreeFetcherParameters#equal:', () => {
{ databaseDocumentID: 'b', indices: [], dataRequestID: 1, filters: {} },
false,
],
// all parameters the same, except for the filters
[
{ databaseDocumentID: 'b', indices: [], dataRequestID: 0, filters: {} },
{
databaseDocumentID: 'b',
indices: [],
dataRequestID: 0,
filters: { to: 'to', from: 'from' },
},
false,
],
// all parameters the same, except for the filters.to
[
{ databaseDocumentID: 'b', indices: [], dataRequestID: 0, filters: { to: '100' } },
{
databaseDocumentID: 'b',
indices: [],
dataRequestID: 0,
filters: { to: 'to' },
},
false,
],
// all parameters the same, except for the filters.to, parameters are swapped from the one above
[
{
databaseDocumentID: 'b',
indices: [],
dataRequestID: 0,
filters: { to: 'to' },
},
{ databaseDocumentID: 'b', indices: [], dataRequestID: 0, filters: { to: '100' } },
false,
],
// all parameters the same
[
{
databaseDocumentID: 'b',
indices: [],
dataRequestID: 0,
filters: { to: 'to', from: 'from' },
},
{
databaseDocumentID: 'b',
indices: [],
dataRequestID: 0,
filters: { to: 'to', from: 'from' },
},
true,
],
// all parameters the same, only using the filters.to field
[
{
databaseDocumentID: 'b',
indices: [],
dataRequestID: 0,
filters: { to: 'to' },
},
{
databaseDocumentID: 'b',
indices: [],
dataRequestID: 0,
filters: { to: 'to' },
},
true,
],
// all parameters the same, except for the request id
[
{ databaseDocumentID: 'b', indices: [], dataRequestID: 0, filters: {} },
{ databaseDocumentID: 'b', indices: [], dataRequestID: 1, filters: {} },
false,
],
];
describe.each(cases)('%p when compared to %p', (first, second, expected) => {
it(`should ${expected ? '' : 'not '}be equal`, () => {

View file

@ -19,7 +19,9 @@ export function equal(param1: TreeFetcherParameters, param2?: TreeFetcherParamet
}
if (
param1.databaseDocumentID !== param2.databaseDocumentID ||
param1.dataRequestID !== param2.dataRequestID
param1.dataRequestID !== param2.dataRequestID ||
param1.filters.from !== param2.filters.from ||
param1.filters.to !== param2.filters.to
) {
return false;
}
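
A quick hedged illustration of the effect (values made up; sameTreeRequest condenses only the checks visible in the hunk above): two otherwise identical requests now compare as unequal when their time range differs, which is what forces a fresh tree fetch after the date picker changes.

interface IllustrativeParams {
  databaseDocumentID: string;
  dataRequestID: number;
  filters: { from?: string; to?: string };
}

// Condensed version of the comparison above, for illustration only.
function sameTreeRequest(a: IllustrativeParams, b: IllustrativeParams): boolean {
  return (
    a.databaseDocumentID === b.databaseDocumentID &&
    a.dataRequestID === b.dataRequestID &&
    a.filters.from === b.filters.from &&
    a.filters.to === b.filters.to
  );
}

const before: IllustrativeParams = {
  databaseDocumentID: 'doc',
  dataRequestID: 0,
  filters: { from: '2020-12-01T00:00:00.000Z', to: '2020-12-16T00:00:00.000Z' },
};
const after: IllustrativeParams = {
  ...before,
  filters: { ...before.filters, to: '2020-12-17T00:00:00.000Z' },
};

sameTreeRequest(before, after); // false: only the `to` filter differs, so the tree is refetched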

View file

@ -150,6 +150,7 @@ interface AppRequestingNodeData {
* The list of IDs that will be sent to the server to retrieve data for.
*/
requestedIDs: Set<string>;
dataRequestID: number;
};
}
@ -174,6 +175,7 @@ interface ServerFailedToReturnNodeData {
* The list of IDs that were sent to the server to retrieve data for.
*/
requestedIDs: Set<string>;
dataRequestID: number;
};
}

View file

@ -194,6 +194,7 @@ export const dataReducer: Reducer<DataState, ResolverAction> = (state = initialS
receivedEvents: action.payload.nodeData,
requestedNodes: action.payload.requestedIDs,
numberOfRequestedEvents: action.payload.numberOfRequestedEvents,
dataRequestID: action.payload.dataRequestID,
});
return {
@ -209,7 +210,8 @@ export const dataReducer: Reducer<DataState, ResolverAction> = (state = initialS
} else if (action.type === 'appRequestingNodeData') {
const updatedNodeData = nodeDataModel.setRequestedNodes(
state.nodeData,
action.payload.requestedIDs
action.payload.requestedIDs,
action.payload.dataRequestID
);
return {
@ -217,7 +219,11 @@ export const dataReducer: Reducer<DataState, ResolverAction> = (state = initialS
nodeData: updatedNodeData,
};
} else if (action.type === 'serverFailedToReturnNodeData') {
const updatedData = nodeDataModel.setErrorNodes(state.nodeData, action.payload.requestedIDs);
const updatedData = nodeDataModel.setErrorNodes(
state.nodeData,
action.payload.requestedIDs,
action.payload.dataRequestID
);
return {
...state,

View file

@ -5,7 +5,7 @@
*/
import * as selectors from './selectors';
import { DataState } from '../../types';
import { DataState, TimeRange } from '../../types';
import { ResolverAction } from '../actions';
import { dataReducer } from './reducer';
import { createStore } from 'redux';
@ -20,6 +20,7 @@ import * as nodeModel from '../../../../common/endpoint/models/node';
import { mockTreeFetcherParameters } from '../../mocks/tree_fetcher_parameters';
import { SafeResolverEvent } from '../../../../common/endpoint/types';
import { mockEndpointEvent } from '../../mocks/endpoint_event';
import { maxDate } from '../../models/time_range';
function mockNodeDataWithAllProcessesTerminated({
originID,
@ -81,7 +82,7 @@ function mockNodeDataWithAllProcessesTerminated({
}
describe('data state', () => {
let actions: ResolverAction[] = [];
let actions: ResolverAction[];
/**
* Get state, given an ordered collection of actions.
@ -114,6 +115,10 @@ describe('data state', () => {
.join('\n');
};
beforeEach(() => {
actions = [];
});
it(`shouldn't initially be loading, or have an error, or have more children or ancestors, or have a request to make, or have a pending request that needs to be aborted.`, () => {
expect(viewAsAString(state())).toMatchInlineSnapshot(`
"is loading: false
@ -316,18 +321,209 @@ describe('data state', () => {
requires a pending request to be aborted: {\\"databaseDocumentID\\":\\"first databaseDocumentID\\",\\"indices\\":[],\\"filters\\":{}}"
`);
});
describe('when after initial load resolver is told to refresh', () => {
const databaseDocumentID = 'doc id';
const resolverComponentInstanceID = 'instance';
const originID = 'origin';
const firstChildID = 'first';
const secondChildID = 'second';
const { resolverTree } = mockTreeWithNoAncestorsAnd2Children({
originID,
firstChildID,
secondChildID,
describe('and when the old request was aborted', () => {
beforeEach(() => {
actions.push({
type: 'appAbortedResolverDataRequest',
payload: { databaseDocumentID: firstDatabaseDocumentID, indices: [], filters: {} },
});
});
const { schema, dataSource } = endpointSourceSchema();
it('should not require a pending request to be aborted', () => {
expect(selectors.treeRequestParametersToAbort(state())).toBe(null);
});
it('should have a document to fetch', () => {
expect(selectors.treeParametersToFetch(state())?.databaseDocumentID).toBe(
secondDatabaseDocumentID
);
});
it('should not be loading', () => {
expect(selectors.isTreeLoading(state())).toBe(false);
});
it('should not have an error, more children, or more ancestors.', () => {
expect(viewAsAString(state())).toMatchInlineSnapshot(`
"is loading: false
has an error: false
has more children: false
has more ancestors: false
parameters to fetch: {\\"databaseDocumentID\\":\\"second databaseDocumentID\\",\\"indices\\":[],\\"filters\\":{},\\"dataRequestID\\":0}
requires a pending request to be aborted: null"
`);
});
describe('and when the next request starts', () => {
beforeEach(() => {
actions.push({
type: 'appRequestedResolverData',
payload: { databaseDocumentID: secondDatabaseDocumentID, indices: [], filters: {} },
});
});
it('should be loading', () => {
expect(selectors.isTreeLoading(state())).toBe(true);
});
it('should not have an error, more children, more ancestors, or a pending request that needs to be aborted.', () => {
expect(viewAsAString(state())).toMatchInlineSnapshot(`
"is loading: true
has an error: false
has more children: false
has more ancestors: false
parameters to fetch: {\\"databaseDocumentID\\":\\"second databaseDocumentID\\",\\"indices\\":[],\\"filters\\":{},\\"dataRequestID\\":0}
requires a pending request to be aborted: {\\"databaseDocumentID\\":\\"second databaseDocumentID\\",\\"indices\\":[],\\"filters\\":{}}"
`);
});
});
});
});
describe('when resolver receives external properties indicating it should refresh', () => {
beforeEach(() => {
actions = [
{
type: 'appReceivedNewExternalProperties',
payload: {
databaseDocumentID: 'doc id',
resolverComponentInstanceID: 'instance',
locationSearch: '',
indices: [],
shouldUpdate: true,
filters: {},
},
},
];
});
it('should indicate that all node data is stale before the server returned node data', () => {
// the map does not exist yet so nothing should be in it
expect(selectors.nodeDataIsStale(state())('a')).toBeTruthy();
});
describe('when resolver receives some data for nodes', () => {
beforeEach(() => {
actions = [
...actions,
{
type: 'serverReturnedNodeData',
payload: {
nodeData: [],
requestedIDs: new Set(['a', 'b']),
numberOfRequestedEvents: 500,
// the refreshCount should be at 1 right now
dataRequestID: 0,
},
},
{
type: 'serverReturnedNodeData',
payload: {
nodeData: [],
requestedIDs: new Set(['c', 'd']),
numberOfRequestedEvents: 500,
// the refreshCount should be at 1 right now
dataRequestID: 1,
},
},
];
});
it('should indicate that nodes a and b are stale', () => {
expect(selectors.nodeDataIsStale(state())('a')).toBeTruthy();
expect(selectors.nodeDataIsStale(state())('b')).toBeTruthy();
});
it('should indicate that nodes c and d are up to date', () => {
expect(selectors.nodeDataIsStale(state())('c')).toBeFalsy();
expect(selectors.nodeDataIsStale(state())('d')).toBeFalsy();
});
});
});
describe('with a mock tree of no ancestors and two children', () => {
const databaseDocumentID = 'doc id';
const resolverComponentInstanceID = 'instance';
const originID = 'origin';
const firstChildID = 'first';
const secondChildID = 'second';
const { resolverTree } = mockTreeWithNoAncestorsAnd2Children({
originID,
firstChildID,
secondChildID,
});
const { schema, dataSource } = endpointSourceSchema();
describe('when resolver receives external properties without time range filters', () => {
beforeEach(() => {
actions = [
{
type: 'appReceivedNewExternalProperties',
payload: {
databaseDocumentID,
resolverComponentInstanceID,
locationSearch: '',
indices: [],
shouldUpdate: false,
filters: {},
},
},
{
type: 'appRequestedResolverData',
payload: { databaseDocumentID, indices: [], dataRequestID: 0, filters: {} },
},
{
type: 'serverReturnedResolverData',
payload: {
result: resolverTree,
dataSource,
schema,
parameters: { databaseDocumentID, indices: [], dataRequestID: 0, filters: {} },
},
},
];
});
it('uses the default time range filters', () => {
expect(selectors.timeRangeFilters(state())?.from).toBe(new Date(0).toISOString());
expect(selectors.timeRangeFilters(state())?.to).toBe(new Date(maxDate).toISOString());
});
describe('when resolver receives time range filters', () => {
const timeRangeFilters: TimeRange = {
to: 'to',
from: 'from',
};
beforeEach(() => {
actions = [
...actions,
{
type: 'appReceivedNewExternalProperties',
payload: {
databaseDocumentID,
resolverComponentInstanceID,
locationSearch: '',
indices: [],
shouldUpdate: false,
filters: timeRangeFilters,
},
},
{
type: 'appRequestedResolverData',
payload: {
databaseDocumentID,
indices: [],
dataRequestID: 0,
filters: timeRangeFilters,
},
},
{
type: 'serverReturnedResolverData',
payload: {
result: resolverTree,
dataSource,
schema,
parameters: {
databaseDocumentID,
indices: [],
dataRequestID: 0,
filters: timeRangeFilters,
},
},
},
];
});
it('uses the received time range filters', () => {
expect(selectors.timeRangeFilters(state())?.from).toBe('from');
expect(selectors.timeRangeFilters(state())?.to).toBe('to');
});
});
});
describe('when after initial load resolver is told to refresh', () => {
beforeEach(() => {
actions = [
// receive the document ID, this would cause the middleware to start the request
@ -404,56 +600,6 @@ describe('data state', () => {
`);
});
});
describe('and when the old request was aborted', () => {
beforeEach(() => {
actions.push({
type: 'appAbortedResolverDataRequest',
payload: { databaseDocumentID: firstDatabaseDocumentID, indices: [], filters: {} },
});
});
it('should not require a pending request to be aborted', () => {
expect(selectors.treeRequestParametersToAbort(state())).toBe(null);
});
it('should have a document to fetch', () => {
expect(selectors.treeParametersToFetch(state())?.databaseDocumentID).toBe(
secondDatabaseDocumentID
);
});
it('should not be loading', () => {
expect(selectors.isTreeLoading(state())).toBe(false);
});
it('should not have an error, more children, or more ancestors.', () => {
expect(viewAsAString(state())).toMatchInlineSnapshot(`
"is loading: false
has an error: false
has more children: false
has more ancestors: false
parameters to fetch: {\\"databaseDocumentID\\":\\"second databaseDocumentID\\",\\"indices\\":[],\\"filters\\":{},\\"dataRequestID\\":0}
requires a pending request to be aborted: null"
`);
});
describe('and when the next request starts', () => {
beforeEach(() => {
actions.push({
type: 'appRequestedResolverData',
payload: { databaseDocumentID: secondDatabaseDocumentID, indices: [], filters: {} },
});
});
it('should be loading', () => {
expect(selectors.isTreeLoading(state())).toBe(true);
});
it('should not have an error, more children, more ancestors, or a pending request that needs to be aborted.', () => {
expect(viewAsAString(state())).toMatchInlineSnapshot(`
"is loading: true
has an error: false
has more children: false
has more ancestors: false
parameters to fetch: {\\"databaseDocumentID\\":\\"second databaseDocumentID\\",\\"indices\\":[],\\"filters\\":{},\\"dataRequestID\\":0}
requires a pending request to be aborted: {\\"databaseDocumentID\\":\\"second databaseDocumentID\\",\\"indices\\":[],\\"filters\\":{}}"
`);
});
});
});
});
describe('with a tree with no descendants and 2 ancestors', () => {
const originID = 'c';
@ -461,20 +607,22 @@ describe('data state', () => {
const secondAncestorID = 'a';
beforeEach(() => {
const { schema, dataSource } = endpointSourceSchema();
actions.push({
type: 'serverReturnedResolverData',
payload: {
result: mockTreeWith2AncestorsAndNoChildren({
originID,
firstAncestorID,
secondAncestorID,
}),
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
actions = [
{
type: 'serverReturnedResolverData',
payload: {
result: mockTreeWith2AncestorsAndNoChildren({
originID,
firstAncestorID,
secondAncestorID,
}),
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
},
},
});
];
});
it('should have no flowto candidate for the origin', () => {
expect(selectors.ariaFlowtoCandidate(state())(originID)).toBe(null);
@ -496,17 +644,19 @@ describe('data state', () => {
secondAncestorID,
});
beforeEach(() => {
actions.push({
type: 'serverReturnedNodeData',
payload: {
nodeData,
requestedIDs: new Set([originID, firstAncestorID, secondAncestorID]),
// mock the requested size being larger than the returned number of events so we
// avoid the case where the limit was reached
numberOfRequestedEvents: nodeData.length + 1,
dataRequestID: 0,
actions = [
{
type: 'serverReturnedNodeData',
payload: {
nodeData,
requestedIDs: new Set([originID, firstAncestorID, secondAncestorID]),
// mock the requested size being larger than the returned number of events so we
// avoid the case where the limit was reached
numberOfRequestedEvents: nodeData.length + 1,
dataRequestID: 0,
},
},
});
];
});
it('should have origin as terminated', () => {
expect(selectors.nodeDataStatus(state())(originID)).toBe('terminated');
@ -529,16 +679,18 @@ describe('data state', () => {
secondChildID,
});
const { schema, dataSource } = endpointSourceSchema();
actions.push({
type: 'serverReturnedResolverData',
payload: {
result: resolverTree,
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
actions = [
{
type: 'serverReturnedResolverData',
payload: {
result: resolverTree,
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
},
},
});
];
});
it('should have no flowto candidate for the origin', () => {
expect(selectors.ariaFlowtoCandidate(state())(originID)).toBe(null);
@ -561,16 +713,18 @@ describe('data state', () => {
secondChildID,
});
const { schema, dataSource } = endpointSourceSchema();
actions.push({
type: 'serverReturnedResolverData',
payload: {
result: resolverTree,
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
actions = [
{
type: 'serverReturnedResolverData',
payload: {
result: resolverTree,
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
},
},
});
];
});
it('should be able to calculate the aria flowto candidates for all processes nodes', () => {
const graphables = selectors.graphableNodes(state());
@ -595,16 +749,18 @@ describe('data state', () => {
secondChildID,
});
const { schema, dataSource } = endpointSourceSchema();
actions.push({
type: 'serverReturnedResolverData',
payload: {
result: tree,
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
actions = [
{
type: 'serverReturnedResolverData',
payload: {
result: tree,
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
},
},
});
];
});
it('should have 4 graphable processes', () => {
expect(selectors.graphableNodes(state()).length).toBe(4);
@ -614,16 +770,18 @@ describe('data state', () => {
beforeEach(() => {
const { schema, dataSource } = endpointSourceSchema();
const tree = mockTreeWithNoProcessEvents();
actions.push({
type: 'serverReturnedResolverData',
payload: {
result: tree,
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
actions = [
{
type: 'serverReturnedResolverData',
payload: {
result: tree,
dataSource,
schema,
// this value doesn't matter
parameters: mockTreeFetcherParameters(),
},
},
});
];
});
it('should return an empty layout', () => {
expect(selectors.layout(state())).toMatchInlineSnapshot(`

View file

@ -149,15 +149,42 @@ export const nodeDataForID: (
};
});
const nodeDataRequestID: (state: DataState) => (id: string) => number | undefined = createSelector(
nodeDataForID,
(nodeInfo) => {
return (id: string) => {
return nodeInfo(id)?.dataRequestID;
};
}
);
/**
* Returns true if a specific node's data is outdated. It will be outdated if a user clicked the refresh/update button
* after the node data was retrieved.
*/
export const nodeDataIsStale: (state: DataState) => (id: string) => boolean = createSelector(
nodeDataRequestID,
refreshCount,
(nodeRequestID, newID) => {
return (id: string) => {
const oldID = nodeRequestID(id);
// if we don't have the node in the map then its data must be stale; likewise, if the refreshCount is greater than the
// node's requestID then it is also stale
return oldID === undefined || newID > oldID;
};
}
);
/**
* Returns a function that can be called to retrieve the state of the node, running, loading, or terminated.
*/
export const nodeDataStatus: (state: DataState) => (id: string) => NodeDataStatus = createSelector(
nodeDataForID,
(nodeInfo) => {
nodeDataIsStale,
(nodeInfo, isStale) => {
return (id: string) => {
const info = nodeInfo(id);
if (!info) {
if (!info || isStale(id)) {
return 'loading';
}
@ -260,6 +287,9 @@ export const relatedEventCountByCategory: (
}
);
/**
* Retrieves the number of times the update/refresh button was clicked to be compared against various dataRequestIDs
*/
export function refreshCount(state: DataState) {
return state.refreshCount;
}
@ -345,8 +375,11 @@ export function treeParametersToFetch(state: DataState): TreeFetcherParameters |
}
}
/**
* Retrieve the time range filters if they exist, otherwise default to start of epoch to the largest future date.
*/
export const timeRangeFilters = createSelector(
treeParametersToFetch,
(state: DataState) => state.tree?.currentParameters,
function timeRangeFilters(treeParameters): TimeRange {
// Should always be provided from date picker, but provide valid defaults in any case.
const from = new Date(0);
@ -355,17 +388,15 @@ export const timeRangeFilters = createSelector(
from: from.toISOString(),
to: to.toISOString(),
};
if (treeParameters !== null) {
if (treeParameters !== undefined) {
if (treeParameters.filters.from) {
timeRange.from = treeParameters.filters.from;
}
if (treeParameters.filters.to) {
timeRange.to = treeParameters.filters.to;
}
return timeRange;
} else {
return timeRange;
}
return timeRange;
}
);
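
A small worked example of the defaulting behavior; defaultedTimeRange is an illustrative stand-in for the selector above, and the maxDate value is assumed to be the largest valid JavaScript Date timestamp rather than taken from ../models/time_range.

const maxDate = 8640000000000000; // assumed: the largest valid JavaScript Date timestamp

function defaultedTimeRange(filters?: { from?: string; to?: string }): { from: string; to: string } {
  return {
    from: filters?.from ?? new Date(0).toISOString(),
    to: filters?.to ?? new Date(maxDate).toISOString(),
  };
}

defaultedTimeRange();
// → { from: '1970-01-01T00:00:00.000Z', to: '+275760-09-13T00:00:00.000Z' }
defaultedTimeRange({ from: 'from', to: 'to' });
// → { from: 'from', to: 'to' } (values provided by the date picker win)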

View file

@ -43,6 +43,7 @@ export function NodeDataFetcher(
return;
}
const newID = selectors.refreshCount(state);
/**
* Dispatch an action indicating that we are going to request data for a set of nodes so that we can show a loading
* state for those nodes in the UI.
@ -54,11 +55,11 @@ export function NodeDataFetcher(
type: 'appRequestingNodeData',
payload: {
requestedIDs: newIDsToRequest,
dataRequestID: newID,
},
});
let results: SafeResolverEvent[] | undefined;
const newID = selectors.refreshCount(state);
try {
const timeRangeFilters = selectors.timeRangeFilters(state);
results = await dataAccessLayer.nodeData({
@ -75,6 +76,7 @@ export function NodeDataFetcher(
type: 'serverFailedToReturnNodeData',
payload: {
requestedIDs: newIDsToRequest,
dataRequestID: newID,
},
});
}
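
The relevant part of this change is ordering: the refresh counter is read once, before the loading action is dispatched, and that same value is attached to the loading, success, and failure actions of the request. A hedged, self-contained sketch of that shape; requestNodeData, fetchNodes, dispatch, and limit are stand-ins for the middleware wiring, not the real NodeDataFetcher.

type NodeAction =
  | { type: 'appRequestingNodeData'; payload: { requestedIDs: Set<string>; dataRequestID: number } }
  | {
      type: 'serverReturnedNodeData';
      payload: {
        nodeData: unknown[];
        requestedIDs: Set<string>;
        numberOfRequestedEvents: number;
        dataRequestID: number;
      };
    }
  | { type: 'serverFailedToReturnNodeData'; payload: { requestedIDs: Set<string>; dataRequestID: number } };

async function requestNodeData(
  refreshCount: number,
  requestedIDs: Set<string>,
  limit: number,
  fetchNodes: (ids: string[], limit: number) => Promise<unknown[]>,
  dispatch: (action: NodeAction) => void
): Promise<void> {
  const dataRequestID = refreshCount; // captured up front, not after the await
  dispatch({ type: 'appRequestingNodeData', payload: { requestedIDs, dataRequestID } });
  try {
    const nodeData = await fetchNodes(Array.from(requestedIDs), limit);
    dispatch({
      type: 'serverReturnedNodeData',
      payload: { nodeData, requestedIDs, numberOfRequestedEvents: limit, dataRequestID },
    });
  } catch {
    dispatch({ type: 'serverFailedToReturnNodeData', payload: { requestedIDs, dataRequestID } });
  }
}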

View file

@ -8,7 +8,7 @@ import { createSelector, defaultMemoize } from 'reselect';
import * as cameraSelectors from './camera/selectors';
import * as dataSelectors from './data/selectors';
import * as uiSelectors from './ui/selectors';
import { ResolverState, IsometricTaxiLayout, DataState } from '../types';
import { ResolverState, IsometricTaxiLayout } from '../types';
import { EventStats } from '../../../common/endpoint/types';
import * as nodeModel from '../../../common/endpoint/models/node';
@ -397,9 +397,9 @@ export const graphNodeForID = composeSelectors(dataStateSelector, dataSelectors.
export const newIDsToRequest: (
state: ResolverState
) => (time: number) => Set<string> = createSelector(
composeSelectors(dataStateSelector, (dataState: DataState) => dataState.nodeData),
composeSelectors(dataStateSelector, dataSelectors.nodeDataIsStale),
visibleNodesAndEdgeLines,
function (nodeData, visibleNodesAndEdgeLinesAtTime) {
function (nodeDataIsStale, visibleNodesAndEdgeLinesAtTime) {
return defaultMemoize((time: number) => {
const { processNodePositions: nodesInView } = visibleNodesAndEdgeLinesAtTime(time);
@ -410,7 +410,7 @@ export const newIDsToRequest: (
// if the node has a valid ID field, and we either don't have any node data currently, or
// the map doesn't have info for this particular node, then add it to the set so it'll be requested
// by the middleware
if (id !== undefined && (!nodeData || !nodeData.has(id))) {
if (id !== undefined && nodeDataIsStale(id)) {
nodes.add(id);
}
}

View file

@ -316,6 +316,7 @@ export interface NodeData {
* An indication of the current state for retrieving the data.
*/
status: NodeDataStatus;
dataRequestID: number;
}
/**

View file

@ -233,25 +233,22 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
loadingNode.simulate('click', { button: 0 });
// the time here is equivalent to the animation duration in the camera reducer
graphSimulator.runAnimationFramesTimeFromNow(1000);
return loadingNode.prop('data-test-node-id');
};
const firstLoadingNodeInListID = '2kt059pl3i';
const identifiedLoadingNodeInGraph: (
graphSimulator: Simulator
) => Promise<ReactWrapper | undefined> = async (graphSimulator: Simulator) =>
graphSimulator.resolveWrapper(() =>
graphSimulator.selectedProcessNode(firstLoadingNodeInListID)
);
graphSimulator: Simulator,
nodeIDToFind: string
) => Promise<ReactWrapper | undefined> = async (
graphSimulator: Simulator,
nodeIDToFind: string
) => graphSimulator.resolveWrapper(() => graphSimulator.selectedProcessNode(nodeIDToFind));
const identifiedLoadingNodeInGraphState: (
graphSimulator: Simulator
) => Promise<string | undefined> = async (graphSimulator: Simulator) =>
(
await graphSimulator.resolveWrapper(() =>
graphSimulator.selectedProcessNode(firstLoadingNodeInListID)
)
)
graphSimulator: Simulator,
nodeIDToFind: string
) => Promise<string | undefined> = async (graphSimulator: Simulator, nodeIDToFind: string) =>
(await graphSimulator.resolveWrapper(() => graphSimulator.selectedProcessNode(nodeIDToFind)))
?.find('[data-test-subj="resolver:node:description"]')
.first()
.text();
@ -272,6 +269,7 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
describe('when clicking on a node in the panel whose node data has not yet been loaded and using a data access layer that returns an error for the clicked node', () => {
let throwError: boolean;
let foundLoadingNodeInList: string;
beforeEach(async () => {
// all the tests in this describe block will receive an error when loading data for the firstLoadingNodeInListID
// unless a test explicitly sets this flag to false
@ -287,7 +285,7 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
indexPatterns: string[];
limit: number;
}): Promise<SafeResolverEvent[]> => {
if (throwError && ids.includes(firstLoadingNodeInListID)) {
if (throwError) {
throw new Error(
'simulated error for retrieving first loading node in the process node list'
);
@ -307,7 +305,7 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
filters: {},
});
await findAndClickFirstLoadingNodeInPanel(simulator);
foundLoadingNodeInList = await findAndClickFirstLoadingNodeInPanel(simulator);
});
it('should receive an error while loading the node data', async () => {
@ -315,7 +313,7 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
await expect(
simulator.map(async () => ({
nodeState: await identifiedLoadingNodeInGraphState(simulator),
nodeState: await identifiedLoadingNodeInGraphState(simulator, foundLoadingNodeInList),
}))
).toYieldEqualTo({
nodeState: 'Error Process',
@ -326,13 +324,13 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
beforeEach(async () => {
throwError = true;
// ensure that the node is in view
await identifiedLoadingNodeInGraph(simulator);
await identifiedLoadingNodeInGraph(simulator, foundLoadingNodeInList);
// at this point the node's state should be error
// don't throw an error now, so we can test that the reload button actually loads the data correctly
throwError = false;
const firstLoadingNodeInListButton = await simulator.resolveWrapper(() =>
simulator.processNodePrimaryButton(firstLoadingNodeInListID)
simulator.processNodePrimaryButton(foundLoadingNodeInList)
);
// Click the primary button to reload the node's data
if (firstLoadingNodeInListButton) {
@ -344,7 +342,7 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
// we should receive the node's data now so we'll know that it is terminated
await expect(
simulator.map(async () => ({
nodeState: await identifiedLoadingNodeInGraphState(simulator),
nodeState: await identifiedLoadingNodeInGraphState(simulator, foundLoadingNodeInList),
}))
).toYieldEqualTo({
nodeState: 'Terminated Process',
@ -354,6 +352,7 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
});
describe('when clicking on a node in the process panel that is not loaded', () => {
let foundLoadingNodeInList: string;
beforeEach(async () => {
simulator = new Simulator({
databaseDocumentID,
@ -364,13 +363,13 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
filters: {},
});
await findAndClickFirstLoadingNodeInPanel(simulator);
foundLoadingNodeInList = await findAndClickFirstLoadingNodeInPanel(simulator);
});
it('should load the node data for the process and mark the process node as terminated in the graph', async () => {
await expect(
simulator.map(async () => ({
nodeState: await identifiedLoadingNodeInGraphState(simulator),
nodeState: await identifiedLoadingNodeInGraphState(simulator, foundLoadingNodeInList),
}))
).toYieldEqualTo({
nodeState: 'Terminated Process',
@ -380,7 +379,7 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
describe('when finishing the navigation to the node that is not loaded and navigating back to the process list in the panel', () => {
beforeEach(async () => {
// make sure the node is in view
await identifiedLoadingNodeInGraph(simulator);
await identifiedLoadingNodeInGraph(simulator, foundLoadingNodeInList);
const breadcrumbs = await simulator.resolve(
'resolver:node-detail:breadcrumbs:node-list-link'
@ -397,9 +396,7 @@ describe('Resolver, when using a generated tree with 20 generations, 4 children
// grab the node in the list that has the ID that we're looking for
return (
(await simulator.resolve('resolver:node-list:node-link'))
?.findWhere(
(wrapper) => wrapper.prop('data-test-node-id') === firstLoadingNodeInListID
)
?.findWhere((wrapper) => wrapper.prop('data-test-node-id') === foundLoadingNodeInList)
?.first()
// grab the description tag so we can determine the state of the process
.find('desc')