mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
## Summary

* Fixes a critical and blocking bug where we were querying against _all_ ML jobs. We now query _only_ for jobs that carry the SIEM group tag.
* Fixes a critical and blocking bug where the `ui/chrome` `getBasePath()` prefix was not being added to the fetch API calls (a minimal sketch of the fix pattern follows the checklist below).
* Fixes a critical and blocking bug where we were querying `influencers` rather than `criteriaFields` when showing scores within the details pages for the "Max Anomaly Jobs". This caused jobs to be missing from the details pages and produced incorrect results.
* Fixes a critical and blocking bug where we were not using `isInitialized` from the URL loading, so we could load a slightly different time range and show incorrect results.
* Fixes a critical and blocking bug where we were not filtering on `source` vs `destination` on the IP details page. Instead we were querying for everything and then filtering down to either of the two, as in "source OR destination". Now we only show what the user selects.
* Fixes an embarrassing and potentially critical bug where React Router was warning about the usage of `React.memo`. I swapped back to recompose `pure` so that we no longer see warnings on the `ml-hosts` and `ml-network` routes.

### Checklist

Use ~~strikethroughs~~ to remove checklist items you don't feel are applicable to this PR.

- [ ] This was checked for cross-browser compatibility, [including a check against IE11](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#cross-browser-compatibility)
- [ ] ~~Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/master/packages/kbn-i18n/README.md)~~
- [ ] ~~[Documentation](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#writing-documentation) was added for features that require explanation or tutorials~~
- [x] [Unit or functional tests](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#cross-browser-compatibility) were updated or added to match the most common scenarios
- [ ] ~~This was checked for [keyboard-only and screenreader accessibility](https://developer.mozilla.org/en-US/docs/Learn/Tools_and_testing/Cross_browser_testing/Accessibility#Accessibility_testing_checklist)~~

### For maintainers

- [x] This was checked for breaking API changes and was [labeled appropriately](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#release-notes-process)
- [x] This includes a feature addition or change that requires a release note and was [labeled appropriately](https://github.com/elastic/kibana/blob/master/CONTRIBUTING.md#release-notes-process)
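As a minimal sketch of the `getBasePath()` fix called out above: every ML fetch call moves from a hard-coded `/api/ml/...` path to one prefixed with the Kibana base path. The endpoint, headers, and `chrome` helpers below are taken from the diff in this commit; the wrapper function name and the simplified error handling are illustrative only.

```ts
import chrome from 'ui/chrome';

// Illustrative wrapper only; the real calls in this commit also run the
// response through throwIfNotOk and send a 'kbn-system-api' header.
export const fetchAnomaliesTableData = async (body: object) => {
  // Prefix the route with the configured base path instead of assuming
  // Kibana is served from '/', which is what made the old calls fail.
  const response = await fetch(`${chrome.getBasePath()}/api/ml/results/anomalies_table_data`, {
    method: 'POST',
    credentials: 'same-origin',
    body: JSON.stringify(body),
    headers: {
      'content-type': 'application/json',
      'kbn-xsrf': chrome.getXsrfToken(),
    },
  });
  return response.json();
};
```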
This commit is contained in:
parent d5fb64570b, commit 63a82bbeee
26 changed files with 524 additions and 86 deletions
@@ -5,7 +5,7 @@
  */

 import * as React from 'react';
-import { InfluencerInput, Anomalies } from '../types';
+import { InfluencerInput, Anomalies, CriteriaFields } from '../types';
 import { useAnomaliesTableData } from './use_anomalies_table_data';

 interface ChildrenArgs {
@@ -14,18 +14,22 @@ interface ChildrenArgs {
 }

 interface Props {
-  influencers: InfluencerInput[] | null;
+  influencers?: InfluencerInput[];
   startDate: number;
   endDate: number;
+  criteriaFields?: CriteriaFields[];
   children: (args: ChildrenArgs) => React.ReactNode;
+  skip: boolean;
 }

 export const AnomalyTableProvider = React.memo<Props>(
-  ({ influencers, startDate, endDate, children }) => {
+  ({ influencers, startDate, endDate, children, criteriaFields, skip }) => {
     const [isLoadingAnomaliesData, anomaliesData] = useAnomaliesTableData({
+      criteriaFields,
       influencers,
       startDate,
       endDate,
+      skip,
     });
     return <>{children({ isLoadingAnomaliesData, anomaliesData })}</>;
   }
@@ -5,7 +5,7 @@
  */

 import { InfluencerInput } from '../types';
-import { influencersToString, getThreshold } from './use_anomalies_table_data';
+import { influencersOrCriteriaToString, getThreshold } from './use_anomalies_table_data';
 import { AppKibanaFrameworkAdapter } from '../../../lib/adapters/framework/kibana_framework_adapter';

 describe('use_anomalies_table_data', () => {
@@ -16,7 +16,7 @@ describe('use_anomalies_table_data', () => {
         fieldValue: 'value-1',
       },
     ];
-    const influencerString = influencersToString(influencers);
+    const influencerString = influencersOrCriteriaToString(influencers);
     expect(influencerString).toEqual('field-1:value-1');
   });

@@ -31,18 +31,13 @@ describe('use_anomalies_table_data', () => {
         fieldValue: 'value-2',
       },
     ];
-    const influencerString = influencersToString(influencers);
+    const influencerString = influencersOrCriteriaToString(influencers);
     expect(influencerString).toEqual('field-1:value-1field-2:value-2');
   });

   test('should return an empty string when the array is empty', () => {
     const influencers: InfluencerInput[] = [];
-    const influencerString = influencersToString(influencers);
-    expect(influencerString).toEqual('');
-  });
-
-  test('should return an empty string when passed null', () => {
-    const influencerString = influencersToString(null);
+    const influencerString = influencersOrCriteriaToString(influencers);
     expect(influencerString).toEqual('');
   });

@@ -7,25 +7,29 @@
 import { useState, useEffect, useContext } from 'react';
 import moment from 'moment-timezone';
 import { anomaliesTableData } from '../api/anomalies_table_data';
-import { InfluencerInput, Anomalies } from '../types';
+import { InfluencerInput, Anomalies, CriteriaFields } from '../types';
 import {
   KibanaConfigContext,
   AppKibanaFrameworkAdapter,
 } from '../../../lib/adapters/framework/kibana_framework_adapter';
 import { hasMlUserPermissions } from '../permissions/has_ml_user_permissions';
 import { MlCapabilitiesContext } from '../permissions/ml_capabilities_provider';
+import { useSiemJobs } from '../../ml_popover/hooks/use_siem_jobs';

 interface Args {
-  influencers: InfluencerInput[] | null;
+  influencers?: InfluencerInput[];
   endDate: number;
   startDate: number;
   threshold?: number;
   skip?: boolean;
+  criteriaFields?: CriteriaFields[];
 }

 type Return = [boolean, Anomalies | null];

-export const influencersToString = (influencers: InfluencerInput[] | null): string =>
+export const influencersOrCriteriaToString = (
+  influencers: InfluencerInput[] | CriteriaFields[]
+): string =>
   influencers == null
     ? ''
     : influencers.reduce((accum, item) => `${accum}${item.fieldName}:${item.fieldValue}`, '');
@@ -58,28 +62,31 @@ export const getThreshold = (
 };

 export const useAnomaliesTableData = ({
-  influencers,
+  criteriaFields = [],
+  influencers = [],
   startDate,
   endDate,
   threshold = -1,
   skip = false,
 }: Args): Return => {
   const [tableData, setTableData] = useState<Anomalies | null>(null);
+  const [, siemJobs] = useSiemJobs(true);
   const [loading, setLoading] = useState(true);
   const config = useContext(KibanaConfigContext);
   const capabilities = useContext(MlCapabilitiesContext);
   const userPermissions = hasMlUserPermissions(capabilities);

   const fetchFunc = async (
-    influencersInput: InfluencerInput[] | null,
+    influencersInput: InfluencerInput[],
+    criteriaFieldsInput: CriteriaFields[],
     earliestMs: number,
     latestMs: number
   ) => {
-    if (userPermissions && influencersInput != null && !skip) {
+    if (userPermissions && !skip && siemJobs.length > 0) {
       const data = await anomaliesTableData(
         {
-          jobIds: [],
-          criteriaFields: [],
+          jobIds: siemJobs,
+          criteriaFields: criteriaFieldsInput,
           aggregationInterval: 'auto',
           threshold: getThreshold(config, threshold),
           earliestMs,
@@ -105,8 +112,16 @@ export const useAnomaliesTableData = ({

   useEffect(() => {
     setLoading(true);
-    fetchFunc(influencers, startDate, endDate);
-  }, [influencersToString(influencers), startDate, endDate, skip, userPermissions]);
+    fetchFunc(influencers, criteriaFields, startDate, endDate);
+  }, [
+    influencersOrCriteriaToString(influencers),
+    influencersOrCriteriaToString(criteriaFields),
+    startDate,
+    endDate,
+    skip,
+    userPermissions,
+    siemJobs.join(),
+  ]);

   return [loading, tableData];
 };
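For orientation, a minimal usage sketch of the hook as reworked above; the component and the field values are hypothetical, but the argument shape follows the `Args` interface in this hunk and the table components later in the diff (job IDs now come from the SIEM-group jobs internally, so callers only pass criteria).

```tsx
import * as React from 'react';
import { useAnomaliesTableData } from './use_anomalies_table_data';

// Hypothetical consumer: query anomalies for one host over a time range.
const ExampleHostAnomalies = ({ hostName, from, to }: { hostName: string; from: number; to: number }) => {
  const [loading, tableData] = useAnomaliesTableData({
    criteriaFields: [{ fieldName: 'host.name', fieldValue: hostName }],
    startDate: from,
    endDate: to,
    skip: false,
  });
  return loading ? <span>{'Loading anomalies'}</span> : <pre>{JSON.stringify(tableData, null, 2)}</pre>;
};
```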
@@ -5,12 +5,12 @@
  */

 import chrome from 'ui/chrome';
-import { Anomalies, InfluencerInput } from '../types';
+import { Anomalies, InfluencerInput, CriteriaFields } from '../types';
 import { throwIfNotOk } from './throw_if_not_ok';

 export interface Body {
   jobIds: string[];
-  criteriaFields: string[];
+  criteriaFields: CriteriaFields[];
   influencers: InfluencerInput[];
   aggregationInterval: string;
   threshold: number;
@@ -31,7 +31,7 @@ export const anomaliesTableData = async (
   headers: Record<string, string | undefined>
 ): Promise<Anomalies> => {
   try {
-    const response = await fetch('/api/ml/results/anomalies_table_data', {
+    const response = await fetch(`${chrome.getBasePath()}/api/ml/results/anomalies_table_data`, {
       method: 'POST',
       credentials: 'same-origin',
       body: JSON.stringify(body),
@@ -26,8 +26,9 @@ export const getMlCapabilities = async (
   headers: Record<string, string | undefined>
 ): Promise<MlCapabilities> => {
   try {
-    const response = await fetch('/api/ml/ml_capabilities', {
+    const response = await fetch(`${chrome.getBasePath()}/api/ml/ml_capabilities`, {
       method: 'GET',
       credentials: 'same-origin',
       headers: {
         'kbn-system-api': 'true',
         'content-Type': 'application/json',
@@ -8,6 +8,7 @@ import React from 'react';

 import { match as RouteMatch, Redirect, Route, Switch } from 'react-router-dom';
 import { QueryString } from 'ui/utils/query_string';
+import { pure } from 'recompose';
 import { addEntitiesToKql } from './add_entities_to_kql';
 import { replaceKQLParts } from './replace_kql_parts';
 import { emptyEntity, multipleEntities, getMultipleEntities } from './entity_helpers';
@@ -24,7 +25,7 @@ interface QueryStringType {
   timerange: string | null;
 }

-export const MlHostConditionalContainer = React.memo<MlHostConditionalProps>(({ match }) => (
+export const MlHostConditionalContainer = pure<MlHostConditionalProps>(({ match }) => (
   <Switch>
     <Route
       strict
@@ -8,6 +8,7 @@ import React from 'react';

 import { match as RouteMatch, Redirect, Route, Switch } from 'react-router-dom';
 import { QueryString } from 'ui/utils/query_string';
+import { pure } from 'recompose';
 import { addEntitiesToKql } from './add_entities_to_kql';
 import { replaceKQLParts } from './replace_kql_parts';
 import { emptyEntity, getMultipleEntities, multipleEntities } from './entity_helpers';
@@ -24,7 +25,7 @@ interface QueryStringType {
   timerange: string | null;
 }

-export const MlNetworkConditionalContainer = React.memo<MlNetworkConditionalProps>(({ match }) => (
+export const MlNetworkConditionalContainer = pure<MlNetworkConditionalProps>(({ match }) => (
   <Switch>
     <Route
       strict
@@ -12,7 +12,6 @@ export const operators = ['and', 'or', 'not'];
 export const removeKqlVariablesUsingRegex = (expression: string) => {
   const myRegexp = /(\s+)*(and|or|not){0,1}(\s+)*([\w\.\-\[\]]+)\s*:\s*"(\$[\w\.\-\(\)\[\]]+\$)"(\s+)*(and|or|not){0,1}(\s+)*/g;
   return expression.replace(myRegexp, replacement);
-  // return expression.replace(myRegexp, '');
 };

 export const replacement = (match: string, ...parts: string[]): string => {
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { getCriteriaFromHostType } from './get_criteria_from_host_type';
|
||||
import { HostsType } from '../../../store/hosts/model';
|
||||
|
||||
describe('get_criteria_from_host_type', () => {
|
||||
test('returns host names from criteria if the host type is details', () => {
|
||||
const criteria = getCriteriaFromHostType(HostsType.details, 'zeek-iowa');
|
||||
expect(criteria).toEqual([{ fieldName: 'host.name', fieldValue: 'zeek-iowa' }]);
|
||||
});
|
||||
|
||||
test('returns empty array from criteria if the host type is page but rather an empty array', () => {
|
||||
const criteria = getCriteriaFromHostType(HostsType.page, 'zeek-iowa');
|
||||
expect(criteria).toEqual([]);
|
||||
});
|
||||
|
||||
test('returns empty array from criteria if the host name is undefined and host type is details', () => {
|
||||
const criteria = getCriteriaFromHostType(HostsType.details, undefined);
|
||||
expect(criteria).toEqual([]);
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { HostsType } from '../../../store/hosts/model';
+import { CriteriaFields } from '../types';
+
+export const getCriteriaFromHostType = (
+  type: HostsType,
+  hostName: string | undefined
+): CriteriaFields[] => {
+  if (type === HostsType.details && hostName != null) {
+    return [{ fieldName: 'host.name', fieldValue: hostName }];
+  } else {
+    return [];
+  }
+};
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { getCriteriaFromNetworkType } from './get_criteria_from_network_type';
|
||||
import { NetworkType } from '../../../store/network/model';
|
||||
import { FlowTarget } from '../../../graphql/types';
|
||||
|
||||
describe('get_criteria_from_network_type', () => {
|
||||
test('returns network names from criteria if the network type is details and it is source', () => {
|
||||
const criteria = getCriteriaFromNetworkType(
|
||||
NetworkType.details,
|
||||
'127.0.0.1',
|
||||
FlowTarget.source
|
||||
);
|
||||
expect(criteria).toEqual([{ fieldName: 'source.ip', fieldValue: '127.0.0.1' }]);
|
||||
});
|
||||
|
||||
test('returns network names from criteria if the network type is details and it is destination', () => {
|
||||
const criteria = getCriteriaFromNetworkType(
|
||||
NetworkType.details,
|
||||
'127.0.0.1',
|
||||
FlowTarget.destination
|
||||
);
|
||||
expect(criteria).toEqual([{ fieldName: 'destination.ip', fieldValue: '127.0.0.1' }]);
|
||||
});
|
||||
|
||||
test('returns empty array if the network type is page', () => {
|
||||
const criteria = getCriteriaFromNetworkType(
|
||||
NetworkType.page,
|
||||
'127.0.0.1',
|
||||
FlowTarget.destination
|
||||
);
|
||||
expect(criteria).toEqual([]);
|
||||
});
|
||||
|
||||
test('returns empty array if flowTarget is missing', () => {
|
||||
const criteria = getCriteriaFromNetworkType(NetworkType.page, '127.0.0.1');
|
||||
expect(criteria).toEqual([]);
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { CriteriaFields } from '../types';
+import { NetworkType } from '../../../store/network/model';
+import { FlowTarget } from '../../../graphql/types';
+
+export const getCriteriaFromNetworkType = (
+  type: NetworkType,
+  ip: string | undefined,
+  flowTarget?: FlowTarget
+): CriteriaFields[] => {
+  if (type === NetworkType.details && ip != null) {
+    if (flowTarget === FlowTarget.source) {
+      return [{ fieldName: 'source.ip', fieldValue: ip }];
+    } else if (flowTarget === FlowTarget.destination) {
+      return [{ fieldName: 'destination.ip', fieldValue: ip }];
+    } else {
+      return [];
+    }
+  } else {
+    return [];
+  }
+};
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { HostItem } from '../../../graphql/types';
|
||||
import { CriteriaFields } from '../types';
|
||||
import { hostToCriteria } from './host_to_criteria';
|
||||
|
||||
describe('host_to_criteria', () => {
|
||||
test('converts a host to a criteria', () => {
|
||||
const hostItem: HostItem = {
|
||||
host: {
|
||||
name: ['host-name'],
|
||||
},
|
||||
};
|
||||
const expectedCriteria: CriteriaFields[] = [
|
||||
{
|
||||
fieldName: 'host.name',
|
||||
fieldValue: 'host-name',
|
||||
},
|
||||
];
|
||||
expect(hostToCriteria(hostItem)).toEqual(expectedCriteria);
|
||||
});
|
||||
|
||||
test('returns an empty array if the host.name is null', () => {
|
||||
const hostItem: HostItem = {
|
||||
host: {
|
||||
name: null,
|
||||
},
|
||||
};
|
||||
expect(hostToCriteria(hostItem)).toEqual([]);
|
||||
});
|
||||
|
||||
test('returns an empty array if the host is null', () => {
|
||||
const hostItem: HostItem = {
|
||||
host: null,
|
||||
};
|
||||
expect(hostToCriteria(hostItem)).toEqual([]);
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { CriteriaFields } from '../types';
+import { HostItem } from '../../../graphql/types';
+
+export const hostToCriteria = (hostItem: HostItem): CriteriaFields[] => {
+  if (hostItem.host != null && hostItem.host.name != null) {
+    const criteria: CriteriaFields[] = [
+      {
+        fieldName: 'host.name',
+        fieldValue: hostItem.host.name[0],
+      },
+    ];
+    return criteria;
+  } else {
+    return [];
+  }
+};
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { FlowTarget } from '../../../graphql/types';
|
||||
import { CriteriaFields } from '../types';
|
||||
import { networkToCriteria } from './network_to_criteria';
|
||||
|
||||
describe('network_to_criteria', () => {
|
||||
test('converts a network to a criteria of source if given a source', () => {
|
||||
const expectedCriteria: CriteriaFields[] = [
|
||||
{
|
||||
fieldName: 'source.ip',
|
||||
fieldValue: '127.0.0.1',
|
||||
},
|
||||
];
|
||||
expect(networkToCriteria('127.0.0.1', FlowTarget.source)).toEqual(expectedCriteria);
|
||||
});
|
||||
|
||||
test('converts a network to a criteria of destination if given a destination', () => {
|
||||
const expectedCriteria: CriteriaFields[] = [
|
||||
{
|
||||
fieldName: 'destination.ip',
|
||||
fieldValue: '127.0.0.1',
|
||||
},
|
||||
];
|
||||
expect(networkToCriteria('127.0.0.1', FlowTarget.destination)).toEqual(expectedCriteria);
|
||||
});
|
||||
|
||||
test('returns an empty array if the Flow Type is anything else', () => {
|
||||
expect(networkToCriteria('127.0.0.1', FlowTarget.server)).toEqual([]);
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { CriteriaFields } from '../types';
+import { FlowTarget } from '../../../graphql/types';
+
+export const networkToCriteria = (ip: string, flowTarget: FlowTarget): CriteriaFields[] => {
+  if (flowTarget === FlowTarget.source) {
+    return [{ fieldName: 'source.ip', fieldValue: ip }];
+  } else if (flowTarget === FlowTarget.destination) {
+    return [{ fieldName: 'destination.ip', fieldValue: ip }];
+  } else {
+    return [];
+  }
+};
@@ -16,11 +16,12 @@ import { BackgroundRefetch, BasicTableContainer } from '../../load_more_table';
 import { LoadingPanel } from '../../loading';
 import { getIntervalFromAnomalies } from '../anomaly/get_interval_from_anomalies';
 import { getSizeFromAnomalies } from '../anomaly/get_size_from_anomalies';
-import { dateTimesAreEqual } from './date_time_equality';
 import { AnomaliesHostTableProps } from '../types';
 import { hasMlUserPermissions } from '../permissions/has_ml_user_permissions';
 import { MlCapabilitiesContext } from '../permissions/ml_capabilities_provider';
 import { BasicTable } from './basic_table';
+import { hostEquality } from './host_equality';
+import { getCriteriaFromHostType } from '../criteria/get_criteria_from_host_type';

 const sorting = {
   sort: {
@@ -33,10 +34,10 @@ export const AnomaliesHostTable = React.memo<AnomaliesHostTableProps>(
   ({ startDate, endDate, narrowDateRange, hostName, skip, type }): JSX.Element | null => {
     const capabilities = useContext(MlCapabilitiesContext);
     const [loading, tableData] = useAnomaliesTableData({
-      influencers: [],
       startDate,
       endDate,
       skip,
+      criteriaFields: getCriteriaFromHostType(type, hostName),
     });

     const hosts = convertAnomaliesToHosts(tableData, hostName);
@@ -84,5 +85,5 @@ export const AnomaliesHostTable = React.memo<AnomaliesHostTableProps>(
       );
     }
   },
-  dateTimesAreEqual
+  hostEquality
 );
@@ -17,10 +17,11 @@ import { AnomaliesNetworkTableProps } from '../types';
 import { getAnomaliesNetworkTableColumnsCurated } from './get_anomalies_network_table_columns';
 import { getIntervalFromAnomalies } from '../anomaly/get_interval_from_anomalies';
 import { getSizeFromAnomalies } from '../anomaly/get_size_from_anomalies';
-import { dateTimesAreEqual } from './date_time_equality';
 import { hasMlUserPermissions } from '../permissions/has_ml_user_permissions';
 import { MlCapabilitiesContext } from '../permissions/ml_capabilities_provider';
 import { BasicTable } from './basic_table';
+import { networkEquality } from './network_equality';
+import { getCriteriaFromNetworkType } from '../criteria/get_criteria_from_network_type';

 const sorting = {
   sort: {
@@ -30,13 +31,13 @@
 };

 export const AnomaliesNetworkTable = React.memo<AnomaliesNetworkTableProps>(
-  ({ startDate, endDate, narrowDateRange, skip, ip, type }): JSX.Element | null => {
+  ({ startDate, endDate, narrowDateRange, skip, ip, type, flowTarget }): JSX.Element | null => {
     const capabilities = useContext(MlCapabilitiesContext);
     const [loading, tableData] = useAnomaliesTableData({
-      influencers: [],
       startDate,
       endDate,
       skip,
+      criteriaFields: getCriteriaFromNetworkType(type, ip, flowTarget),
     });

     const networks = convertAnomaliesToNetwork(tableData, ip);
@@ -89,5 +90,5 @@ export const AnomaliesNetworkTable = React.memo<AnomaliesNetworkTableProps>(
       );
     }
   },
-  dateTimesAreEqual
+  networkEquality
 );
@ -4,109 +4,122 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { dateTimesAreEqual } from './date_time_equality';
|
||||
import { HostOrNetworkProps } from '../types';
|
||||
import { hostEquality } from './host_equality';
|
||||
import { AnomaliesHostTableProps } from '../types';
|
||||
import { HostsType } from '../../../store/hosts/model';
|
||||
|
||||
describe('date_time_equality', () => {
|
||||
describe('host_equality', () => {
|
||||
test('it returns true if start and end date are equal', () => {
|
||||
const prev: HostOrNetworkProps = {
|
||||
const prev: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const next: HostOrNetworkProps = {
|
||||
const next: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const equal = dateTimesAreEqual(prev, next);
|
||||
const equal = hostEquality(prev, next);
|
||||
expect(equal).toEqual(true);
|
||||
});
|
||||
|
||||
test('it returns false if starts are not equal', () => {
|
||||
const prev: HostOrNetworkProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('1999').valueOf(),
|
||||
const prev: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2001').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const next: HostOrNetworkProps = {
|
||||
const next: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const equal = dateTimesAreEqual(prev, next);
|
||||
const equal = hostEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if starts are not equal for next', () => {
|
||||
const prev: HostOrNetworkProps = {
|
||||
const prev: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const next: HostOrNetworkProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('1999').valueOf(),
|
||||
const next: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2001').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const equal = dateTimesAreEqual(prev, next);
|
||||
const equal = hostEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if ends are not equal', () => {
|
||||
const prev: HostOrNetworkProps = {
|
||||
const prev: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2001').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const next: HostOrNetworkProps = {
|
||||
const next: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const equal = dateTimesAreEqual(prev, next);
|
||||
const equal = hostEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if ends are not equal for next', () => {
|
||||
const prev: HostOrNetworkProps = {
|
||||
const prev: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const next: HostOrNetworkProps = {
|
||||
const next: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2001').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const equal = dateTimesAreEqual(prev, next);
|
||||
const equal = hostEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if skip is not equal', () => {
|
||||
const prev: HostOrNetworkProps = {
|
||||
const prev: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: true,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const next: HostOrNetworkProps = {
|
||||
const next: AnomaliesHostTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: HostsType.details,
|
||||
};
|
||||
const equal = dateTimesAreEqual(prev, next);
|
||||
const equal = hostEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
});
|
|
@@ -4,11 +4,11 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { HostOrNetworkProps } from '../types';
+import { AnomaliesHostTableProps } from '../types';

-export const dateTimesAreEqual = (
-  prevProps: HostOrNetworkProps,
-  nextProps: HostOrNetworkProps
+export const hostEquality = (
+  prevProps: AnomaliesHostTableProps,
+  nextProps: AnomaliesHostTableProps
 ): boolean =>
   prevProps.startDate === nextProps.startDate &&
   prevProps.endDate === nextProps.endDate &&
@ -0,0 +1,147 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { networkEquality } from './network_equality';
|
||||
import { AnomaliesNetworkTableProps } from '../types';
|
||||
import { NetworkType } from '../../../store/network/model';
|
||||
import { FlowTarget } from '../../../graphql/types';
|
||||
|
||||
describe('network_equality', () => {
|
||||
test('it returns true if start and end date are equal', () => {
|
||||
const prev: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const next: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const equal = networkEquality(prev, next);
|
||||
expect(equal).toEqual(true);
|
||||
});
|
||||
|
||||
test('it returns false if starts are not equal', () => {
|
||||
const prev: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2001').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const next: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const equal = networkEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if starts are not equal for next', () => {
|
||||
const prev: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const next: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2001').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const equal = networkEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if ends are not equal', () => {
|
||||
const prev: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2001').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const next: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const equal = networkEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if ends are not equal for next', () => {
|
||||
const prev: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const next: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2001').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const equal = networkEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if skip is not equal', () => {
|
||||
const prev: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: true,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const next: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
};
|
||||
const equal = networkEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
|
||||
test('it returns false if flowType is not equal', () => {
|
||||
const prev: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: true,
|
||||
type: NetworkType.details,
|
||||
flowTarget: FlowTarget.source,
|
||||
};
|
||||
const next: AnomaliesNetworkTableProps = {
|
||||
startDate: new Date('2000').valueOf(),
|
||||
endDate: new Date('2000').valueOf(),
|
||||
narrowDateRange: jest.fn(),
|
||||
skip: false,
|
||||
type: NetworkType.details,
|
||||
flowTarget: FlowTarget.destination,
|
||||
};
|
||||
const equal = networkEquality(prev, next);
|
||||
expect(equal).toEqual(false);
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,16 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { AnomaliesNetworkTableProps } from '../types';
+
+export const networkEquality = (
+  prevProps: AnomaliesNetworkTableProps,
+  nextProps: AnomaliesNetworkTableProps
+): boolean =>
+  prevProps.startDate === nextProps.startDate &&
+  prevProps.endDate === nextProps.endDate &&
+  prevProps.skip === nextProps.skip &&
+  prevProps.flowTarget === nextProps.flowTarget;
@@ -6,6 +6,7 @@

 import { HostsType } from '../../store/hosts/model';
 import { NetworkType } from '../../store/network/model';
+import { FlowTarget } from '../../graphql/types';

 export interface Influencer {
   influencer_field_name: string;
@@ -39,6 +40,11 @@ export interface Influencer {
   influencer_field_values: string[];
 }

+export interface CriteriaFields {
+  fieldName: string;
+  fieldValue: string;
+}
+
 export interface InfluencerInput {
   fieldName: string;
   fieldValue: string;
@@ -91,6 +97,7 @@ export type AnomaliesHostTableProps = HostOrNetworkProps & {
 export type AnomaliesNetworkTableProps = HostOrNetworkProps & {
   ip?: string;
   type: NetworkType;
+  flowTarget?: FlowTarget;
 };

 export interface MlCapabilities {
@ -33,7 +33,7 @@ const emptyJob: Job[] = [];
|
|||
*/
|
||||
export const groupsData = async (headers: Record<string, string | undefined>): Promise<Group[]> => {
|
||||
try {
|
||||
const response = await fetch('/api/ml/jobs/groups', {
|
||||
const response = await fetch(`${chrome.getBasePath()}/api/ml/jobs/groups`, {
|
||||
method: 'GET',
|
||||
credentials: 'same-origin',
|
||||
headers: {
|
||||
|
@ -104,7 +104,7 @@ export const startDatafeeds = async (
|
|||
headers: Record<string, string | undefined>
|
||||
): Promise<StartDatafeedResponse> => {
|
||||
try {
|
||||
const response = await fetch('/api/ml/jobs/force_start_datafeeds', {
|
||||
const response = await fetch(`${chrome.getBasePath()}/api/ml/jobs/force_start_datafeeds`, {
|
||||
method: 'POST',
|
||||
credentials: 'same-origin',
|
||||
body: JSON.stringify({
|
||||
|
@ -136,25 +136,28 @@ export const stopDatafeeds = async (
|
|||
headers: Record<string, string | undefined>
|
||||
): Promise<[StopDatafeedResponse, CloseJobsResponse]> => {
|
||||
try {
|
||||
const stopDatafeedsResponse = await fetch('/api/ml/jobs/stop_datafeeds', {
|
||||
method: 'POST',
|
||||
credentials: 'same-origin',
|
||||
body: JSON.stringify({
|
||||
datafeedIds,
|
||||
}),
|
||||
headers: {
|
||||
'kbn-system-api': 'true',
|
||||
'content-type': 'application/json',
|
||||
'kbn-xsrf': chrome.getXsrfToken(),
|
||||
...headers,
|
||||
},
|
||||
});
|
||||
const stopDatafeedsResponse = await fetch(
|
||||
`${chrome.getBasePath()}/api/ml/jobs/stop_datafeeds`,
|
||||
{
|
||||
method: 'POST',
|
||||
credentials: 'same-origin',
|
||||
body: JSON.stringify({
|
||||
datafeedIds,
|
||||
}),
|
||||
headers: {
|
||||
'kbn-system-api': 'true',
|
||||
'content-type': 'application/json',
|
||||
'kbn-xsrf': chrome.getXsrfToken(),
|
||||
...headers,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
await throwIfNotOk(stopDatafeedsResponse);
|
||||
const stopDatafeedsResponseJson = await stopDatafeedsResponse.json();
|
||||
|
||||
const datafeedPrefix = 'datafeed-';
|
||||
const closeJobsResponse = await fetch('/api/ml/jobs/close_jobs', {
|
||||
const closeJobsResponse = await fetch(`${chrome.getBasePath()}/api/ml/jobs/close_jobs`, {
|
||||
method: 'POST',
|
||||
credentials: 'same-origin',
|
||||
body: JSON.stringify({
|
||||
|
@ -191,7 +194,7 @@ export const jobsSummary = async (
|
|||
headers: Record<string, string | undefined>
|
||||
): Promise<Job[]> => {
|
||||
try {
|
||||
const response = await fetch('/api/ml/jobs/jobs_summary', {
|
||||
const response = await fetch(`${chrome.getBasePath()}/api/ml/jobs/jobs_summary`, {
|
||||
method: 'POST',
|
||||
credentials: 'same-origin',
|
||||
body: JSON.stringify({ jobIds }),
|
||||
|
|
|
@ -38,12 +38,12 @@ import { HostsEmptyPage } from './hosts_empty_page';
|
|||
import { HostsKql } from './kql';
|
||||
import * as i18n from './translations';
|
||||
import { AnomalyTableProvider } from '../../components/ml/anomaly/anomaly_table_provider';
|
||||
import { hostToInfluencers } from '../../components/ml/influencers/host_to_influencers';
|
||||
import { setAbsoluteRangeDatePicker as dispatchAbsoluteRangeDatePicker } from '../../store/inputs/actions';
|
||||
import { InputsModelId } from '../../store/inputs/constants';
|
||||
import { scoreIntervalToDateTime } from '../../components/ml/score/score_interval_to_datetime';
|
||||
import { KpiHostDetailsQuery } from '../../containers/kpi_host_details';
|
||||
import { AnomaliesHostTable } from '../../components/ml/tables/anomalies_host_table';
|
||||
import { hostToCriteria } from '../../components/ml/criteria/host_to_criteria';
|
||||
|
||||
const type = hostsModel.HostsType.details;
|
||||
|
||||
|
@ -101,9 +101,10 @@ const HostDetailsComponent = pure<HostDetailsComponentProps>(
|
|||
>
|
||||
{({ hostOverview, loading, id, inspect, refetch }) => (
|
||||
<AnomalyTableProvider
|
||||
influencers={hostToInfluencers(hostOverview)}
|
||||
criteriaFields={hostToCriteria(hostOverview)}
|
||||
startDate={from}
|
||||
endDate={to}
|
||||
skip={isInitializing}
|
||||
>
|
||||
{({ isLoadingAnomaliesData, anomaliesData }) => (
|
||||
<HostOverviewManage
|
||||
|
|
|
@@ -39,10 +39,10 @@ import { NetworkKql } from './kql';
 import { NetworkEmptyPage } from './network_empty_page';
 import * as i18n from './translations';
 import { AnomalyTableProvider } from '../../components/ml/anomaly/anomaly_table_provider';
-import { networkToInfluencers } from '../../components/ml/influencers/network_to_influencers';
 import { InputsModelId } from '../../store/inputs/constants';
 import { scoreIntervalToDateTime } from '../../components/ml/score/score_interval_to_datetime';
 import { AnomaliesNetworkTable } from '../../components/ml/tables/anomalies_network_table';
+import { networkToCriteria } from '../../components/ml/criteria/network_to_criteria';

 const DomainsTableManage = manageQuery(DomainsTable);
 const TlsTableManage = manageQuery(TlsTable);
@@ -102,9 +102,10 @@ export const IPDetailsComponent = pure<IPDetailsComponentProps>(
           >
             {({ id, inspect, ipOverviewData, loading, refetch }) => (
               <AnomalyTableProvider
-                influencers={networkToInfluencers(ip)}
+                criteriaFields={networkToCriteria(ip, flowTarget)}
                 startDate={from}
                 endDate={to}
+                skip={isInitializing}
               >
                 {({ isLoadingAnomaliesData, anomaliesData }) => (
                   <IpOverviewManage
@@ -261,6 +262,7 @@ export const IPDetailsComponent = pure<IPDetailsComponentProps>(
             skip={isInitializing}
             ip={ip}
             type={networkModel.NetworkType.details}
+            flowTarget={flowTarget}
             narrowDateRange={(score, interval) => {
               const fromTo = scoreIntervalToDateTime(score, interval);
               setAbsoluteRangeDatePicker({