Mirror of https://github.com/elastic/kibana.git, synced 2025-04-23 17:28:26 -04:00
[Discover] Improve context code (#114284)
parent 9e65b12c4b
commit e1133e11ac
16 changed files with 341 additions and 563 deletions
@@ -72,6 +72,8 @@ const indexPattern = {
getFieldByName: (name: string) => fields.getByName(name),
timeFieldName: 'timestamp',
getFormatterForField: () => ({ convert: () => 'formatted' }),
isTimeNanosBased: () => false,
popularizeField: () => {},
} as unknown as IndexPattern;

indexPattern.flattenHit = indexPatterns.flattenHitWrapper(indexPattern, indexPattern.metaFields);

@@ -26,7 +26,6 @@ const mockNavigationPlugin = { ui: { TopNavMenu: mockTopNavMenu } };
describe('ContextApp test', () => {
const defaultProps = {
indexPattern: indexPatternMock,
indexPatternId: 'the-index-pattern-id',
anchorId: 'mocked_anchor_id',
};

@@ -12,7 +12,7 @@ import classNames from 'classnames';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiText, EuiPageContent, EuiPage, EuiSpacer } from '@elastic/eui';
import { cloneDeep } from 'lodash';
import { esFilters, SortDirection } from '../../../../../data/public';
import { esFilters } from '../../../../../data/public';
import { DOC_TABLE_LEGACY, SEARCH_FIELDS_FROM_SOURCE } from '../../../../common';
import { ContextErrorMessage } from './components/context_error_message';
import { IndexPattern, IndexPatternField } from '../../../../../data/common';
@@ -31,21 +31,20 @@ const ContextAppContentMemoized = memo(ContextAppContent);

export interface ContextAppProps {
indexPattern: IndexPattern;
indexPatternId: string;
anchorId: string;
}

export const ContextApp = ({ indexPattern, indexPatternId, anchorId }: ContextAppProps) => {
export const ContextApp = ({ indexPattern, anchorId }: ContextAppProps) => {
const services = getServices();
const { uiSettings: config, capabilities, indexPatterns, navigation, filterManager } = services;
const { uiSettings, capabilities, indexPatterns, navigation, filterManager } = services;

const isLegacy = useMemo(() => config.get(DOC_TABLE_LEGACY), [config]);
const useNewFieldsApi = useMemo(() => !config.get(SEARCH_FIELDS_FROM_SOURCE), [config]);
const isLegacy = useMemo(() => uiSettings.get(DOC_TABLE_LEGACY), [uiSettings]);
const useNewFieldsApi = useMemo(() => !uiSettings.get(SEARCH_FIELDS_FROM_SOURCE), [uiSettings]);

/**
* Context app state
*/
const { appState, setAppState } = useContextAppState({ indexPattern, services });
const { appState, setAppState } = useContextAppState({ services });
const prevAppState = useRef<AppState>();

/**
@@ -54,7 +53,6 @@ export const ContextApp = ({ indexPattern, indexPatternId, anchorId }: ContextAp
const { fetchedState, fetchContextRows, fetchAllRows, fetchSurroundingRows } = useContextAppFetch(
{
anchorId,
indexPatternId,
indexPattern,
appState,
useNewFieldsApi,
@@ -79,7 +77,6 @@ export const ContextApp = ({ indexPattern, indexPatternId, anchorId }: ContextAp
prevAppState.current = cloneDeep(appState);
}, [
appState,
indexPatternId,
anchorId,
fetchContextRows,
fetchAllRows,
@@ -89,7 +86,7 @@ export const ContextApp = ({ indexPattern, indexPatternId, anchorId }: ContextAp

const { columns, onAddColumn, onRemoveColumn, onSetColumns } = useDataGridColumns({
capabilities,
config,
config: uiSettings,
indexPattern,
indexPatterns,
state: appState,
@@ -112,7 +109,7 @@ export const ContextApp = ({ indexPattern, indexPatternId, anchorId }: ContextAp
field,
values,
operation,
indexPatternId
indexPattern.id!
);
filterManager.addFilters(newFilters);
if (indexPatterns) {
@@ -120,7 +117,7 @@ export const ContextApp = ({ indexPattern, indexPatternId, anchorId }: ContextAp
await popularizeField(indexPattern, fieldName, indexPatterns, capabilities);
}
},
[filterManager, indexPatternId, indexPatterns, indexPattern, capabilities]
[filterManager, indexPatterns, indexPattern, capabilities]
);

const TopNavMenu = navigation.ui.TopNavMenu;
@@ -166,7 +163,6 @@ export const ContextApp = ({ indexPattern, indexPatternId, anchorId }: ContextAp
onAddColumn={onAddColumn}
onRemoveColumn={onRemoveColumn}
onSetColumns={onSetColumns}
sort={appState.sort as [[string, SortDirection]]}
predecessorCount={appState.predecessorCount}
successorCount={appState.successorCount}
setAppState={setAppState}

@@ -22,6 +22,7 @@ import { DiscoverServices } from '../../../build_services';
import { MAX_CONTEXT_SIZE, MIN_CONTEXT_SIZE } from './utils/constants';
import { DocTableContext } from '../main/components/doc_table/doc_table_context';
import { EsHitRecordList } from '../../types';
import { SortPairArr } from '../main/components/doc_table/lib/get_sort';

export interface ContextAppContentProps {
columns: string[];
@@ -33,7 +34,6 @@ export interface ContextAppContentProps {
predecessorCount: number;
successorCount: number;
rows: EsHitRecordList;
sort: [[string, SortDirection]];
predecessors: EsHitRecordList;
successors: EsHitRecordList;
anchorStatus: LoadingStatus;
@@ -65,7 +65,6 @@ export function ContextAppContent({
predecessorCount,
successorCount,
rows,
sort,
predecessors,
successors,
anchorStatus,
@@ -111,6 +110,9 @@ export function ContextAppContent({
},
[setAppState]
);
const sort = useMemo(() => {
return [[indexPattern.timeFieldName!, SortDirection.desc]];
}, [indexPattern]);

return (
<Fragment>
@@ -149,7 +151,7 @@ export function ContextAppContent({
expandedDoc={expandedDoc}
isLoading={isAnchorLoading}
sampleSize={0}
sort={sort}
sort={sort as SortPairArr[]}
isSortEnabled={false}
showTimeCol={showTimeCol}
services={services}

@@ -49,5 +49,5 @@ export function ContextAppRoute(props: ContextAppProps) {
return <LoadingIndicator />;
}

return <ContextApp indexPatternId={indexPatternId} anchorId={id} indexPattern={indexPattern} />;
return <ContextApp anchorId={id} indexPattern={indexPattern} />;
}

@@ -9,7 +9,6 @@
import sinon from 'sinon';
import moment from 'moment';

import { IndexPatternsContract } from '../../../../../../data/public';
import { EsHitRecordList } from '../../../types';

type SortHit = {
@@ -18,18 +17,6 @@ type SortHit = {
sort: [number, number];
};

export function createIndexPatternsStub() {
return {
get: sinon.spy((indexPatternId) =>
Promise.resolve({
id: indexPatternId,
isTimeNanosBased: () => false,
popularizeField: () => {},
})
),
} as unknown as IndexPatternsContract;
}

/**
* A stubbed search source with a `fetch` method that returns all of `_stubHits`.
*/

@@ -6,30 +6,29 @@
* Side Public License, v 1.
*/

import { EsQuerySortValue, SortDirection } from '../../../../../../data/public';
import { createIndexPatternsStub, createSearchSourceStub } from './_stubs';
import { fetchAnchorProvider, updateSearchSource } from './anchor';
import { IndexPattern, SortDirection } from '../../../../../../data/public';
import { createSearchSourceStub } from './_stubs';
import { fetchAnchor, updateSearchSource } from './anchor';
import { indexPatternMock } from '../../../../__mocks__/index_pattern';
import { savedSearchMock } from '../../../../__mocks__/saved_search';
import { EsHitRecord, EsHitRecordList } from '../../../types';
import { EsHitRecordList } from '../../../types';

describe('context app', function () {
let fetchAnchor: (
indexPatternId: string,
anchorId: string,
sort: EsQuerySortValue[]
) => Promise<EsHitRecord>;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let searchSourceStub: any;
const indexPattern = {
id: 'INDEX_PATTERN_ID',
isTimeNanosBased: () => false,
popularizeField: () => {},
} as unknown as IndexPattern;

describe('function fetchAnchor', function () {
beforeEach(() => {
searchSourceStub = createSearchSourceStub([{ _id: 'hit1' }] as unknown as EsHitRecordList);
fetchAnchor = fetchAnchorProvider(createIndexPatternsStub(), searchSourceStub);
});

it('should use the `fetch` method of the SearchSource', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(() => {
@@ -38,7 +37,7 @@ describe('context app', function () {
});

it('should configure the SearchSource to not inherit from the implicit root', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(() => {
@@ -49,7 +48,7 @@ describe('context app', function () {
});

it('should set the SearchSource index pattern', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(() => {
@@ -59,7 +58,7 @@ describe('context app', function () {
});

it('should set the SearchSource version flag to true', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(() => {
@@ -70,7 +69,7 @@ describe('context app', function () {
});

it('should set the SearchSource size to 1', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(() => {
@@ -81,7 +80,7 @@ describe('context app', function () {
});

it('should set the SearchSource query to an ids query', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(() => {
@@ -103,7 +102,7 @@ describe('context app', function () {
});

it('should set the SearchSource sort order', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(() => {
@@ -145,7 +144,7 @@ describe('context app', function () {
it('should reject with an error when no hits were found', function () {
searchSourceStub._stubHits = [];

return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(
@@ -161,7 +160,7 @@ describe('context app', function () {
it('should return the first hit after adding an anchor marker', function () {
searchSourceStub._stubHits = [{ property1: 'value1' }, { property2: 'value2' }];

return fetchAnchor('INDEX_PATTERN_ID', 'id', [
return fetchAnchor('id', indexPattern, searchSourceStub, [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then((anchorDocument) => {
@@ -174,16 +173,18 @@ describe('context app', function () {
describe('useNewFields API', () => {
beforeEach(() => {
searchSourceStub = createSearchSourceStub([{ _id: 'hit1' }] as unknown as EsHitRecordList);
fetchAnchor = fetchAnchorProvider(createIndexPatternsStub(), searchSourceStub, true);
});

it('should request fields if useNewFieldsApi set', function () {
searchSourceStub._stubHits = [{ property1: 'value1' }, { property2: 'value2' }];

return fetchAnchor('INDEX_PATTERN_ID', 'id', [
{ '@timestamp': SortDirection.desc },
{ _doc: SortDirection.desc },
]).then(() => {
return fetchAnchor(
'id',
indexPattern,
searchSourceStub,
[{ '@timestamp': SortDirection.desc }, { _doc: SortDirection.desc }],
true
).then(() => {
const setFieldsSpy = searchSourceStub.setField.withArgs('fields');
const removeFieldsSpy = searchSourceStub.removeField.withArgs('fieldsFromSource');
expect(setFieldsSpy.calledOnce).toBe(true);

@@ -6,46 +6,35 @@
* Side Public License, v 1.
*/

import { get } from 'lodash';
import { i18n } from '@kbn/i18n';

import {
ISearchSource,
IndexPatternsContract,
EsQuerySortValue,
IndexPattern,
} from '../../../../../../data/public';
import { ISearchSource, EsQuerySortValue, IndexPattern } from '../../../../../../data/public';
import { EsHitRecord } from '../../../types';

export function fetchAnchorProvider(
indexPatterns: IndexPatternsContract,
export async function fetchAnchor(
anchorId: string,
indexPattern: IndexPattern,
searchSource: ISearchSource,
sort: EsQuerySortValue[],
useNewFieldsApi: boolean = false
) {
return async function fetchAnchor(
indexPatternId: string,
anchorId: string,
sort: EsQuerySortValue[]
): Promise<EsHitRecord> {
const indexPattern = await indexPatterns.get(indexPatternId);
updateSearchSource(searchSource, anchorId, sort, useNewFieldsApi, indexPattern);
): Promise<EsHitRecord> {
updateSearchSource(searchSource, anchorId, sort, useNewFieldsApi, indexPattern);

const response = await searchSource.fetch();
const doc = get(response, ['hits', 'hits', 0]);
const response = await searchSource.fetch();
const doc = response.hits?.hits?.[0];

if (!doc) {
throw new Error(
i18n.translate('discover.context.failedToLoadAnchorDocumentErrorDescription', {
defaultMessage: 'Failed to load anchor document.',
})
);
}
if (!doc) {
throw new Error(
i18n.translate('discover.context.failedToLoadAnchorDocumentErrorDescription', {
defaultMessage: 'Failed to load anchor document.',
})
);
}

return {
...doc,
isAnchor: true,
} as EsHitRecord;
};
return {
...doc,
isAnchor: true,
} as EsHitRecord;
}

export function updateSearchSource(

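For orientation only, here is a minimal caller sketch (not part of the commit) of the refactored fetchAnchor, which now takes an explicit index pattern and search source instead of an index pattern id. The fetchAnchor signature and import paths come from the hunks above; the loadAnchor wrapper and its arguments are assumptions.

import { IndexPattern, ISearchSource, SortDirection } from '../../../../../../data/public';
import { fetchAnchor } from './anchor';

// Hypothetical helper: load the anchor document, newest-first, tie-broken by _doc.
async function loadAnchor(anchorId: string, indexPattern: IndexPattern, searchSource: ISearchSource) {
  const sort = [
    { [indexPattern.timeFieldName!]: SortDirection.desc },
    { _doc: SortDirection.desc },
  ];
  // The final `true` opts into the new fields API (fields instead of fieldsFromSource).
  return fetchAnchor(anchorId, indexPattern, searchSource, sort, true);
}
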
@@ -8,9 +8,9 @@

import moment from 'moment';
import { get, last } from 'lodash';
import { SortDirection } from 'src/plugins/data/common';
import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs';
import { fetchContextProvider, SurrDocType } from './context';
import { IndexPattern, SortDirection } from 'src/plugins/data/common';
import { createContextSearchSourceStub } from './_stubs';
import { fetchSurroundingDocs, SurrDocType } from './context';
import { setServices } from '../../../../kibana_services';
import { Query } from '../../../../../../data/public';
import { DiscoverServices } from '../../../../build_services';
@@ -30,9 +30,6 @@ interface Timestamp {

describe('context predecessors', function () {
let fetchPredecessors: (
indexPatternId: string,
timeField: string,
sortDir: SortDirection,
timeValIso: string,
timeValNr: number,
tieBreakerField: string,
@@ -41,6 +38,12 @@ describe('context predecessors', function () {
) => Promise<EsHitRecordList>;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let mockSearchSource: any;
const indexPattern = {
id: 'INDEX_PATTERN_ID',
timeFieldName: '@timestamp',
isTimeNanosBased: () => false,
popularizeField: () => {},
} as unknown as IndexPattern;

describe('function fetchPredecessors', function () {
beforeEach(() => {
@@ -56,30 +59,20 @@ describe('context predecessors', function () {
},
} as unknown as DiscoverServices);

fetchPredecessors = (
indexPatternId,
timeField,
sortDir,
timeValIso,
timeValNr,
tieBreakerField,
tieBreakerValue,
size = 10
) => {
fetchPredecessors = (timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size = 10) => {
const anchor = {
_source: {
[timeField]: timeValIso,
[indexPattern.timeFieldName!]: timeValIso,
},
sort: [timeValNr, tieBreakerValue],
};

return fetchContextProvider(createIndexPatternsStub()).fetchSurroundingDocs(
return fetchSurroundingDocs(
SurrDocType.PREDECESSORS,
indexPatternId,
indexPattern,
anchor as EsHitRecord,
timeField,
tieBreakerField,
sortDir,
SortDirection.desc,
size,
[]
);
@@ -95,19 +88,12 @@ describe('context predecessors', function () {
mockSearchSource._createStubHit(MS_PER_DAY * 1000),
];

return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
3
).then((hits: EsHitRecordList) => {
expect(mockSearchSource.fetch.calledOnce).toBe(true);
expect(hits).toEqual(mockSearchSource._stubHits.slice(0, 3));
});
return fetchPredecessors(ANCHOR_TIMESTAMP_3000, MS_PER_DAY * 3000, '_doc', 0, 3).then(
(hits: EsHitRecordList) => {
expect(mockSearchSource.fetch.calledOnce).toBe(true);
expect(hits).toEqual(mockSearchSource._stubHits.slice(0, 3));
}
);
});

it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () {
@@ -119,33 +105,26 @@ describe('context predecessors', function () {
mockSearchSource._createStubHit(MS_PER_DAY * 2990),
];

return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
6
).then((hits: EsHitRecordList) => {
const intervals: Timestamp[] = mockSearchSource.setField.args
.filter(([property]: string) => property === 'query')
.map(([, { query }]: [string, { query: Query }]) =>
get(query, ['bool', 'must', 'constant_score', 'filter', 'range', '@timestamp'])
);
return fetchPredecessors(ANCHOR_TIMESTAMP_3000, MS_PER_DAY * 3000, '_doc', 0, 6).then(
(hits: EsHitRecordList) => {
const intervals: Timestamp[] = mockSearchSource.setField.args
.filter(([property]: string) => property === 'query')
.map(([, { query }]: [string, { query: Query }]) =>
get(query, ['bool', 'must', 'constant_score', 'filter', 'range', '@timestamp'])
);

expect(
intervals.every(({ gte, lte }) => (gte && lte ? moment(gte).isBefore(lte) : true))
).toBe(true);
// should have started at the given time
expect(intervals[0].gte).toEqual(moment(MS_PER_DAY * 3000).toISOString());
// should have ended with a half-open interval
expect(Object.keys(last(intervals) ?? {})).toEqual(['format', 'gte']);
expect(intervals.length).toBeGreaterThan(1);
expect(
intervals.every(({ gte, lte }) => (gte && lte ? moment(gte).isBefore(lte) : true))
).toBe(true);
// should have started at the given time
expect(intervals[0].gte).toEqual(moment(MS_PER_DAY * 3000).toISOString());
// should have ended with a half-open interval
expect(Object.keys(last(intervals) ?? {})).toEqual(['format', 'gte']);
expect(intervals.length).toBeGreaterThan(1);

expect(hits).toEqual(mockSearchSource._stubHits.slice(0, 3));
});
expect(hits).toEqual(mockSearchSource._stubHits.slice(0, 3));
}
);
});

it('should perform multiple queries until the expected hit count is returned', function () {
@@ -156,57 +135,41 @@ describe('context predecessors', function () {
mockSearchSource._createStubHit(MS_PER_DAY * 1000),
];

return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_1000,
MS_PER_DAY * 1000,
'_doc',
0,
3
).then((hits: EsHitRecordList) => {
const intervals: Timestamp[] = mockSearchSource.setField.args
.filter(([property]: string) => property === 'query')
.map(([, { query }]: [string, { query: Query }]) => {
return get(query, ['bool', 'must', 'constant_score', 'filter', 'range', '@timestamp']);
});
return fetchPredecessors(ANCHOR_TIMESTAMP_1000, MS_PER_DAY * 1000, '_doc', 0, 3).then(
(hits: EsHitRecordList) => {
const intervals: Timestamp[] = mockSearchSource.setField.args
.filter(([property]: string) => property === 'query')
.map(([, { query }]: [string, { query: Query }]) => {
return get(query, [
'bool',
'must',
'constant_score',
'filter',
'range',
'@timestamp',
]);
});

// should have started at the given time
expect(intervals[0].gte).toEqual(moment(MS_PER_DAY * 1000).toISOString());
// should have stopped before reaching MS_PER_DAY * 1700
expect(moment(last(intervals)?.lte).valueOf()).toBeLessThan(MS_PER_DAY * 1700);
expect(intervals.length).toBeGreaterThan(1);
expect(hits).toEqual(mockSearchSource._stubHits.slice(-3));
});
// should have started at the given time
expect(intervals[0].gte).toEqual(moment(MS_PER_DAY * 1000).toISOString());
// should have stopped before reaching MS_PER_DAY * 1700
expect(moment(last(intervals)?.lte).valueOf()).toBeLessThan(MS_PER_DAY * 1700);
expect(intervals.length).toBeGreaterThan(1);
expect(hits).toEqual(mockSearchSource._stubHits.slice(-3));
}
);
});

it('should return an empty array when no hits were found', function () {
return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3,
MS_PER_DAY * 3,
'_doc',
0,
3
).then((hits: EsHitRecordList) => {
expect(hits).toEqual([]);
});
return fetchPredecessors(ANCHOR_TIMESTAMP_3, MS_PER_DAY * 3, '_doc', 0, 3).then(
(hits: EsHitRecordList) => {
expect(hits).toEqual([]);
}
);
});

it('should configure the SearchSource to not inherit from the implicit root', function () {
return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3,
MS_PER_DAY * 3,
'_doc',
0,
3
).then(() => {
return fetchPredecessors(ANCHOR_TIMESTAMP_3, MS_PER_DAY * 3, '_doc', 0, 3).then(() => {
const setParentSpy = mockSearchSource.setParent;
expect(setParentSpy.alwaysCalledWith(undefined)).toBe(true);
expect(setParentSpy.called).toBe(true);
@@ -214,16 +177,7 @@ describe('context predecessors', function () {
});

it('should set the tiebreaker sort order to the opposite as the time field', function () {
return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP,
MS_PER_DAY,
'_doc',
0,
3
).then(() => {
return fetchPredecessors(ANCHOR_TIMESTAMP, MS_PER_DAY, '_doc', 0, 3).then(() => {
expect(
mockSearchSource.setField.calledWith('sort', [
{ '@timestamp': { order: 'asc', format: 'strict_date_optional_time' } },
@@ -248,32 +202,23 @@ describe('context predecessors', function () {
},
} as unknown as DiscoverServices);

fetchPredecessors = (
indexPatternId,
timeField,
sortDir,
timeValIso,
timeValNr,
tieBreakerField,
tieBreakerValue,
size = 10
) => {
fetchPredecessors = (timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size = 10) => {
const anchor = {
_source: {
[timeField]: timeValIso,
[indexPattern.timeFieldName!]: timeValIso,
},
sort: [timeValNr, tieBreakerValue],
};

return fetchContextProvider(createIndexPatternsStub(), true).fetchSurroundingDocs(
return fetchSurroundingDocs(
SurrDocType.PREDECESSORS,
indexPatternId,
indexPattern,
anchor as EsHitRecord,
timeField,
tieBreakerField,
sortDir,
SortDirection.desc,
size,
[]
[],
true
);
};
});
@@ -287,23 +232,16 @@ describe('context predecessors', function () {
mockSearchSource._createStubHit(MS_PER_DAY * 1000),
];

return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
3
).then((hits: EsHitRecordList) => {
const setFieldsSpy = mockSearchSource.setField.withArgs('fields');
const removeFieldsSpy = mockSearchSource.removeField.withArgs('fieldsFromSource');
expect(mockSearchSource.fetch.calledOnce).toBe(true);
expect(removeFieldsSpy.calledOnce).toBe(true);
expect(setFieldsSpy.calledOnce).toBe(true);
expect(hits).toEqual(mockSearchSource._stubHits.slice(0, 3));
});
return fetchPredecessors(ANCHOR_TIMESTAMP_3000, MS_PER_DAY * 3000, '_doc', 0, 3).then(
(hits: EsHitRecordList) => {
const setFieldsSpy = mockSearchSource.setField.withArgs('fields');
const removeFieldsSpy = mockSearchSource.removeField.withArgs('fieldsFromSource');
expect(mockSearchSource.fetch.calledOnce).toBe(true);
expect(removeFieldsSpy.calledOnce).toBe(true);
expect(setFieldsSpy.calledOnce).toBe(true);
expect(hits).toEqual(mockSearchSource._stubHits.slice(0, 3));
}
);
});
});
});

@@ -8,11 +8,11 @@

import moment from 'moment';
import { get, last } from 'lodash';
import { SortDirection } from 'src/plugins/data/common';
import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs';
import { IndexPattern, SortDirection } from 'src/plugins/data/common';
import { createContextSearchSourceStub } from './_stubs';
import { setServices } from '../../../../kibana_services';
import { Query } from '../../../../../../data/public';
import { fetchContextProvider, SurrDocType } from './context';
import { fetchSurroundingDocs, SurrDocType } from './context';
import { DiscoverServices } from '../../../../build_services';
import { EsHitRecord, EsHitRecordList } from '../../../types';

@@ -29,9 +29,6 @@ interface Timestamp {

describe('context successors', function () {
let fetchSuccessors: (
indexPatternId: string,
timeField: string,
sortDir: SortDirection,
timeValIso: string,
timeValNr: number,
tieBreakerField: string,
@@ -40,6 +37,12 @@ describe('context successors', function () {
) => Promise<EsHitRecordList>;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let mockSearchSource: any;
const indexPattern = {
id: 'INDEX_PATTERN_ID',
timeFieldName: '@timestamp',
isTimeNanosBased: () => false,
popularizeField: () => {},
} as unknown as IndexPattern;

describe('function fetchSuccessors', function () {
beforeEach(() => {
@@ -55,30 +58,20 @@ describe('context successors', function () {
},
} as unknown as DiscoverServices);

fetchSuccessors = (
indexPatternId,
timeField,
sortDir,
timeValIso,
timeValNr,
tieBreakerField,
tieBreakerValue,
size
) => {
fetchSuccessors = (timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => {
const anchor = {
_source: {
[timeField]: timeValIso,
[indexPattern.timeFieldName!]: timeValIso,
},
sort: [timeValNr, tieBreakerValue],
};

return fetchContextProvider(createIndexPatternsStub()).fetchSurroundingDocs(
return fetchSurroundingDocs(
SurrDocType.SUCCESSORS,
indexPatternId,
indexPattern,
anchor as EsHitRecord,
timeField,
tieBreakerField,
sortDir,
SortDirection.desc,
size,
[]
);
@@ -94,19 +87,12 @@ describe('context successors', function () {
mockSearchSource._createStubHit(MS_PER_DAY * 3000 - 2),
];

return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
3
).then((hits) => {
expect(mockSearchSource.fetch.calledOnce).toBe(true);
expect(hits).toEqual(mockSearchSource._stubHits.slice(-3));
});
return fetchSuccessors(ANCHOR_TIMESTAMP_3000, MS_PER_DAY * 3000, '_doc', 0, 3).then(
(hits) => {
expect(mockSearchSource.fetch.calledOnce).toBe(true);
expect(hits).toEqual(mockSearchSource._stubHits.slice(-3));
}
);
});

it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () {
@@ -118,33 +104,26 @@ describe('context successors', function () {
mockSearchSource._createStubHit(MS_PER_DAY * 2990),
];

return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
6
).then((hits) => {
const intervals: Timestamp[] = mockSearchSource.setField.args
.filter(([property]: [string]) => property === 'query')
.map(([, { query }]: [string, { query: Query }]) =>
get(query, ['bool', 'must', 'constant_score', 'filter', 'range', '@timestamp'])
);
return fetchSuccessors(ANCHOR_TIMESTAMP_3000, MS_PER_DAY * 3000, '_doc', 0, 6).then(
(hits) => {
const intervals: Timestamp[] = mockSearchSource.setField.args
.filter(([property]: [string]) => property === 'query')
.map(([, { query }]: [string, { query: Query }]) =>
get(query, ['bool', 'must', 'constant_score', 'filter', 'range', '@timestamp'])
);

expect(
intervals.every(({ gte, lte }) => (gte && lte ? moment(gte).isBefore(lte) : true))
).toBe(true);
// should have started at the given time
expect(intervals[0].lte).toEqual(moment(MS_PER_DAY * 3000).toISOString());
// should have ended with a half-open interval
expect(Object.keys(last(intervals) ?? {})).toEqual(['format', 'lte']);
expect(intervals.length).toBeGreaterThan(1);
expect(
intervals.every(({ gte, lte }) => (gte && lte ? moment(gte).isBefore(lte) : true))
).toBe(true);
// should have started at the given time
expect(intervals[0].lte).toEqual(moment(MS_PER_DAY * 3000).toISOString());
// should have ended with a half-open interval
expect(Object.keys(last(intervals) ?? {})).toEqual(['format', 'lte']);
expect(intervals.length).toBeGreaterThan(1);

expect(hits).toEqual(mockSearchSource._stubHits.slice(-3));
});
expect(hits).toEqual(mockSearchSource._stubHits.slice(-3));
}
);
});

it('should perform multiple queries until the expected hit count is returned', function () {
@@ -157,58 +136,33 @@ describe('context successors', function () {
mockSearchSource._createStubHit(MS_PER_DAY * 1000),
];

return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
4
).then((hits) => {
const intervals: Timestamp[] = mockSearchSource.setField.args
.filter(([property]: [string]) => property === 'query')
.map(([, { query }]: [string, { query: Query }]) =>
get(query, ['bool', 'must', 'constant_score', 'filter', 'range', '@timestamp'])
);
return fetchSuccessors(ANCHOR_TIMESTAMP_3000, MS_PER_DAY * 3000, '_doc', 0, 4).then(
(hits) => {
const intervals: Timestamp[] = mockSearchSource.setField.args
.filter(([property]: [string]) => property === 'query')
.map(([, { query }]: [string, { query: Query }]) =>
get(query, ['bool', 'must', 'constant_score', 'filter', 'range', '@timestamp'])
);

// should have started at the given time
expect(intervals[0].lte).toEqual(moment(MS_PER_DAY * 3000).toISOString());
// should have stopped before reaching MS_PER_DAY * 2200
expect(moment(last(intervals)?.gte).valueOf()).toBeGreaterThan(MS_PER_DAY * 2200);
expect(intervals.length).toBeGreaterThan(1);
// should have started at the given time
expect(intervals[0].lte).toEqual(moment(MS_PER_DAY * 3000).toISOString());
// should have stopped before reaching MS_PER_DAY * 2200
expect(moment(last(intervals)?.gte).valueOf()).toBeGreaterThan(MS_PER_DAY * 2200);
expect(intervals.length).toBeGreaterThan(1);

expect(hits).toEqual(mockSearchSource._stubHits.slice(0, 4));
});
expect(hits).toEqual(mockSearchSource._stubHits.slice(0, 4));
}
);
});

it('should return an empty array when no hits were found', function () {
return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3,
MS_PER_DAY * 3,
'_doc',
0,
3
).then((hits) => {
return fetchSuccessors(ANCHOR_TIMESTAMP_3, MS_PER_DAY * 3, '_doc', 0, 3).then((hits) => {
expect(hits).toEqual([]);
});
});

it('should configure the SearchSource to not inherit from the implicit root', function () {
return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3,
MS_PER_DAY * 3,
'_doc',
0,
3
).then(() => {
return fetchSuccessors(ANCHOR_TIMESTAMP_3, MS_PER_DAY * 3, '_doc', 0, 3).then(() => {
const setParentSpy = mockSearchSource.setParent;
expect(setParentSpy.alwaysCalledWith(undefined)).toBe(true);
expect(setParentSpy.called).toBe(true);
@@ -216,16 +170,7 @@ describe('context successors', function () {
});

it('should set the tiebreaker sort order to the same as the time field', function () {
return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP,
MS_PER_DAY,
'_doc',
0,
3
).then(() => {
return fetchSuccessors(ANCHOR_TIMESTAMP, MS_PER_DAY, '_doc', 0, 3).then(() => {
expect(
mockSearchSource.setField.calledWith('sort', [
{ '@timestamp': { order: SortDirection.desc, format: 'strict_date_optional_time' } },
@@ -250,32 +195,23 @@ describe('context successors', function () {
},
} as unknown as DiscoverServices);

fetchSuccessors = (
indexPatternId,
timeField,
sortDir,
timeValIso,
timeValNr,
tieBreakerField,
tieBreakerValue,
size
) => {
fetchSuccessors = (timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => {
const anchor = {
_source: {
[timeField]: timeValIso,
[indexPattern.timeFieldName!]: timeValIso,
},
sort: [timeValNr, tieBreakerValue],
};

return fetchContextProvider(createIndexPatternsStub(), true).fetchSurroundingDocs(
return fetchSurroundingDocs(
SurrDocType.SUCCESSORS,
indexPatternId,
indexPattern,
anchor as EsHitRecord,
timeField,
tieBreakerField,
sortDir,
SortDirection.desc,
size,
[]
[],
true
);
};
});
@@ -289,23 +225,16 @@ describe('context successors', function () {
mockSearchSource._createStubHit(MS_PER_DAY * 3000 - 2),
];

return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
SortDirection.desc,
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
3
).then((hits) => {
expect(mockSearchSource.fetch.calledOnce).toBe(true);
expect(hits).toEqual(mockSearchSource._stubHits.slice(-3));
const setFieldsSpy = mockSearchSource.setField.withArgs('fields');
const removeFieldsSpy = mockSearchSource.removeField.withArgs('fieldsFromSource');
expect(removeFieldsSpy.calledOnce).toBe(true);
expect(setFieldsSpy.calledOnce).toBe(true);
});
return fetchSuccessors(ANCHOR_TIMESTAMP_3000, MS_PER_DAY * 3000, '_doc', 0, 3).then(
(hits) => {
expect(mockSearchSource.fetch.calledOnce).toBe(true);
expect(hits).toEqual(mockSearchSource._stubHits.slice(-3));
const setFieldsSpy = mockSearchSource.setField.withArgs('fields');
const removeFieldsSpy = mockSearchSource.removeField.withArgs('fieldsFromSource');
expect(removeFieldsSpy.calledOnce).toBe(true);
expect(setFieldsSpy.calledOnce).toBe(true);
}
);
});
});
});

@@ -5,7 +5,7 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Filter, IndexPattern, IndexPatternsContract, SearchSource } from 'src/plugins/data/public';
import { Filter, IndexPattern, SearchSource } from 'src/plugins/data/public';
import { reverseSortDir, SortDirection } from './utils/sorting';
import { convertIsoToMillis, extractNanos } from './utils/date_conversion';
import { fetchHitsInInterval } from './utils/fetch_hits_in_interval';
@@ -25,88 +25,82 @@ const DAY_MILLIS = 24 * 60 * 60 * 1000;
// look from 1 day up to 10000 days into the past and future
const LOOKUP_OFFSETS = [0, 1, 7, 30, 365, 10000].map((days) => days * DAY_MILLIS);

function fetchContextProvider(indexPatterns: IndexPatternsContract, useNewFieldsApi?: boolean) {
return {
fetchSurroundingDocs,
};

/**
* Fetch successor or predecessor documents of a given anchor document
*
* @param {SurrDocType} type - `successors` or `predecessors`
* @param {string} indexPatternId
* @param {EsHitRecord} anchor - anchor record
* @param {string} timeField - name of the timefield, that's sorted on
* @param {string} tieBreakerField - name of the tie breaker, the 2nd sort field
* @param {SortDirection} sortDir - direction of sorting
* @param {number} size - number of records to retrieve
* @param {Filter[]} filters - to apply in the elastic query
* @returns {Promise<object[]>}
*/
async function fetchSurroundingDocs(
type: SurrDocType,
indexPatternId: string,
anchor: EsHitRecord,
timeField: string,
tieBreakerField: string,
sortDir: SortDirection,
size: number,
filters: Filter[]
): Promise<EsHitRecordList> {
if (typeof anchor !== 'object' || anchor === null || !size) {
return [];
}
const indexPattern = await indexPatterns.get(indexPatternId);
const { data } = getServices();
const searchSource = data.search.searchSource.createEmpty() as SearchSource;
updateSearchSource(searchSource, indexPattern, filters, Boolean(useNewFieldsApi));
const sortDirToApply = type === SurrDocType.SUCCESSORS ? sortDir : reverseSortDir(sortDir);

const nanos = indexPattern.isTimeNanosBased() ? extractNanos(anchor.fields[timeField][0]) : '';
const timeValueMillis =
nanos !== '' ? convertIsoToMillis(anchor.fields[timeField][0]) : anchor.sort[0];

const intervals = generateIntervals(LOOKUP_OFFSETS, timeValueMillis as number, type, sortDir);
let documents: EsHitRecordList = [];

for (const interval of intervals) {
const remainingSize = size - documents.length;

if (remainingSize <= 0) {
break;
}

const searchAfter = getEsQuerySearchAfter(
type,
documents,
timeField,
anchor,
nanos,
useNewFieldsApi
);

const sort = getEsQuerySort(timeField, tieBreakerField, sortDirToApply, nanos);

const hits = await fetchHitsInInterval(
searchSource,
timeField,
sort,
sortDirToApply,
interval,
searchAfter,
remainingSize,
nanos,
anchor._id
);

documents =
type === SurrDocType.SUCCESSORS
? [...documents, ...hits]
: [...hits.slice().reverse(), ...documents];
}

return documents;
/**
* Fetch successor or predecessor documents of a given anchor document
*
* @param {SurrDocType} type - `successors` or `predecessors`
* @param {IndexPattern} indexPattern
* @param {EsHitRecord} anchor - anchor record
* @param {string} tieBreakerField - name of the tie breaker, the 2nd sort field
* @param {SortDirection} sortDir - direction of sorting
* @param {number} size - number of records to retrieve
* @param {Filter[]} filters - to apply in the elastic query
* @param {boolean} useNewFieldsApi
* @returns {Promise<object[]>}
*/
export async function fetchSurroundingDocs(
type: SurrDocType,
indexPattern: IndexPattern,
anchor: EsHitRecord,
tieBreakerField: string,
sortDir: SortDirection,
size: number,
filters: Filter[],
useNewFieldsApi?: boolean
): Promise<EsHitRecordList> {
if (typeof anchor !== 'object' || anchor === null || !size) {
return [];
}
const { data } = getServices();
const timeField = indexPattern.timeFieldName!;
const searchSource = data.search.searchSource.createEmpty() as SearchSource;
updateSearchSource(searchSource, indexPattern, filters, Boolean(useNewFieldsApi));
const sortDirToApply = type === SurrDocType.SUCCESSORS ? sortDir : reverseSortDir(sortDir);

const nanos = indexPattern.isTimeNanosBased() ? extractNanos(anchor.fields[timeField][0]) : '';
const timeValueMillis =
nanos !== '' ? convertIsoToMillis(anchor.fields[timeField][0]) : anchor.sort[0];

const intervals = generateIntervals(LOOKUP_OFFSETS, timeValueMillis as number, type, sortDir);
let documents: EsHitRecordList = [];

for (const interval of intervals) {
const remainingSize = size - documents.length;

if (remainingSize <= 0) {
break;
}

const searchAfter = getEsQuerySearchAfter(
type,
documents,
timeField,
anchor,
nanos,
useNewFieldsApi
);

const sort = getEsQuerySort(timeField, tieBreakerField, sortDirToApply, nanos);

const hits = await fetchHitsInInterval(
searchSource,
timeField,
sort,
sortDirToApply,
interval,
searchAfter,
remainingSize,
nanos,
anchor._id
);

documents =
type === SurrDocType.SUCCESSORS
? [...documents, ...hits]
: [...hits.slice().reverse(), ...documents];
}

return documents;
}

export function updateSearchSource(
@@ -125,5 +119,3 @@ export function updateSearchSource(
.setField('filter', filters)
.setField('trackTotalHits', false);
}

export { fetchContextProvider };

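For orientation only, a minimal caller sketch (not part of the commit) of the exported fetchSurroundingDocs, which replaces the fetchContextProvider factory. The signature and import paths follow the hunks and test files above; the loadPredecessors wrapper, the tie breaker field, and the size are assumptions.

import { IndexPattern, SortDirection } from 'src/plugins/data/common';
import { fetchSurroundingDocs, SurrDocType } from './context';
import { EsHitRecord } from '../../../types';

// Hypothetical helper: fetch 5 rows preceding an already-loaded anchor document.
async function loadPredecessors(indexPattern: IndexPattern, anchor: EsHitRecord) {
  return fetchSurroundingDocs(
    SurrDocType.PREDECESSORS, // or SurrDocType.SUCCESSORS
    indexPattern,             // the time field is read from indexPattern.timeFieldName
    anchor,
    '_doc',                   // tie breaker (2nd sort field)
    SortDirection.desc,
    5,                        // number of records to retrieve
    [],                       // no extra filters
    true                      // use the new fields API
  );
}
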
@@ -24,7 +24,6 @@ describe('Test Discover Context State', () => {
history.push('/');
state = getState({
defaultSize: 4,
timeFieldName: 'time',
history,
uiSettings: {
get: <T>(key: string) =>
@@ -44,12 +43,6 @@ describe('Test Discover Context State', () => {
],
"filters": Array [],
"predecessorCount": 4,
"sort": Array [
Array [
"time",
"desc",
],
],
"successorCount": 4,
}
`);
@@ -62,41 +55,29 @@ describe('Test Discover Context State', () => {
state.setAppState({ predecessorCount: 10 });
state.flushToUrl();
expect(getCurrentUrl()).toMatchInlineSnapshot(
`"/#?_a=(columns:!(_source),filters:!(),predecessorCount:10,sort:!(!(time,desc)),successorCount:4)"`
`"/#?_a=(columns:!(_source),filters:!(),predecessorCount:10,successorCount:4)"`
);
});
test('getState -> url to appState syncing', async () => {
history.push(
'/#?_a=(columns:!(_source),predecessorCount:1,sort:!(time,desc),successorCount:1)'
);
history.push('/#?_a=(columns:!(_source),predecessorCount:1,successorCount:1)');
expect(state.appState.getState()).toMatchInlineSnapshot(`
Object {
"columns": Array [
"_source",
],
"predecessorCount": 1,
"sort": Array [
"time",
"desc",
],
"successorCount": 1,
}
`);
});
test('getState -> url to appState syncing with return to a url without state', async () => {
history.push(
'/#?_a=(columns:!(_source),predecessorCount:1,sort:!(time,desc),successorCount:1)'
);
history.push('/#?_a=(columns:!(_source),predecessorCount:1,successorCount:1)');
expect(state.appState.getState()).toMatchInlineSnapshot(`
Object {
"columns": Array [
"_source",
],
"predecessorCount": 1,
"sort": Array [
"time",
"desc",
],
"successorCount": 1,
}
`);
@@ -107,10 +88,6 @@ describe('Test Discover Context State', () => {
"_source",
],
"predecessorCount": 1,
"sort": Array [
"time",
"desc",
],
"successorCount": 1,
}
`);
@@ -183,7 +160,7 @@ describe('Test Discover Context State', () => {
`);
state.flushToUrl();
expect(getCurrentUrl()).toMatchInlineSnapshot(
`"/#?_g=(filters:!(('$state':(store:globalState),meta:(alias:!n,disabled:!f,index:'logstash-*',key:extension,negate:!f,params:(query:jpg),type:phrase),query:(match_phrase:(extension:(query:jpg))))))&_a=(columns:!(_source),filters:!(('$state':(store:appState),meta:(alias:!n,disabled:!f,index:'logstash-*',key:extension,negate:!t,params:(query:png),type:phrase),query:(match_phrase:(extension:(query:png))))),predecessorCount:4,sort:!(!(time,desc)),successorCount:4)"`
`"/#?_g=(filters:!(('$state':(store:globalState),meta:(alias:!n,disabled:!f,index:'logstash-*',key:extension,negate:!f,params:(query:jpg),type:phrase),query:(match_phrase:(extension:(query:jpg))))))&_a=(columns:!(_source),filters:!(('$state':(store:appState),meta:(alias:!n,disabled:!f,index:'logstash-*',key:extension,negate:!t,params:(query:png),type:phrase),query:(match_phrase:(extension:(query:png))))),predecessorCount:4,successorCount:4)"`
);
});
});

@@ -16,7 +16,7 @@ import {
withNotifyOnErrors,
ReduxLikeStateContainer,
} from '../../../../../../kibana_utils/public';
import { esFilters, FilterManager, Filter, SortDirection } from '../../../../../../data/public';
import { esFilters, FilterManager, Filter } from '../../../../../../data/public';
import { handleSourceColumnState } from '../../../helpers/state_helpers';

export interface AppState {
@@ -32,14 +32,16 @@ export interface AppState {
* Number of records to be fetched before anchor records (newer records)
*/
predecessorCount: number;
/**
* Sorting of the records to be fetched, assumed to be a legacy parameter
*/
sort: string[][];
/**
* Number of records to be fetched after the anchor records (older records)
*/
successorCount: number;
/**
* Array of the used sorting [[field,direction],...]
* this is actually not needed in Discover Context, there's no sorting
* but it's used in the DocTable component
*/
sort?: string[][];
}

interface GlobalState {
@@ -54,10 +56,6 @@ export interface GetStateParams {
* Number of records to be fetched when 'Load' link/button is clicked
*/
defaultSize: number;
/**
* The timefield used for sorting
*/
timeFieldName: string;
/**
* Determins the use of long vs. short/hashed urls
*/
@@ -124,7 +122,6 @@ const APP_STATE_URL_KEY = '_a';
*/
export function getState({
defaultSize,
timeFieldName,
storeInSessionStorage = false,
history,
toasts,
@@ -140,12 +137,7 @@ export function getState({
const globalStateContainer = createStateContainer<GlobalState>(globalStateInitial);

const appStateFromUrl = stateStorage.get(APP_STATE_URL_KEY) as AppState;
const appStateInitial = createInitialAppState(
defaultSize,
timeFieldName,
appStateFromUrl,
uiSettings
);
const appStateInitial = createInitialAppState(defaultSize, appStateFromUrl, uiSettings);
const appStateContainer = createStateContainer<AppState>(appStateInitial);

const { start, stop } = syncStates([
@@ -267,7 +259,6 @@ function getFilters(state: AppState | GlobalState): Filter[] {
*/
function createInitialAppState(
defaultSize: number,
timeFieldName: string,
urlState: AppState,
uiSettings: IUiSettingsClient
): AppState {
@@ -276,7 +267,6 @@ function createInitialAppState(
filters: [],
predecessorCount: defaultSize,
successorCount: defaultSize,
sort: [[timeFieldName, SortDirection.desc]],
};
if (typeof urlState !== 'object') {
return defaultState;

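Illustrative only (not part of the commit): an AppState value matching the updated interface, where sort is now optional; the field set is inferred from the interface hunks and the state test snapshots above, and the concrete values are assumptions.

const exampleAppState: AppState = {
  columns: ['_source'],
  filters: [],
  predecessorCount: 5,
  successorCount: 5,
  // sort omitted: it is only needed by the legacy DocTable component
};
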
@@ -8,12 +8,9 @@

import { act, renderHook } from '@testing-library/react-hooks';
import { setServices, getServices } from '../../../../kibana_services';
import { SortDirection } from '../../../../../../data/public';
import { createFilterManagerMock } from '../../../../../../data/public/query/filter_manager/filter_manager.mock';
import { CONTEXT_TIE_BREAKER_FIELDS_SETTING } from '../../../../../common';
import { DiscoverServices } from '../../../../build_services';
import { indexPatternMock } from '../../../../__mocks__/index_pattern';
import { indexPatternsMock } from '../../../../__mocks__/index_patterns';
import { FailureReason, LoadingStatus } from '../services/context_query_state';
import { ContextAppFetchProps, useContextAppFetch } from './use_context_app_fetch';
import {
@@ -21,6 +18,9 @@ import {
mockPredecessorHits,
mockSuccessorHits,
} from '../__mocks__/use_context_app_fetch';
import { indexPatternWithTimefieldMock } from '../../../../__mocks__/index_pattern_with_timefield';
import { createContextSearchSourceStub } from '../services/_stubs';
import { IndexPattern } from '../../../../../../data_views/common';

const mockFilterManager = createFilterManagerMock();

@@ -28,20 +28,19 @@ jest.mock('../services/context', () => {
const originalModule = jest.requireActual('../services/context');
return {
...originalModule,
fetchContextProvider: () => ({
fetchSurroundingDocs: (type: string, indexPatternId: string) => {
if (!indexPatternId) {
throw new Error();
}
return type === 'predecessors' ? mockPredecessorHits : mockSuccessorHits;
},
}),

fetchSurroundingDocs: (type: string, indexPattern: IndexPattern) => {
if (!indexPattern || !indexPattern.id) {
throw new Error();
}
return type === 'predecessors' ? mockPredecessorHits : mockSuccessorHits;
},
};
});

jest.mock('../services/anchor', () => ({
fetchAnchorProvider: () => (indexPatternId: string) => {
if (!indexPatternId) {
fetchAnchor: (anchorId: string, indexPattern: IndexPattern) => {
if (!indexPattern.id || !anchorId) {
throw new Error();
}
return mockAnchorHit;
@@ -50,16 +49,16 @@ jest.mock('../services/anchor', () => ({

const initDefaults = (tieBreakerFields: string[], indexPatternId = 'the-index-pattern-id') => {
const dangerNotification = jest.fn();
const mockSearchSource = createContextSearchSourceStub('timestamp');

setServices({
data: {
search: {
searchSource: {
createEmpty: jest.fn(),
createEmpty: jest.fn().mockImplementation(() => mockSearchSource),
},
},
},
indexPatterns: indexPatternsMock,
toastNotifications: { addDanger: dangerNotification },
core: { notifications: { toasts: [] } },
history: () => {},
@@ -77,10 +76,8 @@ const initDefaults = (tieBreakerFields: string[], indexPatternId = 'the-index-pa
dangerNotification,
props: {
anchorId: 'mock_anchor_id',
indexPatternId,
indexPattern: indexPatternMock,
indexPattern: { ...indexPatternWithTimefieldMock, id: indexPatternId },
appState: {
sort: [['order_date', SortDirection.desc]],
predecessorCount: 2,
successorCount: 2,
},

@@ -7,11 +7,10 @@
*/
import React, { useCallback, useMemo, useState } from 'react';
import { i18n } from '@kbn/i18n';
import { fromPairs } from 'lodash';
import { CONTEXT_TIE_BREAKER_FIELDS_SETTING } from '../../../../../common';
import { DiscoverServices } from '../../../../build_services';
import { fetchAnchorProvider } from '../services/anchor';
import { fetchContextProvider, SurrDocType } from '../services/context';
import { fetchAnchor } from '../services/anchor';
import { fetchSurroundingDocs, SurrDocType } from '../services/context';
import { MarkdownSimple, toMountPoint } from '../../../../../../kibana_react/public';
import { IndexPattern, SortDirection } from '../../../../../../data/public';
import {
@@ -30,7 +29,6 @@ const createError = (statusKey: string, reason: FailureReason, error?: Error) =>

export interface ContextAppFetchProps {
anchorId: string;
indexPatternId: string;
indexPattern: IndexPattern;
appState: AppState;
useNewFieldsApi: boolean;
@@ -39,13 +37,12 @@ export interface ContextAppFetchProps {

export function useContextAppFetch({
anchorId,
indexPatternId,
indexPattern,
appState,
useNewFieldsApi,
services,
}: ContextAppFetchProps) {
const { uiSettings: config, data, indexPatterns, toastNotifications, filterManager } = services;
const { uiSettings: config, data, toastNotifications, filterManager } = services;

const searchSource = useMemo(() => {
return data.search.searchSource.createEmpty();
@@ -54,13 +51,6 @@ export function useContextAppFetch({
() => getFirstSortableField(indexPattern, config.get(CONTEXT_TIE_BREAKER_FIELDS_SETTING)),
[config, indexPattern]
);
const fetchAnchor = useMemo(() => {
return fetchAnchorProvider(indexPatterns, searchSource, useNewFieldsApi);
}, [indexPatterns, searchSource, useNewFieldsApi]);
const { fetchSurroundingDocs } = useMemo(
() => fetchContextProvider(indexPatterns, useNewFieldsApi),
[indexPatterns, useNewFieldsApi]
);

const [fetchedState, setFetchedState] = useState<ContextFetchState>(
getInitialContextQueryState()
@@ -71,8 +61,6 @@ export function useContextAppFetch({
}, []);

const fetchAnchorRow = useCallback(async () => {
const { sort } = appState;
const [[, sortDir]] = sort;
const errorTitle = i18n.translate('discover.context.unableToLoadAnchorDocumentDescription', {
defaultMessage: 'Unable to load the anchor document',
});
@@ -94,10 +82,11 @@ export function useContextAppFetch({

try {
setState({ anchorStatus: { value: LoadingStatus.LOADING } });
const anchor = await fetchAnchor(indexPatternId, anchorId, [
fromPairs(sort),
{ [tieBreakerField]: sortDir },
]);
const sort = [
{ [indexPattern.timeFieldName!]: SortDirection.desc },
{ [tieBreakerField]: SortDirection.desc },
];
const anchor = await fetchAnchor(anchorId, indexPattern, searchSource, sort, useNewFieldsApi);
setState({ anchor, anchorStatus: { value: LoadingStatus.LOADED } });
return anchor;
} catch (error) {
@@ -108,20 +97,18 @@ export function useContextAppFetch({
});
}
}, [
appState,
tieBreakerField,
setState,
toastNotifications,
fetchAnchor,
indexPatternId,
indexPattern,
anchorId,
searchSource,
useNewFieldsApi,
]);

const fetchSurroundingRows = useCallback(
async (type: SurrDocType, fetchedAnchor?: EsHitRecord) => {
const filters = filterManager.getFilters();
const { sort } = appState;
const [[sortField, sortDir]] = sort;

const count =
type === SurrDocType.PREDECESSORS ? appState.predecessorCount : appState.successorCount;
@@ -135,13 +122,13 @@ export function useContextAppFetch({
setState({ [statusKey]: { value: LoadingStatus.LOADING } });
const rows = await fetchSurroundingDocs(
type,
indexPatternId,
indexPattern,
anchor as EsHitRecord,
sortField,
tieBreakerField,
sortDir as SortDirection,
SortDirection.desc,
count,
filters
filters,
useNewFieldsApi
);
setState({ [type]: rows, [statusKey]: { value: LoadingStatus.LOADED } });
} catch (error) {
@@ -158,9 +145,9 @@ export function useContextAppFetch({
fetchedState.anchor,
tieBreakerField,
setState,
fetchSurroundingDocs,
indexPatternId,
indexPattern,
toastNotifications,
useNewFieldsApi,
]
);

@@ -9,29 +9,21 @@ import { useEffect, useMemo, useState } from 'react';

import { cloneDeep } from 'lodash';
import { CONTEXT_DEFAULT_SIZE_SETTING } from '../../../../../common';
import { IndexPattern } from '../../../../../../data/public';
import { DiscoverServices } from '../../../../build_services';
import { AppState, getState } from '../services/context_state';

export function useContextAppState({
indexPattern,
services,
}: {
indexPattern: IndexPattern;
services: DiscoverServices;
}) {
export function useContextAppState({ services }: { services: DiscoverServices }) {
const { uiSettings: config, history, core, filterManager } = services;

const stateContainer = useMemo(() => {
return getState({
defaultSize: parseInt(config.get(CONTEXT_DEFAULT_SIZE_SETTING), 10),
timeFieldName: indexPattern.timeFieldName!,
storeInSessionStorage: config.get('state:storeInSessionStorage'),
history: history(),
toasts: core.notifications.toasts,
uiSettings: config,
});
}, [config, history, indexPattern, core.notifications.toasts]);
}, [config, history, core.notifications.toasts]);

const [appState, setState] = useState<AppState>(stateContainer.appState.getState());
