Refactor, deangularize courier/search source (#45235)

* Initial refactor of search source

* Add abort signal to search source fetch and remove cancelQueued (see the sketch below)

* Remove usages of Angular Promises

* Remove usages of angular "sessionId" service

* Remove config as dependency

* Update deps on config and esShardTimeout

* Remove remaining angular dependencies from SearchSource

* Fix Karma tests

* Separate callClient and handleResponse and add tests for each

* Add tests for fetchSoon

* Add back search source test and convert to jest

* Create search strategy registry test

* Revert empty test

* Remove filter predicates from search source

* Update typings and throw response errors

* Fix proxy to properly return response from ES

* Update jest snapshots

* Remove unused translations

* Don't pass search request to onRequestStart, create it afterwards

* Fix search source & get search params tests

* Fix issue with angular scope not firing after setting state on vis

* Fix tag cloud vis

* Fix setting of visConfig so it does not happen asynchronously

* Remove unused snapshots

* Remove unused reference to IPrivate

Authored by Lukas Olson on 2019-10-18 11:38:09 -07:00, committed by GitHub
parent 9ff6d86379
commit 3c7b1603f5
87 changed files with 1506 additions and 3328 deletions
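
To make the recurring change in the diffs below concrete, here is a minimal, hypothetical sketch (not code from this commit) of the consumer-side pattern: callers stop wiring an 'abort' listener to searchSource.cancelQueued() and instead pass an { abortSignal } option to fetch(), treating AbortError as a non-error. The searchSource value and option name follow the diffs below; everything else is illustrative.

// Minimal sketch (hypothetical helper): aborting a SearchSource fetch after this PR.
const abortController = new AbortController();

async function fetchResults(searchSource: any) {
  try {
    // New pattern: pass the signal to fetch() instead of calling searchSource.cancelQueued()
    // from an 'abort' listener. fetch() rejects with an AbortError when the signal fires.
    return await searchSource.fetch({ abortSignal: abortController.signal });
  } catch (error) {
    // An aborted fetch is expected and is not surfaced as a UI error.
    if (error.name === 'AbortError') return;
    throw error;
  }
}

// Later, e.g. on scope destroy or before re-fetching:
abortController.abort();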

View file

@ -55,6 +55,32 @@ describe('build query', function () {
expect(result.filter).to.eql(expectedESQueries);
});
it('should remove disabled filters', function () {
const filters = [
{
match_all: {},
meta: { type: 'match_all', negate: true, disabled: true },
},
];
const expectedESQueries = [];
const result = buildQueryFromFilters(filters);
expect(result.must_not).to.eql(expectedESQueries);
});
it('should remove falsy filters', function () {
const filters = [null, undefined];
const expectedESQueries = [];
const result = buildQueryFromFilters(filters);
expect(result.must_not).to.eql(expectedESQueries);
expect(result.must).to.eql(expectedESQueries);
});
it('should place negated filters in the must_not clause', function () {
const filters = [
{

View file

@ -60,6 +60,7 @@ const cleanFilter = function (filter) {
};
export function buildQueryFromFilters(filters = [], indexPattern, ignoreFilterIfFieldNotInIndex) {
filters = filters.filter(filter => filter && !_.get(filter, ['meta', 'disabled']));
return {
must: [],
filter: filters

View file

@ -123,12 +123,12 @@ class ListControl extends Control {
this.useTimeFilter,
ancestorFilters
);
this.abortController.signal.addEventListener('abort', () => searchSource.cancelQueued());
const abortSignal = this.abortController.signal;
this.lastQuery = query;
let resp;
try {
resp = await searchSource.fetch();
resp = await searchSource.fetch({ abortSignal });
} catch(error) {
// If the fetch was aborted then no need to surface this error in the UI
if (error.name === 'AbortError') return;

View file

@ -66,11 +66,11 @@ class RangeControl extends Control {
const aggs = minMaxAgg(indexPattern.fields.getByName(fieldName));
const searchSource = createSearchSource(this.kbnApi, null, indexPattern, aggs, this.useTimeFilter);
this.abortController.signal.addEventListener('abort', () => searchSource.cancelQueued());
const abortSignal = this.abortController.signal;
let resp;
try {
resp = await searchSource.fetch();
resp = await searchSource.fetch({ abortSignal });
} catch(error) {
// If the fetch was aborted then no need to surface this error in the UI
if (error.name === 'AbortError') return;

View file

@ -29,7 +29,6 @@ import chrome from 'ui/chrome';
import { TimeRange } from 'src/plugins/data/public';
import { SearchSource } from '../../../../ui/public/courier/search_source';
// @ts-ignore
import { SearchSourceProvider } from '../../../../ui/public/courier/search_source';
import { FilterBarQueryFilterProvider } from '../../../../ui/public/filter_manager/query_filter';
import { buildTabularInspectorData } from '../../../../ui/public/inspector/build_tabular_inspector_data';
@ -100,8 +99,8 @@ const handleCourierRequest = async ({
return aggs.toDsl(metricsAtAllLevels);
});
requestSearchSource.onRequestStart((paramSearchSource: SearchSource, searchRequest: unknown) => {
return aggs.onSearchRequestStart(paramSearchSource, searchRequest);
requestSearchSource.onRequestStart((paramSearchSource: SearchSource, options: any) => {
return aggs.onSearchRequestStart(paramSearchSource, options);
});
if (timeRange) {
@ -118,7 +117,7 @@ const handleCourierRequest = async ({
const queryHash = calculateObjectHash(reqBody);
// We only need to reexecute the query, if forceFetch was true or the hash of the request body has changed
// since the last request
const shouldQuery = forceFetch || searchSource.lastQuery !== queryHash;
const shouldQuery = forceFetch || (searchSource as any).lastQuery !== queryHash;
if (shouldQuery) {
inspectorAdapters.requests.reset();
@ -139,18 +138,13 @@ const handleCourierRequest = async ({
request.stats(getRequestInspectorStats(requestSearchSource));
try {
// Abort any in-progress requests before fetching again
if (abortSignal) {
abortSignal.addEventListener('abort', () => requestSearchSource.cancelQueued());
}
const response = await requestSearchSource.fetch({ abortSignal });
const response = await requestSearchSource.fetch();
searchSource.lastQuery = queryHash;
(searchSource as any).lastQuery = queryHash;
request.stats(getResponseInspectorStats(searchSource, response)).ok({ json: response });
searchSource.rawResponse = response;
(searchSource as any).rawResponse = response;
} catch (e) {
// Log any error during request to the inspector
request.error({ json: e });
@ -166,7 +160,7 @@ const handleCourierRequest = async ({
// Note that rawResponse is not deeply cloned here, so downstream applications using courier
// must take care not to mutate it, or it could have unintended side effects, e.g. displaying
// response data incorrectly in the inspector.
let resp = searchSource.rawResponse;
let resp = (searchSource as any).rawResponse;
for (const agg of aggs.aggs) {
if (has(agg, 'type.postFlightRequest')) {
resp = await agg.type.postFlightRequest(
@ -180,7 +174,7 @@ const handleCourierRequest = async ({
}
}
searchSource.finalResponse = resp;
(searchSource as any).finalResponse = resp;
const parsedTimeRange = timeRange ? getTime(aggs.indexPattern, timeRange) : null;
const tabifyParams = {
@ -191,23 +185,24 @@ const handleCourierRequest = async ({
const tabifyCacheHash = calculateObjectHash({ tabifyAggs: aggs, ...tabifyParams });
// We only need to reexecute tabify, if either we did a new request or some input params to tabify changed
const shouldCalculateNewTabify = shouldQuery || searchSource.lastTabifyHash !== tabifyCacheHash;
const shouldCalculateNewTabify =
shouldQuery || (searchSource as any).lastTabifyHash !== tabifyCacheHash;
if (shouldCalculateNewTabify) {
searchSource.lastTabifyHash = tabifyCacheHash;
searchSource.tabifiedResponse = tabifyAggResponse(
(searchSource as any).lastTabifyHash = tabifyCacheHash;
(searchSource as any).tabifiedResponse = tabifyAggResponse(
aggs,
searchSource.finalResponse,
(searchSource as any).finalResponse,
tabifyParams
);
}
inspectorAdapters.data.setTabularLoader(
() => buildTabularInspectorData(searchSource.tabifiedResponse, queryFilter),
() => buildTabularInspectorData((searchSource as any).tabifiedResponse, queryFilter),
{ returnsFormattedValues: true }
);
return searchSource.tabifiedResponse;
return (searchSource as any).tabifiedResponse;
};
export const esaggs = (): ExpressionFunction<typeof name, Context, Arguments, Return> => ({
@ -249,7 +244,6 @@ export const esaggs = (): ExpressionFunction<typeof name, Context, Arguments, Re
const $injector = await chrome.dangerouslyGetActiveInjector();
const Private: Function = $injector.get('Private');
const { indexPatterns } = data.indexPatterns;
const SearchSourceClass = Private(SearchSourceProvider);
const queryFilter = Private(FilterBarQueryFilterProvider);
const aggConfigsState = JSON.parse(args.aggConfigs);
@ -257,7 +251,7 @@ export const esaggs = (): ExpressionFunction<typeof name, Context, Arguments, Re
const aggs = new AggConfigs(indexPattern, aggConfigsState);
// we should move searchSource creation inside courier request handler
const searchSource = new SearchSourceClass();
const searchSource = new SearchSource();
searchSource.setField('index', indexPattern);
searchSource.setField('size', 0);

View file

@ -30,13 +30,16 @@ export const visualization = () => ({
const { visData, visConfig, params } = config;
const visType = config.visType || visConfig.type;
const $injector = await chrome.dangerouslyGetActiveInjector();
const $rootScope = $injector.get('$rootScope') as any;
const Private = $injector.get('Private') as any;
const Vis = Private(VisProvider);
if (handlers.vis) {
// special case in visualize, we need to render first (without executing the expression), for maps to work
if (visConfig) {
handlers.vis.setCurrentState({ type: visType, params: visConfig });
$rootScope.$apply(() => {
handlers.vis.setCurrentState({ type: visType, params: visConfig });
});
}
} else {
handlers.vis = new Vis({

View file

@ -17,6 +17,7 @@
* under the License.
*/
import { searchSourceMock } from '../../../../../ui/public/courier/search_source/mocks';
import { SavedObjectDashboard } from '../saved_dashboard/saved_dashboard';
export function getSavedDashboardMock(
@ -26,10 +27,7 @@ export function getSavedDashboardMock(
id: '123',
title: 'my dashboard',
panelsJSON: '[]',
searchSource: {
getOwnField: (param: any) => param,
setField: () => {},
},
searchSource: searchSourceMock,
copyOnSave: false,
timeRestore: false,
timeTo: 'now',

View file

@ -230,7 +230,10 @@ function discoverController(
// the saved savedSearch
const savedSearch = $route.current.locals.savedSearch;
let abortController;
$scope.$on('$destroy', () => {
if (abortController) abortController.abort();
savedSearch.destroy();
subscriptions.unsubscribe();
});
@ -753,7 +756,8 @@ function discoverController(
$scope.updateTime();
// Abort any in-progress requests before fetching again
$scope.searchSource.cancelQueued();
if (abortController) abortController.abort();
abortController = new AbortController();
$scope.updateDataSource()
.then(setupVisualization)
@ -761,7 +765,9 @@ function discoverController(
$state.save();
$scope.fetchStatus = fetchStatuses.LOADING;
logInspectorRequest();
return $scope.searchSource.fetch();
return $scope.searchSource.fetch({
abortSignal: abortController.signal
});
})
.then(onResults)
.catch((error) => {
@ -1040,8 +1046,8 @@ function discoverController(
);
visSavedObject.vis = $scope.vis;
$scope.searchSource.onRequestStart((searchSource, searchRequest) => {
return $scope.vis.getAggConfig().onSearchRequestStart(searchSource, searchRequest);
$scope.searchSource.onRequestStart((searchSource, options) => {
return $scope.vis.getAggConfig().onSearchRequestStart(searchSource, options);
});
$scope.searchSource.setField('aggs', function () {

View file

@ -19,6 +19,7 @@
import sinon from 'sinon';
import moment from 'moment';
import { SearchSource } from 'ui/courier';
export function createIndexPatternsStub() {
return {
@ -31,7 +32,10 @@ export function createIndexPatternsStub() {
};
}
export function createSearchSourceStubProvider(hits, timeField) {
/**
* A stubbed search source with a `fetch` method that returns all of `_stubHits`.
*/
export function createSearchSourceStub(hits, timeField) {
const searchSourceStub = {
_stubHits: hits,
_stubTimeField: timeField,
@ -41,13 +45,37 @@ export function createSearchSourceStubProvider(hits, timeField) {
}),
};
searchSourceStub.setParent = sinon.stub().returns(searchSourceStub);
searchSourceStub.setField = sinon.stub().returns(searchSourceStub);
searchSourceStub.getField = sinon.spy(key => {
searchSourceStub.setParent = sinon.stub(SearchSource.prototype, 'setParent').returns(searchSourceStub);
searchSourceStub.setField = sinon.stub(SearchSource.prototype, 'setField').returns(searchSourceStub);
searchSourceStub.getField = sinon.stub(SearchSource.prototype, 'getField').callsFake(key => {
const previousSetCall = searchSourceStub.setField.withArgs(key).lastCall;
return previousSetCall ? previousSetCall.args[1] : null;
});
searchSourceStub.fetch = sinon.spy(() => {
searchSourceStub.fetch = sinon.stub(SearchSource.prototype, 'fetch').callsFake(() => Promise.resolve({
hits: {
hits: searchSourceStub._stubHits,
total: searchSourceStub._stubHits.length,
},
}));
searchSourceStub._restore = () => {
searchSourceStub.setParent.restore();
searchSourceStub.setField.restore();
searchSourceStub.getField.restore();
searchSourceStub.fetch.restore();
};
return searchSourceStub;
}
/**
* A stubbed search source with a `fetch` method that returns a filtered set of `_stubHits`.
*/
export function createContextSearchSourceStub(hits, timeField = '@timestamp') {
const searchSourceStub = createSearchSourceStub(hits, timeField);
searchSourceStub.fetch.restore();
searchSourceStub.fetch = sinon.stub(SearchSource.prototype, 'fetch').callsFake(() => {
const timeField = searchSourceStub._stubTimeField;
const lastQuery = searchSourceStub.setField.withArgs('query').lastCall.args[1];
const timeRange = lastQuery.query.constant_score.filter.range[timeField];
@ -71,7 +99,5 @@ export function createSearchSourceStubProvider(hits, timeField) {
});
});
return function SearchSourceStubProvider() {
return searchSourceStub;
};
return searchSourceStub;
}

View file

@ -19,55 +19,34 @@
import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import sinon from 'sinon';
import { createIndexPatternsStub } from './_stubs';
import { SearchSourceProvider } from 'ui/courier';
import { createIndexPatternsStub, createSearchSourceStub } from './_stubs';
import { fetchAnchorProvider } from '../anchor';
function createSearchSourceStubProvider(hits) {
const searchSourceStub = {
_stubHits: hits,
};
searchSourceStub.setParent = sinon.stub().returns(searchSourceStub);
searchSourceStub.setField = sinon.stub().returns(searchSourceStub);
searchSourceStub.fetch = sinon.spy(() => Promise.resolve({
hits: {
hits: searchSourceStub._stubHits,
total: searchSourceStub._stubHits.length,
},
}));
return function SearchSourceStubProvider() {
return searchSourceStub;
};
}
describe('context app', function () {
beforeEach(ngMock.module('kibana'));
describe('function fetchAnchor', function () {
let fetchAnchor;
let SearchSourceStub;
let searchSourceStub;
beforeEach(ngMock.module(function createServiceStubs($provide) {
$provide.value('indexPatterns', createIndexPatternsStub());
}));
beforeEach(ngMock.inject(function createPrivateStubs(Private) {
SearchSourceStub = createSearchSourceStubProvider([
searchSourceStub = createSearchSourceStub([
{ _id: 'hit1' },
]);
Private.stub(SearchSourceProvider, SearchSourceStub);
fetchAnchor = Private(fetchAnchorProvider);
}));
it('should use the `fetch` method of the SearchSource', function () {
const searchSourceStub = new SearchSourceStub();
afterEach(() => {
searchSourceStub._restore();
});
it('should use the `fetch` method of the SearchSource', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
expect(searchSourceStub.fetch.calledOnce).to.be(true);
@ -75,8 +54,6 @@ describe('context app', function () {
});
it('should configure the SearchSource to not inherit from the implicit root', function () {
const searchSourceStub = new SearchSourceStub();
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setParentSpy = searchSourceStub.setParent;
@ -86,8 +63,6 @@ describe('context app', function () {
});
it('should set the SearchSource index pattern', function () {
const searchSourceStub = new SearchSourceStub();
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setFieldSpy = searchSourceStub.setField;
@ -96,8 +71,6 @@ describe('context app', function () {
});
it('should set the SearchSource version flag to true', function () {
const searchSourceStub = new SearchSourceStub();
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setVersionSpy = searchSourceStub.setField.withArgs('version');
@ -107,8 +80,6 @@ describe('context app', function () {
});
it('should set the SearchSource size to 1', function () {
const searchSourceStub = new SearchSourceStub();
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setSizeSpy = searchSourceStub.setField.withArgs('size');
@ -118,8 +89,6 @@ describe('context app', function () {
});
it('should set the SearchSource query to an ids query', function () {
const searchSourceStub = new SearchSourceStub();
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setQuerySpy = searchSourceStub.setField.withArgs('query');
@ -140,8 +109,6 @@ describe('context app', function () {
});
it('should set the SearchSource sort order', function () {
const searchSourceStub = new SearchSourceStub();
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setSortSpy = searchSourceStub.setField.withArgs('sort');
@ -154,7 +121,6 @@ describe('context app', function () {
});
it('should reject with an error when no hits were found', function () {
const searchSourceStub = new SearchSourceStub();
searchSourceStub._stubHits = [];
return fetchAnchor('INDEX_PATTERN_ID', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
@ -169,7 +135,6 @@ describe('context app', function () {
});
it('should return the first hit after adding an anchor marker', function () {
const searchSourceStub = new SearchSourceStub();
searchSourceStub._stubHits = [
{ property1: 'value1' },
{ property2: 'value2' },

View file

@ -22,8 +22,7 @@ import ngMock from 'ng_mock';
import moment from 'moment';
import * as _ from 'lodash';
import { createIndexPatternsStub, createSearchSourceStubProvider } from './_stubs';
import { SearchSourceProvider } from 'ui/courier';
import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs';
import { fetchContextProvider } from '../context';
@ -38,16 +37,14 @@ describe('context app', function () {
describe('function fetchPredecessors', function () {
let fetchPredecessors;
let getSearchSourceStub;
let searchSourceStub;
beforeEach(ngMock.module(function createServiceStubs($provide) {
$provide.value('indexPatterns', createIndexPatternsStub());
}));
beforeEach(ngMock.inject(function createPrivateStubs(Private) {
getSearchSourceStub = createSearchSourceStubProvider([], '@timestamp', MS_PER_DAY * 8);
Private.stub(SearchSourceProvider, getSearchSourceStub);
searchSourceStub = createContextSearchSourceStub([], '@timestamp', MS_PER_DAY * 8);
fetchPredecessors = (indexPatternId, timeField, sortDir, timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => {
const anchor = {
_source: {
@ -69,8 +66,11 @@ describe('context app', function () {
};
}));
afterEach(() => {
searchSourceStub._restore();
});
it('should perform exactly one query when enough hits are returned', function () {
const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 3000 + 2),
searchSourceStub._createStubHit(MS_PER_DAY * 3000 + 1),
@ -97,7 +97,6 @@ describe('context app', function () {
});
it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () {
const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 3010),
searchSourceStub._createStubHit(MS_PER_DAY * 3002),
@ -134,7 +133,6 @@ describe('context app', function () {
});
it('should perform multiple queries until the expected hit count is returned', function () {
const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 1700),
searchSourceStub._createStubHit(MS_PER_DAY * 1200),
@ -185,8 +183,6 @@ describe('context app', function () {
});
it('should configure the SearchSource to not inherit from the implicit root', function () {
const searchSourceStub = getSearchSourceStub();
return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',
@ -206,8 +202,6 @@ describe('context app', function () {
});
it('should set the tiebreaker sort order to the opposite as the time field', function () {
const searchSourceStub = getSearchSourceStub();
return fetchPredecessors(
'INDEX_PATTERN_ID',
'@timestamp',

View file

@ -22,8 +22,7 @@ import ngMock from 'ng_mock';
import moment from 'moment';
import * as _ from 'lodash';
import { createIndexPatternsStub, createSearchSourceStubProvider } from './_stubs';
import { SearchSourceProvider } from 'ui/courier';
import { createIndexPatternsStub, createContextSearchSourceStub } from './_stubs';
import { fetchContextProvider } from '../context';
@ -37,15 +36,14 @@ describe('context app', function () {
describe('function fetchSuccessors', function () {
let fetchSuccessors;
let getSearchSourceStub;
let searchSourceStub;
beforeEach(ngMock.module(function createServiceStubs($provide) {
$provide.value('indexPatterns', createIndexPatternsStub());
}));
beforeEach(ngMock.inject(function createPrivateStubs(Private) {
getSearchSourceStub = createSearchSourceStubProvider([], '@timestamp');
Private.stub(SearchSourceProvider, getSearchSourceStub);
searchSourceStub = createContextSearchSourceStub([], '@timestamp');
fetchSuccessors = (indexPatternId, timeField, sortDir, timeValIso, timeValNr, tieBreakerField, tieBreakerValue, size) => {
const anchor = {
@ -68,8 +66,11 @@ describe('context app', function () {
};
}));
afterEach(() => {
searchSourceStub._restore();
});
it('should perform exactly one query when enough hits are returned', function () {
const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 5000),
searchSourceStub._createStubHit(MS_PER_DAY * 4000),
@ -96,7 +97,6 @@ describe('context app', function () {
});
it('should perform multiple queries with the last being unrestricted when too few hits are returned', function () {
const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 3010),
searchSourceStub._createStubHit(MS_PER_DAY * 3002),
@ -133,7 +133,6 @@ describe('context app', function () {
});
it('should perform multiple queries until the expected hit count is returned', function () {
const searchSourceStub = getSearchSourceStub();
searchSourceStub._stubHits = [
searchSourceStub._createStubHit(MS_PER_DAY * 3000),
searchSourceStub._createStubHit(MS_PER_DAY * 3000 - 1),
@ -187,8 +186,6 @@ describe('context app', function () {
});
it('should configure the SearchSource to not inherit from the implicit root', function () {
const searchSourceStub = getSearchSourceStub();
return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',
@ -208,8 +205,6 @@ describe('context app', function () {
});
it('should set the tiebreaker sort order to the same as the time field', function () {
const searchSourceStub = getSearchSourceStub();
return fetchSuccessors(
'INDEX_PATTERN_ID',
'@timestamp',

View file

@ -21,11 +21,9 @@ import _ from 'lodash';
import { i18n } from '@kbn/i18n';
import { SearchSourceProvider } from 'ui/courier';
export function fetchAnchorProvider(indexPatterns, Private) {
const SearchSource = Private(SearchSourceProvider);
import { SearchSource } from 'ui/courier';
export function fetchAnchorProvider(indexPatterns) {
return async function fetchAnchor(
indexPatternId,
anchorId,

View file

@ -18,8 +18,7 @@
*/
// @ts-ignore
import { SearchSourceProvider } from 'ui/courier';
import { IPrivate } from 'ui/private';
import { SearchSource } from 'ui/courier';
import { Filter } from '@kbn/es-query';
import { IndexPatterns, IndexPattern } from 'ui/index_patterns';
import { reverseSortDir, SortDirection } from './utils/sorting';
@ -42,9 +41,7 @@ const DAY_MILLIS = 24 * 60 * 60 * 1000;
// look from 1 day up to 10000 days into the past and future
const LOOKUP_OFFSETS = [0, 1, 7, 30, 365, 10000].map(days => days * DAY_MILLIS);
function fetchContextProvider(indexPatterns: IndexPatterns, Private: IPrivate) {
const SearchSourcePrivate: any = Private(SearchSourceProvider);
function fetchContextProvider(indexPatterns: IndexPatterns) {
return {
fetchSurroundingDocs,
};
@ -116,7 +113,7 @@ function fetchContextProvider(indexPatterns: IndexPatterns, Private: IPrivate) {
}
async function createSearchSource(indexPattern: IndexPattern, filters: Filter[]) {
return new SearchSourcePrivate()
return new SearchSource()
.setParent(false)
.setField('index', indexPattern)
.setField('filter', filters);

View file

@ -102,12 +102,13 @@ export class SearchEmbeddable extends Embeddable<SearchInput, SearchOutput>
private inspectorAdaptors: Adapters;
private searchScope?: SearchScope;
private panelTitle: string = '';
private filtersSearchSource: SearchSource;
private filtersSearchSource?: SearchSource;
private searchInstance?: JQLite;
private autoRefreshFetchSubscription?: Subscription;
private subscription?: Subscription;
public readonly type = SEARCH_EMBEDDABLE_TYPE;
private filterGen: FilterManager;
private abortController?: AbortController;
private prevTimeRange?: TimeRange;
private prevFilters?: Filter[];
@ -193,7 +194,7 @@ export class SearchEmbeddable extends Embeddable<SearchInput, SearchOutput>
if (this.autoRefreshFetchSubscription) {
this.autoRefreshFetchSubscription.unsubscribe();
}
this.savedSearch.searchSource.cancelQueued();
if (this.abortController) this.abortController.abort();
}
private initializeSearchScope() {
@ -273,7 +274,8 @@ export class SearchEmbeddable extends Embeddable<SearchInput, SearchOutput>
const { searchSource } = this.savedSearch;
// Abort any in-progress requests
searchSource.cancelQueued();
if (this.abortController) this.abortController.abort();
this.abortController = new AbortController();
searchSource.setField('size', config.get('discover:sampleSize'));
searchSource.setField(
@ -299,7 +301,9 @@ export class SearchEmbeddable extends Embeddable<SearchInput, SearchOutput>
try {
// Make the request
const resp = await searchSource.fetch();
const resp = await searchSource.fetch({
abortSignal: this.abortController.signal,
});
this.searchScope.isLoading = false;
@ -337,8 +341,8 @@ export class SearchEmbeddable extends Embeddable<SearchInput, SearchOutput>
searchScope.sharedItemTitle = this.panelTitle;
if (isFetchRequired) {
this.filtersSearchSource.setField('filter', this.input.filters);
this.filtersSearchSource.setField('query', this.input.query);
this.filtersSearchSource!.setField('filter', this.input.filters);
this.filtersSearchSource!.setField('query', this.input.query);
this.fetch();

View file

@ -27,7 +27,7 @@ import { fatalError, toastNotifications } from 'ui/notify';
import uiRoutes from 'ui/routes';
import { uiModules } from 'ui/modules';
import template from './edit_index_pattern.html';
import { FieldWildcardProvider } from 'ui/field_wildcard';
import { fieldWildcardMatcher } from 'ui/field_wildcard';
import { IndexPatternListFactory } from 'ui/management/index_pattern_list';
import React from 'react';
import { render, unmountComponentAtNode } from 'react-dom';
@ -173,10 +173,9 @@ uiModules.get('apps/management')
.controller('managementIndexPatternsEdit', function (
$scope, $location, $route, Promise, config, indexPatterns, Private, AppState, confirmModal) {
const $state = $scope.state = new AppState();
const { fieldWildcardMatcher } = Private(FieldWildcardProvider);
const indexPatternListProvider = Private(IndexPatternListFactory)();
$scope.fieldWildcardMatcher = fieldWildcardMatcher;
$scope.fieldWildcardMatcher = (...args) => fieldWildcardMatcher(...args, config.get('metaFields'));
$scope.editSectionsProvider = Private(IndicesEditSectionsProvider);
$scope.kbnUrl = Private(KbnUrlProvider);
$scope.indexPattern = $route.current.locals.indexPattern;

View file

@ -13,7 +13,6 @@
@import './courier/index';
@import './collapsible_sidebar/index';
@import './directives/index';
@import './error_allow_explicit_index/index';
@import './error_auto_create_index/index';
@import './error_url_overflow/index';
@import './exit_full_screen/index';

View file

@ -203,7 +203,6 @@ describe('parent pipeline aggs', function () {
});
const searchSource = {};
const request = {};
const customMetricSpy = sinon.spy();
const customMetric = aggConfig.params.customMetric;
@ -211,9 +210,9 @@ describe('parent pipeline aggs', function () {
customMetric.type.params[0].modifyAggConfigOnSearchRequestStart = customMetricSpy;
aggConfig.type.params.forEach(param => {
param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, request);
param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource);
});
expect(customMetricSpy.calledWith(customMetric, searchSource, request)).to.be(true);
expect(customMetricSpy.calledWith(customMetric, searchSource)).to.be(true);
});
});
});

View file

@ -145,7 +145,6 @@ describe('sibling pipeline aggs', function () {
init();
const searchSource = {};
const request = {};
const customMetricSpy = sinon.spy();
const customBucketSpy = sinon.spy();
const { customMetric, customBucket } = aggConfig.params;
@ -155,10 +154,10 @@ describe('sibling pipeline aggs', function () {
customBucket.type.params[0].modifyAggConfigOnSearchRequestStart = customBucketSpy;
aggConfig.type.params.forEach(param => {
param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, request);
param.modifyAggConfigOnSearchRequestStart(aggConfig, searchSource);
});
expect(customMetricSpy.calledWith(customMetric, searchSource, request)).to.be(true);
expect(customBucketSpy.calledWith(customBucket, searchSource, request)).to.be(true);
expect(customMetricSpy.calledWith(customMetric, searchSource)).to.be(true);
expect(customBucketSpy.calledWith(customBucket, searchSource)).to.be(true);
});
});

View file

@ -238,14 +238,14 @@ export class AggConfig {
* @param {Courier.SearchRequest} searchRequest
* @return {Promise<undefined>}
*/
onSearchRequestStart(searchSource: any, searchRequest: any) {
onSearchRequestStart(searchSource: any, options: any) {
if (!this.type) {
return Promise.resolve();
}
return Promise.all(
this.type.params.map((param: any) =>
param.modifyAggConfigOnSearchRequestStart(this, searchSource, searchRequest)
param.modifyAggConfigOnSearchRequestStart(this, searchSource, options)
)
);
}

View file

@ -307,12 +307,10 @@ export class AggConfigs {
return _.find(reqAgg.getResponseAggs(), { id });
}
onSearchRequestStart(searchSource: any, searchRequest: any) {
onSearchRequestStart(searchSource: any, options: any) {
return Promise.all(
// @ts-ignore
this.getRequestAggs().map((agg: AggConfig) =>
agg.onSearchRequestStart(searchSource, searchRequest)
)
this.getRequestAggs().map((agg: AggConfig) => agg.onSearchRequestStart(searchSource, options))
);
}
}

View file

@ -92,7 +92,7 @@ export const histogramBucketAgg = new BucketAggType<IBucketHistogramAggConfig>({
modifyAggConfigOnSearchRequestStart(
aggConfig: IBucketHistogramAggConfig,
searchSource: any,
searchRequest: any
options: any
) {
const field = aggConfig.getField();
const aggBody = field.scripted
@ -111,10 +111,8 @@ export const histogramBucketAgg = new BucketAggType<IBucketHistogramAggConfig>({
},
});
searchRequest.whenAborted(() => childSearchSource.cancelQueued());
return childSearchSource
.fetch()
.fetch(options)
.then((resp: any) => {
aggConfig.setAutoBounds({
min: _.get(resp, 'aggregations.minAgg.value'),

View file

@ -111,9 +111,6 @@ export const termsBucketAgg = new BucketAggType({
if (aggConfig.params.otherBucket) {
const filterAgg = buildOtherBucketAgg(aggConfigs, aggConfig, resp);
if (!filterAgg) return resp;
if (abortSignal) {
abortSignal.addEventListener('abort', () => nestedSearchSource.cancelQueued());
}
nestedSearchSource.setField('aggs', filterAgg);
@ -134,7 +131,7 @@ export const termsBucketAgg = new BucketAggType({
});
request.stats(getRequestInspectorStats(nestedSearchSource));
const response = await nestedSearchSource.fetch();
const response = await nestedSearchSource.fetch({ abortSignal });
request.stats(getResponseInspectorStats(nestedSearchSource, response)).ok({ json: response });
resp = mergeOtherBucketAggResponse(aggConfigs, resp, response, aggConfig, filterAgg());
}

View file

@ -46,18 +46,17 @@ export class BaseParamType implements AggParam {
/**
* A function that will be called before an aggConfig is serialized and sent to ES.
* Allows aggConfig to retrieve values needed for serialization by creating a {SearchRequest}
* Allows aggConfig to retrieve values needed for serialization
* Example usage: an aggregation needs to know the min/max of a field to determine an appropriate interval
*
* @param {AggConfig} aggconfig
* @param {AggConfig} aggConfig
* @param {Courier.SearchSource} searchSource
* @param {Courier.SearchRequest} searchRequest
* @returns {Promise<undefined>|undefined}
*/
modifyAggConfigOnSearchRequestStart: (
aggconfig: AggConfig,
aggConfig: AggConfig,
searchSource?: SearchSource,
searchRequest?: any
options?: any
) => void;
constructor(config: Record<string, any>) {

View file

@ -1,349 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import sinon from 'sinon';
import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import NoDigestPromises from 'test_utils/no_digest_promises';
import { delay } from 'bluebird';
import { CallClientProvider } from '../call_client';
import { RequestStatus } from '../req_status';
import { SearchRequestProvider } from '../request';
import { addSearchStrategy } from '../../search_strategy';
describe('callClient', () => {
NoDigestPromises.activateForSuite();
const ABORTED = RequestStatus.ABORTED;
let SearchRequest;
let callClient;
let fakeSearch;
let searchRequests;
let esRequestDelay;
let esShouldError;
let esPromiseAbortSpy;
const createSearchRequest = (id, overrides = {}, errorHandler = () => {}) => {
const { source: overrideSource, ...rest } = overrides;
const source = {
_flatten: () => Promise.resolve({
index: id
}),
requestIsStopped: () => {},
getField: () => 'indexPattern',
getPreferredSearchStrategyId: () => undefined,
...overrideSource
};
const searchRequest = new SearchRequest({ source, errorHandler, ...rest });
searchRequest.__testId__ = id;
return searchRequest;
};
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.module(function stubEs($provide) {
esRequestDelay = 0;
esShouldError = false;
$provide.service('es', (Promise) => {
fakeSearch = sinon.spy(({ index }) => {
const esPromise = new Promise((resolve, reject) => {
if (esShouldError) {
return reject('fake es error');
}
setTimeout(() => {
resolve(index);
}, esRequestDelay);
});
esPromise.abort = esPromiseAbortSpy = sinon.spy();
return esPromise;
});
return {
search: fakeSearch
};
});
}));
beforeEach(ngMock.inject(Private => {
callClient = Private(CallClientProvider);
SearchRequest = Private(SearchRequestProvider);
}));
describe('basic contract', () => {
it('returns a promise', () => {
searchRequests = [ createSearchRequest() ];
const callingClient = callClient(searchRequests);
expect(callingClient.then).to.be.a('function');
});
it(`resolves the promise with the 'responses' property of the es.search() result`, () => {
searchRequests = [ createSearchRequest(1) ];
return callClient(searchRequests).then(results => {
expect(results).to.eql([1]);
});
});
describe('for failing requests', () => {
beforeEach(() => {
addSearchStrategy({
id: 'fail',
isViable: indexPattern => {
return indexPattern.type === 'fail';
},
search: () => {
return {
searching: Promise.reject(new Error('Search failed')),
failedSearchRequests: [],
abort: () => {},
};
},
});
});
it(`still bubbles up the failure`, () => {
const searchRequestFail1 = createSearchRequest('fail1', {
source: {
getField: () => ({ type: 'fail' }),
},
});
const searchRequestFail2 = createSearchRequest('fail2', {
source: {
getField: () => ({ type: 'fail' }),
},
});
searchRequests = [ searchRequestFail1, searchRequestFail2 ];
return callClient(searchRequests).then(results => {
expect(results).to.eql([
{ error: new Error('Search failed') },
{ error: new Error('Search failed') },
]);
});
});
});
});
describe('implementation', () => {
it('calls searchRequest.whenAborted() as part of setup', async () => {
const whenAbortedSpy = sinon.spy();
const searchRequest = createSearchRequest();
searchRequest.whenAborted = whenAbortedSpy;
searchRequests = [ searchRequest ];
return callClient(searchRequests).then(() => {
expect(whenAbortedSpy.callCount).to.be(1);
});
});
});
describe('aborting at different points in the request lifecycle:', () => {
it('while the search body is being formed rejects with an AbortError', () => {
const searchRequest = createSearchRequest(1, {
source: {
_flatten: () => {
return new Promise(resolve => {
setTimeout(() => {
resolve({});
}, 100);
});
},
requestIsStopped: () => {},
},
});
searchRequests = [ searchRequest ];
const callingClient = callClient(searchRequests);
// Abort the request while the search body is being formed.
setTimeout(() => {
searchRequest.abort();
}, 20);
return callingClient.catch(error => {
expect(error.name).to.be('AbortError');
});
});
it('while the search is in flight rejects with an AbortError', () => {
esRequestDelay = 100;
const searchRequest = createSearchRequest();
searchRequests = [ searchRequest ];
const callingClient = callClient(searchRequests);
// Abort the request while the search is in flight..
setTimeout(() => {
searchRequest.abort();
}, 80);
return callingClient.catch(error => {
expect(error.name).to.be('AbortError');
});
});
});
describe('aborting number of requests:', () => {
it(`aborting all searchRequests rejects with an AbortError`, () => {
const searchRequest1 = createSearchRequest();
const searchRequest2 = createSearchRequest();
searchRequests = [ searchRequest1, searchRequest2 ];
const callingClient = callClient(searchRequests);
searchRequest1.abort();
searchRequest2.abort();
return callingClient.catch(error => {
expect(error.name).to.be('AbortError');
});
});
it(`aborting all searchRequests calls abort() on the promise returned by searchStrategy.search()`, () => {
esRequestDelay = 100;
const searchRequest1 = createSearchRequest();
const searchRequest2 = createSearchRequest();
searchRequests = [ searchRequest1, searchRequest2 ];
const callingClient = callClient(searchRequests);
return Promise.all([
delay(70).then(() => {
// At this point we expect the request to be in flight.
expect(esPromiseAbortSpy.callCount).to.be(0);
searchRequest1.abort();
searchRequest2.abort();
}),
callingClient.catch(() => {
expect(esPromiseAbortSpy.callCount).to.be(1);
}),
]);
});
it('aborting some searchRequests rejects with an AbortError', () => {
const searchRequest1 = createSearchRequest(1);
const searchRequest2 = createSearchRequest(2);
searchRequests = [ searchRequest1, searchRequest2 ];
const callingClient = callClient(searchRequests);
searchRequest2.abort();
return callingClient.catch(error => {
expect(error.name).to.be('AbortError');
});
});
});
describe('searchRequests with multiple searchStrategies map correctly to their responses', () => {
const search = ({ searchRequests }) => {
return {
searching: Promise.resolve(searchRequests.map(searchRequest => searchRequest.__testId__)),
failedSearchRequests: [],
abort: () => {},
};
};
const searchStrategyA = {
id: 'a',
isViable: indexPattern => {
return indexPattern.type === 'a';
},
search,
};
const searchStrategyB = {
id: 'b',
isViable: indexPattern => {
return indexPattern.type === 'b';
},
search,
};
let searchRequestA;
let searchRequestB;
let searchRequestA2;
beforeEach(() => {
addSearchStrategy(searchStrategyA);
addSearchStrategy(searchStrategyB);
searchRequestA = createSearchRequest('a', {
source: {
getField: () => ({ type: 'a' }),
getSearchStrategyForSearchRequest: () => {},
getPreferredSearchStrategyId: () => {},
},
});
searchRequestB = createSearchRequest('b', {
source: {
getField: () => ({ type: 'b' }),
getSearchStrategyForSearchRequest: () => {},
getPreferredSearchStrategyId: () => {},
},
});
searchRequestA2 = createSearchRequest('a2', {
source: {
getField: () => ({ type: 'a' }),
getSearchStrategyForSearchRequest: () => {},
getPreferredSearchStrategyId: () => {},
},
});
});
it('if the searchRequests are reordered by the searchStrategies', () => {
// Add requests in an order which will be reordered by the strategies.
searchRequests = [ searchRequestA, searchRequestB, searchRequestA2 ];
const callingClient = callClient(searchRequests);
return callingClient.then(results => {
expect(results).to.eql(['a', 'b', 'a2']);
});
});
it('if one is aborted after being provided', () => {
// Add requests in an order which will be reordered by the strategies.
searchRequests = [ searchRequestA, searchRequestB, searchRequestA2 ];
const callingClient = callClient(searchRequests);
searchRequestA2.abort();
return callingClient.then(results => {
expect(results).to.eql(['a', 'b', ABORTED]);
});
});
it(`if one is already aborted when it's provided`, () => {
searchRequests = [ searchRequestA, searchRequestB, ABORTED, searchRequestA2 ];
const callingClient = callClient(searchRequests);
return callingClient.then(results => {
expect(results).to.eql(['a', 'b', ABORTED, 'a2']);
});
});
});
});

View file

@ -1,122 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import sinon from 'sinon';
import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { CallClientProvider } from '../call_client';
import { CallResponseHandlersProvider } from '../call_response_handlers';
import { ContinueIncompleteProvider } from '../continue_incomplete';
import { FetchNowProvider } from '../fetch_now';
function mockRequest() {
return {
strategy: 'mock',
started: true,
aborted: false,
handleFailure: sinon.spy(),
retry: sinon.spy(function () { return this; }),
continue: sinon.spy(function () { return this; }),
start: sinon.spy(function () { return this; })
};
}
describe('FetchNowProvider', () => {
let Promise;
let $rootScope;
let fetchNow;
let request;
let requests;
let fakeResponses;
beforeEach(ngMock.module('kibana', (PrivateProvider) => {
function FakeResponsesProvider(Promise) {
fakeResponses = sinon.spy(function () {
return Promise.map(requests, mockRequest => {
return { mockRequest };
});
});
return fakeResponses;
}
PrivateProvider.swap(CallClientProvider, FakeResponsesProvider);
PrivateProvider.swap(CallResponseHandlersProvider, FakeResponsesProvider);
PrivateProvider.swap(ContinueIncompleteProvider, FakeResponsesProvider);
}));
beforeEach(ngMock.inject((Private, $injector) => {
$rootScope = $injector.get('$rootScope');
Promise = $injector.get('Promise');
fetchNow = Private(FetchNowProvider);
request = mockRequest();
requests = [ request ];
}));
describe('when request has not started', () => {
beforeEach(() => requests.forEach(req => req.started = false));
it('starts request', () => {
fetchNow(requests);
expect(request.start.called).to.be(true);
expect(request.continue.called).to.be(false);
});
it('waits for returned promise from start() to be fulfilled', () => {
request.start = sinon.stub().returns(Promise.resolve(request));
fetchNow(requests);
expect(request.start.callCount).to.be(1);
expect(fakeResponses.callCount).to.be(0);
$rootScope.$apply();
expect(fakeResponses.callCount).to.be(3);
});
it('invokes request failure handler if starting fails', () => {
request.start = sinon.stub().returns(Promise.reject('some error'));
fetchNow(requests);
$rootScope.$apply();
sinon.assert.calledWith(request.handleFailure, 'some error');
});
});
describe('when request has already started', () => {
it('continues request', () => {
fetchNow(requests);
expect(request.start.called).to.be(false);
expect(request.continue.called).to.be(true);
});
it('waits for returned promise to be fulfilled', () => {
request.continue = sinon.stub().returns(Promise.resolve(request));
fetchNow(requests);
expect(request.continue.callCount).to.be(1);
expect(fakeResponses.callCount).to.be(0);
$rootScope.$apply();
expect(fakeResponses.callCount).to.be(3);
});
it('invokes request failure handler if continuing fails', () => {
request.continue = sinon.stub().returns(Promise.reject('some error'));
fetchNow(requests);
$rootScope.$apply();
sinon.assert.calledWith(request.handleFailure, 'some error');
});
});
});

View file

@ -17,187 +17,37 @@
* under the License.
*/
import { ErrorAllowExplicitIndexProvider } from '../../error_allow_explicit_index';
import { assignSearchRequestsToSearchStrategies } from '../search_strategy';
import { IsRequestProvider } from './is_request';
import { RequestStatus } from './req_status';
import { SerializeFetchParamsProvider } from './request/serialize_fetch_params';
import { i18n } from '@kbn/i18n';
import { createDefer } from 'ui/promises';
import { groupBy } from 'lodash';
import { getSearchStrategyForSearchRequest, getSearchStrategyById } from '../search_strategy';
import { handleResponse } from './handle_response';
export function CallClientProvider(Private, Promise, es, config, sessionId, esShardTimeout) {
const errorAllowExplicitIndex = Private(ErrorAllowExplicitIndexProvider);
const isRequest = Private(IsRequestProvider);
const serializeFetchParams = Private(SerializeFetchParamsProvider);
export function callClient(searchRequests, requestsOptions = [], { es, config, esShardTimeout } = {}) {
// Correlate the options with the request that they're associated with
const requestOptionEntries = searchRequests.map((request, i) => [request, requestsOptions[i]]);
const requestOptionsMap = new Map(requestOptionEntries);
const ABORTED = RequestStatus.ABORTED;
// Group the requests by the strategy used to search that specific request
const searchStrategyMap = groupBy(searchRequests, (request, i) => {
const searchStrategy = getSearchStrategyForSearchRequest(request, requestsOptions[i]);
return searchStrategy.id;
});
function callClient(searchRequests) {
// get the actual list of requests that we will be fetching
const requestsToFetch = searchRequests.filter(isRequest);
let requestsToFetchCount = requestsToFetch.length;
if (requestsToFetchCount === 0) {
return Promise.resolve([]);
}
// This is how we'll provide the consumer with search responses. Resolved by
// respondToSearchRequests.
const defer = createDefer(Promise);
const abortableSearches = [];
let areAllSearchRequestsAborted = false;
// When we traverse our search requests and send out searches, some of them may fail. We'll
// store those that don't fail here.
const activeSearchRequests = [];
// Respond to each searchRequest with the response or ABORTED.
const respondToSearchRequests = (responsesInOriginalRequestOrder = []) => {
// We map over searchRequests because if we were originally provided an ABORTED
// request then we'll return that value.
return Promise.map(searchRequests, function (searchRequest, searchRequestIndex) {
if (searchRequest.aborted) {
return ABORTED;
}
const status = searchRequests[searchRequestIndex];
if (status === ABORTED) {
return ABORTED;
}
const activeSearchRequestIndex = activeSearchRequests.indexOf(searchRequest);
const isFailedSearchRequest = activeSearchRequestIndex === -1;
if (isFailedSearchRequest) {
return ABORTED;
}
return responsesInOriginalRequestOrder[searchRequestIndex];
})
.then(
(res) => defer.resolve(res),
(err) => defer.reject(err)
);
};
// handle a request being aborted while being fetched
const requestWasAborted = Promise.method(function (searchRequest, index) {
if (searchRequests[index] === ABORTED) {
defer.reject(new Error(
i18n.translate('common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage', {
defaultMessage: 'Request was aborted twice?',
})
));
}
requestsToFetchCount--;
if (requestsToFetchCount !== 0) {
// We can't resolve early unless all searchRequests have been aborted.
return;
}
abortableSearches.forEach(({ abort }) => {
abort();
});
areAllSearchRequestsAborted = true;
return respondToSearchRequests();
// Execute each search strategy with the group of requests, but return the responses in the same
// order in which they were received. We use a map to correlate the original request with its
// response.
const requestResponseMap = new Map();
Object.keys(searchStrategyMap).forEach(searchStrategyId => {
const searchStrategy = getSearchStrategyById(searchStrategyId);
const requests = searchStrategyMap[searchStrategyId];
const { searching, abort } = searchStrategy.search({ searchRequests: requests, es, config, esShardTimeout });
requests.forEach((request, i) => {
const response = searching.then(results => handleResponse(request, results[i]));
const { abortSignal } = requestOptionsMap.get(request) || {};
if (abortSignal) abortSignal.addEventListener('abort', abort);
requestResponseMap.set(request, response);
});
// attach abort handlers, close over request index
searchRequests.forEach(function (searchRequest, index) {
if (!isRequest(searchRequest)) {
return;
}
searchRequest.whenAborted(function () {
requestWasAborted(searchRequest, index).catch(defer.reject);
});
});
const searchStrategiesWithRequests = assignSearchRequestsToSearchStrategies(requestsToFetch);
// We're going to create a new async context here, so that the logic within it can execute
// asynchronously after we've returned a reference to defer.promise.
Promise.resolve().then(async () => {
// Execute each request using its search strategy.
for (let i = 0; i < searchStrategiesWithRequests.length; i++) {
const searchStrategyWithSearchRequests = searchStrategiesWithRequests[i];
const { searchStrategy, searchRequests } = searchStrategyWithSearchRequests;
const {
searching,
abort,
failedSearchRequests,
} = await searchStrategy.search({ searchRequests, es, Promise, serializeFetchParams, config, sessionId, esShardTimeout });
// Collect searchRequests which have successfully been sent.
searchRequests.forEach(searchRequest => {
if (failedSearchRequests.includes(searchRequest)) {
return;
}
activeSearchRequests.push(searchRequest);
});
abortableSearches.push({
searching,
abort,
requestsCount: searchRequests.length,
});
}
try {
// The request was aborted while we were doing the above logic.
if (areAllSearchRequestsAborted) {
return;
}
const segregatedResponses = await Promise.all(abortableSearches.map(async ({ searching, requestsCount }) => {
return searching.catch((e) => {
// Duplicate errors so that they correspond to the original requests.
return new Array(requestsCount).fill({ error: e });
});
}));
// Assigning searchRequests to strategies means that the responses come back in a different
// order than the original searchRequests. So we'll put them back in order so that we can
// use the order to associate each response with the original request.
const responsesInOriginalRequestOrder = new Array(searchRequests.length);
segregatedResponses.forEach((responses, strategyIndex) => {
responses.forEach((response, responseIndex) => {
const searchRequest = searchStrategiesWithRequests[strategyIndex].searchRequests[responseIndex];
const requestIndex = searchRequests.indexOf(searchRequest);
responsesInOriginalRequestOrder[requestIndex] = response;
});
});
await respondToSearchRequests(responsesInOriginalRequestOrder);
} catch(error) {
if (errorAllowExplicitIndex.test(error)) {
return errorAllowExplicitIndex.takeover();
}
defer.reject(error);
}
});
// Return the promise which acts as our vehicle for providing search responses to the consumer.
// However, if there are any errors, notify the searchRequests of them *instead* of bubbling
// them up to the consumer.
return defer.promise.catch((err) => {
// By returning the return value of this catch() without rethrowing the error, we delegate
// error-handling to the searchRequest instead of the consumer.
searchRequests.forEach((searchRequest, index) => {
if (searchRequests[index] !== ABORTED) {
searchRequest.handleFailure(err);
}
});
});
}
return callClient;
}, []);
return searchRequests.map(request => requestResponseMap.get(request));
}

View file

@ -0,0 +1,128 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { callClient } from './call_client';
import { handleResponse } from './handle_response';
const mockResponses = [{}, {}];
const mockAbortFns = [jest.fn(), jest.fn()];
const mockSearchFns = [
jest.fn(({ searchRequests }) => ({
searching: Promise.resolve(Array(searchRequests.length).fill(mockResponses[0])),
abort: mockAbortFns[0]
})),
jest.fn(({ searchRequests }) => ({
searching: Promise.resolve(Array(searchRequests.length).fill(mockResponses[1])),
abort: mockAbortFns[1]
}))
];
const mockSearchStrategies = mockSearchFns.map((search, i) => ({ search, id: i }));
jest.mock('./handle_response', () => ({
handleResponse: jest.fn((request, response) => response)
}));
jest.mock('../search_strategy', () => ({
getSearchStrategyForSearchRequest: request => mockSearchStrategies[request._searchStrategyId],
getSearchStrategyById: id => mockSearchStrategies[id]
}));
describe('callClient', () => {
beforeEach(() => {
handleResponse.mockClear();
mockAbortFns.forEach(fn => fn.mockClear());
mockSearchFns.forEach(fn => fn.mockClear());
});
test('Executes each search strategy with its group of matching requests', () => {
const searchRequests = [{
_searchStrategyId: 0
}, {
_searchStrategyId: 1
}, {
_searchStrategyId: 0
}, {
_searchStrategyId: 1
}];
callClient(searchRequests);
expect(mockSearchFns[0]).toBeCalled();
expect(mockSearchFns[0].mock.calls[0][0].searchRequests).toEqual([searchRequests[0], searchRequests[2]]);
expect(mockSearchFns[1]).toBeCalled();
expect(mockSearchFns[1].mock.calls[0][0].searchRequests).toEqual([searchRequests[1], searchRequests[3]]);
});
test('Passes the additional arguments it is given to the search strategy', () => {
const searchRequests = [{
_searchStrategyId: 0
}];
const args = { es: {}, config: {}, esShardTimeout: 0 };
callClient(searchRequests, [], args);
expect(mockSearchFns[0]).toBeCalled();
expect(mockSearchFns[0].mock.calls[0][0]).toEqual({ searchRequests, ...args });
});
test('Returns the responses in the original order', async () => {
const searchRequests = [{
_searchStrategyId: 1
}, {
_searchStrategyId: 0
}];
const responses = await Promise.all(callClient(searchRequests));
expect(responses).toEqual([mockResponses[1], mockResponses[0]]);
});
test('Calls handleResponse with each request and response', async () => {
const searchRequests = [{
_searchStrategyId: 0
}, {
_searchStrategyId: 1
}];
const responses = callClient(searchRequests);
await Promise.all(responses);
expect(handleResponse).toBeCalledTimes(2);
expect(handleResponse).toBeCalledWith(searchRequests[0], mockResponses[0]);
expect(handleResponse).toBeCalledWith(searchRequests[1], mockResponses[1]);
});
test('If passed an abortSignal, calls abort on the strategy if the signal is aborted', () => {
const searchRequests = [{
_searchStrategyId: 0
}, {
_searchStrategyId: 1
}];
const abortController = new AbortController();
const requestOptions = [{
abortSignal: abortController.signal
}];
callClient(searchRequests, requestOptions);
abortController.abort();
expect(mockAbortFns[0]).toBeCalled();
expect(mockAbortFns[1]).not.toBeCalled();
});
});
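Taken together, the tests above imply a calling pattern roughly like the following sketch. The request shape, the deps object, and the one-options-entry-per-request mapping are assumptions drawn from this suite, not from a production call site:

import { callClient } from './call_client';

function fetchWithAbort(searchRequests, deps /* { es, config, esShardTimeout } */) {
  const abortController = new AbortController();
  // One options entry per request; aborting the controller asks each strategy to abort its search.
  const requestOptions = searchRequests.map(() => ({ abortSignal: abortController.signal }));

  // callClient returns one entry per request, in the original order, resolvable with Promise.all.
  const responses = callClient(searchRequests, requestOptions, deps);
  return { responses: Promise.all(responses), abort: () => abortController.abort() };
}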

View file

@ -1,104 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import { i18n } from '@kbn/i18n';
import { EuiSpacer } from '@elastic/eui';
import { toastNotifications } from '../../notify';
import { RequestFailure } from './errors';
import { RequestStatus } from './req_status';
import { SearchError } from '../search_strategy/search_error';
import { ShardFailureOpenModalButton } from './components/shard_failure_open_modal_button';
export function CallResponseHandlersProvider(Promise) {
const ABORTED = RequestStatus.ABORTED;
const INCOMPLETE = RequestStatus.INCOMPLETE;
function callResponseHandlers(searchRequests, responses) {
return Promise.map(searchRequests, function (searchRequest, index) {
if (searchRequest === ABORTED || searchRequest.aborted) {
return ABORTED;
}
const response = responses[index];
if (response.timed_out) {
toastNotifications.addWarning({
title: i18n.translate('common.ui.courier.fetch.requestTimedOutNotificationMessage', {
defaultMessage: 'Data might be incomplete because your request timed out',
}),
});
}
if (response._shards && response._shards.failed) {
const title = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationMessage', {
defaultMessage: '{shardsFailed} of {shardsTotal} shards failed',
values: {
shardsFailed: response._shards.failed,
shardsTotal: response._shards.total,
},
});
const description = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationDescription', {
defaultMessage: 'The data you are seeing might be incomplete or wrong.',
});
const text = (
<>
{description}
<EuiSpacer size="s"/>
<ShardFailureOpenModalButton
request={searchRequest.fetchParams.body}
response={response}
title={title}
/>
</>
);
toastNotifications.addWarning({
title,
text,
});
}
function progress() {
if (searchRequest.isIncomplete()) {
return INCOMPLETE;
}
searchRequest.complete();
return response;
}
if (response.error) {
if (searchRequest.filterError(response)) {
return progress();
} else {
return searchRequest.handleFailure(
response.error instanceof SearchError
? response.error
: new RequestFailure(null, response)
);
}
}
return Promise.try(() => searchRequest.handleResponse(response)).then(progress);
});
}
return callResponseHandlers;
}

View file

@ -1,51 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { RequestStatus } from './req_status';
export function ContinueIncompleteProvider() {
const INCOMPLETE = RequestStatus.INCOMPLETE;
function continueIncompleteRequests(searchRequests, responses, fetchSearchResults) {
const incompleteSearchRequests = [];
responses.forEach(function (response, index) {
if (response === INCOMPLETE) {
incompleteSearchRequests.push(searchRequests[index]);
}
});
if (!incompleteSearchRequests.length) {
return responses;
}
return fetchSearchResults(incompleteSearchRequests)
.then(function (completedResponses) {
return responses.map(function (prevResponse) {
if (prevResponse !== INCOMPLETE) {
return prevResponse;
}
return completedResponses.shift();
});
});
}
return continueIncompleteRequests;
}

View file

@ -1,124 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { fatalError } from '../../notify';
import { CallClientProvider } from './call_client';
import { CallResponseHandlersProvider } from './call_response_handlers';
import { ContinueIncompleteProvider } from './continue_incomplete';
import { RequestStatus } from './req_status';
import { i18n } from '@kbn/i18n';
/**
* Fetch now provider should be used if you want the results searched and returned immediately.
 * This can be slightly inefficient if a large number of requests are queued up; we can batch these
* by using fetchSoon. This introduces a slight delay which allows other requests to queue up before
* sending out requests in a batch.
*
* @param Private
* @param Promise
* @return {fetchNow}
* @constructor
*/
export function FetchNowProvider(Private, Promise) {
// core tasks
const callClient = Private(CallClientProvider);
const callResponseHandlers = Private(CallResponseHandlersProvider);
const continueIncomplete = Private(ContinueIncompleteProvider);
const ABORTED = RequestStatus.ABORTED;
const INCOMPLETE = RequestStatus.INCOMPLETE;
function fetchNow(searchRequests) {
return fetchSearchResults(searchRequests.map(function (searchRequest) {
if (!searchRequest.started) {
return searchRequest;
}
return searchRequest.retry();
}))
.catch(error => {
// If any errors occur after the search requests have resolved, then we kill Kibana.
fatalError(error, 'Courier fetch');
});
}
function fetchSearchResults(searchRequests) {
function replaceAbortedRequests() {
searchRequests = searchRequests.map(searchRequest => {
if (searchRequest.aborted) {
return ABORTED;
}
return searchRequest;
});
}
replaceAbortedRequests();
return startRequests(searchRequests)
.then(function () {
replaceAbortedRequests();
return callClient(searchRequests)
.catch(() => {
// Silently swallow errors that result from search requests so the consumer can surface
// them as notifications instead of courier forcing fatal errors.
});
})
.then(function (responses) {
replaceAbortedRequests();
return callResponseHandlers(searchRequests, responses);
})
.then(function (responses) {
replaceAbortedRequests();
return continueIncomplete(searchRequests, responses, fetchSearchResults);
})
.then(function (responses) {
replaceAbortedRequests();
return responses.map(function (resp) {
switch (resp) {
case ABORTED:
return null;
case INCOMPLETE:
throw new Error(
i18n.translate('common.ui.courier.fetch.failedToClearRequestErrorMessage', {
defaultMessage: 'Failed to clear incomplete or duplicate request from responses.',
})
);
default:
return resp;
}
});
});
}
function startRequests(searchRequests) {
return Promise.map(searchRequests, function (searchRequest) {
if (searchRequest === ABORTED) {
return searchRequest;
}
return new Promise(function (resolve) {
const action = searchRequest.started ? searchRequest.continue : searchRequest.start;
resolve(action.call(searchRequest));
})
.catch(err => searchRequest.handleFailure(err));
});
}
return fetchNow;
}

View file

@ -17,41 +17,54 @@
* under the License.
*/
import _ from 'lodash';
import { searchRequestQueue } from '../search_request_queue';
import { FetchNowProvider } from './fetch_now';
import { callClient } from './call_client';
/**
* This is usually the right fetch provider to use, rather than FetchNowProvider, as this class introduces
* a slight delay in the request process to allow multiple requests to queue up (e.g. when a dashboard
* is loading).
* This function introduces a slight delay in the request process to allow multiple requests to queue
* up (e.g. when a dashboard is loading).
*/
export function FetchSoonProvider(Private, Promise, config) {
const fetchNow = Private(FetchNowProvider);
const fetch = () => fetchNow(searchRequestQueue.getPending());
const debouncedFetch = _.debounce(fetch, {
wait: 10,
maxWait: 50
});
/**
* Fetch a list of requests
* @param {array} requests - the requests to fetch
* @async
*/
this.fetchSearchRequests = (requests) => {
requests.forEach(req => req._setFetchRequested());
config.get('courier:batchSearches') ? debouncedFetch() : fetch();
return Promise.all(requests.map(req => req.getCompletePromise()));
};
/**
* Return a promise that resembles the success of the fetch completing so we can execute
* logic based on this state change. Individual errors are routed to their respective requests.
*/
this.fetchQueued = () => {
return this.fetchSearchRequests(searchRequestQueue.getStartable());
};
export async function fetchSoon(request, options, { es, config, esShardTimeout }) {
const delay = config.get('courier:batchSearches') ? 50 : 0;
return delayedFetch(request, options, { es, config, esShardTimeout }, delay);
}
/**
* Delays executing a function for a given amount of time, and returns a promise that resolves
* with the result.
* @param fn The function to invoke
* @param ms The number of milliseconds to wait
* @return Promise<any> A promise that resolves with the result of executing the function
*/
function delay(fn, ms) {
return new Promise(resolve => {
setTimeout(() => resolve(fn()), ms);
});
}
// The current batch/queue of requests to fetch
let requestsToFetch = [];
let requestOptions = [];
// The in-progress fetch (if there is one)
let fetchInProgress = null;
/**
* Delay fetching for a given amount of time, while batching up the requests to be fetched.
* Returns a promise that resolves with the response for the given request.
 * @param request The request to fetch
 * @param options Per-request options (e.g. an abortSignal) that are forwarded to callClient
* @param ms The number of milliseconds to wait (and batch requests)
* @return Promise<SearchResponse> The response for the given request
*/
async function delayedFetch(request, options, { es, config, esShardTimeout }, ms) {
const i = requestsToFetch.length;
requestsToFetch = [...requestsToFetch, request];
requestOptions = [...requestOptions, options];
const responses = await (fetchInProgress = fetchInProgress || delay(() => {
const response = callClient(requestsToFetch, requestOptions, { es, config, esShardTimeout });
requestsToFetch = [];
requestOptions = [];
fetchInProgress = null;
return response;
}, ms));
return responses[i];
}
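A brief sketch of how two callers end up sharing one batch under this scheme. The request objects and the deps argument here are placeholders; assuming 'courier:batchSearches' is enabled, both calls fall inside the same 50ms window, reach callClient together, and each caller still receives only its own response:

import { fetchSoon } from './fetch_soon';

async function fetchBoth(requestA, requestB, deps /* { es, config, esShardTimeout } */) {
  // Issued in the same tick, so both join the single in-progress batch created by delayedFetch.
  const [responseA, responseB] = await Promise.all([
    fetchSoon(requestA, {}, deps),
    fetchSoon(requestB, {}, deps),
  ]);
  return [responseA, responseB];
}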

View file

@ -0,0 +1,140 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { fetchSoon } from './fetch_soon';
import { callClient } from './call_client';
function getMockConfig(config) {
const entries = Object.entries(config);
return new Map(entries);
}
const mockResponses = {
'foo': {},
'bar': {},
'baz': {},
};
jest.useFakeTimers();
jest.mock('./call_client', () => ({
callClient: jest.fn(requests => {
    // Allow a request object to specify which mockResponse it wants to receive (_mockResponseId)
    const responses = requests.map(request => Promise.resolve(mockResponses[request._mockResponseId]));
return Promise.resolve(responses);
})
}));
describe('fetchSoon', () => {
beforeEach(() => {
callClient.mockClear();
});
test('should delay by 0ms if config is set to not batch searches', () => {
const config = getMockConfig({
'courier:batchSearches': false
});
const request = {};
const options = {};
fetchSoon(request, options, { config });
expect(callClient).not.toBeCalled();
jest.advanceTimersByTime(0);
expect(callClient).toBeCalled();
});
test('should delay by 50ms if config is set to batch searches', () => {
const config = getMockConfig({
'courier:batchSearches': true
});
const request = {};
const options = {};
fetchSoon(request, options, { config });
expect(callClient).not.toBeCalled();
jest.advanceTimersByTime(0);
expect(callClient).not.toBeCalled();
jest.advanceTimersByTime(50);
expect(callClient).toBeCalled();
});
test('should send a batch of requests to callClient', () => {
const config = getMockConfig({
'courier:batchSearches': true
});
const requests = [{ foo: 1 }, { foo: 2 }];
const options = [{ bar: 1 }, { bar: 2 }];
requests.forEach((request, i) => {
fetchSoon(request, options[i], { config });
});
jest.advanceTimersByTime(50);
expect(callClient).toBeCalledTimes(1);
expect(callClient.mock.calls[0][0]).toEqual(requests);
expect(callClient.mock.calls[0][1]).toEqual(options);
});
test('should return the response to the corresponding call for multiple batched requests', async () => {
const config = getMockConfig({
'courier:batchSearches': true
});
const requests = [{ _mockResponseId: 'foo' }, { _mockResponseId: 'bar' }];
const promises = requests.map(request => {
return fetchSoon(request, {}, { config });
});
jest.advanceTimersByTime(50);
const results = await Promise.all(promises);
expect(results).toEqual([mockResponses.foo, mockResponses.bar]);
});
test('should wait for the previous batch to start before starting a new batch', () => {
const config = getMockConfig({
'courier:batchSearches': true
});
const firstBatch = [{ foo: 1 }, { foo: 2 }];
const secondBatch = [{ bar: 1 }, { bar: 2 }];
firstBatch.forEach(request => {
fetchSoon(request, {}, { config });
});
jest.advanceTimersByTime(50);
secondBatch.forEach(request => {
fetchSoon(request, {}, { config });
});
expect(callClient).toBeCalledTimes(1);
expect(callClient.mock.calls[0][0]).toEqual(firstBatch);
jest.advanceTimersByTime(50);
expect(callClient).toBeCalledTimes(2);
expect(callClient.mock.calls[1][0]).toEqual(secondBatch);
});
});

View file

@ -17,6 +17,8 @@
* under the License.
*/
const sessionId = Date.now();
export function getMSearchParams(config) {
return {
rest_total_hits_as_int: true,
@ -25,13 +27,13 @@ export function getMSearchParams(config) {
};
}
export function getSearchParams(config, sessionId, esShardTimeout) {
export function getSearchParams(config, esShardTimeout) {
return {
rest_total_hits_as_int: true,
ignore_unavailable: true,
ignore_throttled: getIgnoreThrottled(config),
max_concurrent_shard_requests: getMaxConcurrentShardRequests(config),
preference: getPreference(config, sessionId),
preference: getPreference(config),
timeout: getTimeout(esShardTimeout),
};
}
@ -45,7 +47,7 @@ export function getMaxConcurrentShardRequests(config) {
return maxConcurrentShardRequests > 0 ? maxConcurrentShardRequests : undefined;
}
export function getPreference(config, sessionId) {
export function getPreference(config) {
const setRequestPreference = config.get('courier:setRequestPreference');
if (setRequestPreference === 'sessionId') return sessionId;
return setRequestPreference === 'custom' ? config.get('courier:customRequestPreference') : undefined;
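A small usage sketch of the de-angularized helpers above. The config stub and the values shown are illustrative assumptions (the real uiSettings client supplies `get`); the point is that the preference now falls back to a module-level sessionId captured at load time instead of an injected Angular service:

import { getSearchParams, getPreference, getTimeout } from './get_search_params';

// Minimal config stub with just a `get` method, as the tests in this diff use.
const config = {
  get: key => (key === 'courier:setRequestPreference' ? 'sessionId' : undefined),
};

getPreference(config);                      // the module-level sessionId (Date.now() at module load)
getTimeout(30000);                          // '30000ms'
getSearchParams(config, 30000).preference;  // same sessionId value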

View file

@ -99,10 +99,10 @@ describe('getSearchParams', () => {
test('includes timeout according to esShardTimeout if greater than 0', () => {
const config = getConfigStub();
let searchParams = getSearchParams(config, null, 0);
let searchParams = getSearchParams(config, 0);
expect(searchParams.timeout).toBe(undefined);
searchParams = getSearchParams(config, null, 100);
searchParams = getSearchParams(config, 100);
expect(searchParams.timeout).toBe('100ms');
});
});

View file

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import { toastNotifications } from '../../notify/toasts';
import { i18n } from '@kbn/i18n';
import { EuiSpacer } from '@elastic/eui';
import { ShardFailureOpenModalButton } from './components/shard_failure_open_modal_button';
export function handleResponse(request, response) {
if (response.timed_out) {
toastNotifications.addWarning({
title: i18n.translate('common.ui.courier.fetch.requestTimedOutNotificationMessage', {
defaultMessage: 'Data might be incomplete because your request timed out',
}),
});
}
if (response._shards && response._shards.failed) {
const title = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationMessage', {
defaultMessage: '{shardsFailed} of {shardsTotal} shards failed',
values: {
shardsFailed: response._shards.failed,
shardsTotal: response._shards.total,
},
});
const description = i18n.translate('common.ui.courier.fetch.shardsFailedNotificationDescription', {
defaultMessage: 'The data you are seeing might be incomplete or wrong.',
});
const text = (
<>
{description}
<EuiSpacer size="s"/>
<ShardFailureOpenModalButton
request={request.body}
response={response}
title={title}
/>
</>
);
toastNotifications.addWarning({
title,
text,
});
}
return response;
}
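As the callClient tests earlier in this diff show, every raw response is routed through handleResponse before being returned to the caller. A simplified sketch of that wiring (not the actual callClient body) might look like:

import { handleResponse } from './handle_response';

// Simplified: pair each search request with its raw ES response and surface any warnings.
function toCallerResponses(searchRequests, rawResponses) {
  return rawResponses.map((response, i) => handleResponse(searchRequests[i], response));
}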

View file

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { handleResponse } from './handle_response';
import { toastNotifications } from '../../notify/toasts';
jest.mock('../../notify/toasts', () => {
return {
toastNotifications: {
addWarning: jest.fn()
}
};
});
jest.mock('@kbn/i18n', () => {
return {
i18n: {
translate: (id, { defaultMessage }) => defaultMessage
}
};
});
describe('handleResponse', () => {
beforeEach(() => {
toastNotifications.addWarning.mockReset();
});
test('should notify if timed out', () => {
const request = { body: {} };
const response = {
timed_out: true
};
const result = handleResponse(request, response);
expect(result).toBe(response);
expect(toastNotifications.addWarning).toBeCalled();
expect(toastNotifications.addWarning.mock.calls[0][0].title).toMatch('request timed out');
});
test('should notify if shards failed', () => {
const request = { body: {} };
const response = {
_shards: {
failed: true
}
};
const result = handleResponse(request, response);
expect(result).toBe(response);
expect(toastNotifications.addWarning).toBeCalled();
expect(toastNotifications.addWarning.mock.calls[0][0].title).toMatch('shards failed');
});
test('returns the response', () => {
const request = {};
const response = {};
const result = handleResponse(request, response);
expect(result).toBe(response);
});
});

View file

@ -17,5 +17,5 @@
* under the License.
*/
export { FetchSoonProvider } from './fetch_soon';
export * from './fetch_soon';
export * from './get_search_params';

View file

@ -1,28 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { SearchRequestProvider } from './request';
export function IsRequestProvider(Private) {
const SearchRequest = Private(SearchRequestProvider);
return function isRequest(obj) {
return obj instanceof SearchRequest;
};
}

View file

@ -1,23 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export const RequestStatus = {
ABORTED: 'aborted',
INCOMPLETE: 'incomplete',
};

View file

@ -1,20 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { SearchRequestProvider } from './search_request';

View file

@ -1,78 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import ngMock from 'ng_mock';
import sinon from 'sinon';
import expect from '@kbn/expect';
import { SearchRequestProvider } from '../search_request';
import { searchRequestQueue } from '../../../../search_request_queue';
describe('ui/courier/fetch search request', () => {
beforeEach(ngMock.module('kibana'));
afterEach(() => {
searchRequestQueue.removeAll();
});
it('throws exception when created without errorHandler', ngMock.inject((Private) => {
const SearchReq = Private(SearchRequestProvider);
let caughtError = false;
try {
new SearchReq({ source: {} });
} catch(error) {
caughtError = true;
}
expect(caughtError).to.be(true);
}));
describe('start', () => {
it('calls this.source.requestIsStarting(request)', ngMock.inject((Private) => {
const SearchReq = Private(SearchRequestProvider);
const spy = sinon.spy(() => Promise.resolve());
const source = { requestIsStarting: spy };
const req = new SearchReq({ source, errorHandler: () => {} });
expect(req.start()).to.have.property('then').a('function');
sinon.assert.calledOnce(spy);
sinon.assert.calledWithExactly(spy, req);
}));
});
describe('clone', () => {
it('returns a search request with identical constructor arguments', ngMock.inject((Private) => {
const SearchRequest = Private(SearchRequestProvider);
const source = {};
const errorHandler = () => {};
const defer = {};
const originalRequest = new SearchRequest({ source, errorHandler, defer });
const clonedRequest = originalRequest.clone();
expect(clonedRequest).not.to.be(originalRequest);
expect(clonedRequest.source).to.be(source);
expect(clonedRequest.errorHandler).to.be(errorHandler);
expect(clonedRequest.defer).to.be(defer);
}));
});
});

View file

@ -1,20 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { SearchRequestProvider } from './search_request';

View file

@ -1,205 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import moment from 'moment';
import { searchRequestQueue } from '../../../search_request_queue';
import { createDefer } from 'ui/promises';
import { i18n } from '@kbn/i18n';
export function SearchRequestProvider(Promise) {
class SearchRequest {
constructor({ source, defer, errorHandler }) {
if (!errorHandler) {
throw new Error(
i18n.translate('common.ui.courier.fetch.requireErrorHandlerErrorMessage', {
defaultMessage: '{errorHandler} is required',
values: { errorHandler: 'errorHandler' }
})
);
}
this.errorHandler = errorHandler;
this.source = source;
this.defer = defer || createDefer(Promise);
this.abortedDefer = createDefer(Promise);
this.type = 'search';
// Track execution time.
this.moment = undefined;
this.ms = undefined;
// Lifecycle state.
this.started = false;
this.stopped = false;
this._isFetchRequested = false;
searchRequestQueue.add(this);
}
/**
* Called by the searchPoll to find requests that should be sent to the
     * fetchSoon module. When a request is sent to fetchSoon its _isFetchRequested flag
     * is set, and this consults that flag so requests are not sent to fetchSoon
* multiple times.
*
* @return {Boolean}
*/
canStart() {
if (this.source._fetchDisabled) {
return false;
}
if (this.stopped) {
return false;
}
if (this._isFetchRequested) {
return false;
}
return true;
}
/**
* Used to find requests that were previously sent to the fetchSoon module but
* have not been started yet, so they can be started.
*
* @return {Boolean}
*/
isFetchRequestedAndPending() {
if (this.started) {
return false;
}
return this._isFetchRequested;
}
/**
* Called by the fetchSoon module when this request has been sent to
* be fetched. At that point the request is somewhere between `ready-to-start`
* and `started`. The fetch module then waits a short period of time to
* allow requests to build up in the request queue, and then immediately
* fetches all requests that return true from `isFetchRequestedAndPending()`
*
* @return {undefined}
*/
_setFetchRequested() {
this._isFetchRequested = true;
}
start() {
if (this.started) {
throw new TypeError(
i18n.translate('common.ui.courier.fetch.unableStartRequestErrorMessage', {
defaultMessage: 'Unable to start request because it has already started',
})
);
}
this.started = true;
this.moment = moment();
return this.source.requestIsStarting(this);
}
getFetchParams() {
return this.source._flatten();
}
filterError() {
return false;
}
handleResponse(resp) {
this.success = true;
this.resp = resp;
}
handleFailure(error) {
this.success = false;
      this.resp = (error && error.resp) || error;
return this.errorHandler(this, error);
}
isIncomplete() {
return false;
}
continue() {
throw new Error(
i18n.translate('common.ui.courier.fetch.unableContinueRequestErrorMessage', {
defaultMessage: 'Unable to continue {type} request',
values: { type: this.type }
})
);
}
retry() {
const clone = this.clone();
this.abort();
return clone;
}
_markStopped() {
if (this.stopped) return;
this.stopped = true;
this.source.requestIsStopped(this);
searchRequestQueue.remove(this);
}
abort() {
this._markStopped();
this.aborted = true;
const error = new Error('The request was aborted.');
error.name = 'AbortError';
this.abortedDefer.resolve(error);
this.abortedDefer = null;
this.defer.reject(error);
this.defer = null;
}
whenAborted(cb) {
this.abortedDefer.promise.then(cb);
}
complete() {
this._markStopped();
this.ms = this.moment.diff() * -1;
this.defer.resolve(this.resp);
}
getCompletePromise() {
return this.defer.promise;
}
getCompleteOrAbortedPromise() {
return Promise.race([ this.defer.promise, this.abortedDefer.promise ]);
}
clone = () => {
const { source, defer, errorHandler } = this;
return new SearchRequest({ source, defer, errorHandler });
};
}
return SearchRequest;
}

View file

@ -1,20 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { SerializeFetchParamsProvider } from './serialize_fetch_params_provider';

View file

@ -1,60 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { getPreference, getTimeout } from '../../get_search_params';
/**
*
* @param requestsFetchParams {Array.<Object>}
* @param Promise
* @param sessionId
* @return {Promise.<string>}
*/
export function serializeFetchParams(
requestsFetchParams,
Promise,
sessionId,
config,
esShardTimeout) {
const promises = requestsFetchParams.map(function (fetchParams) {
return Promise.resolve(fetchParams.index)
.then(function (indexPattern) {
const body = {
timeout: getTimeout(esShardTimeout),
...fetchParams.body || {},
};
const index = (indexPattern && indexPattern.title) ? indexPattern.title : indexPattern;
const header = {
index,
search_type: fetchParams.search_type,
ignore_unavailable: true,
preference: getPreference(config, sessionId)
};
return `${JSON.stringify(header)}\n${JSON.stringify(body)}`;
});
});
return Promise.all(promises).then(function (requests) {
return requests.join('\n') + '\n';
});
}

View file

@ -1,152 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { serializeFetchParams } from './serialize_fetch_params';
import _ from 'lodash';
const DEFAULT_SESSION_ID = '1';
function serializeFetchParamsWithDefaults(paramOverrides) {
const paramDefaults = {
requestFetchParams: [],
Promise,
sessionId: DEFAULT_SESSION_ID,
config: {
get: () => {
return 'sessionId';
}
},
timeout: 100,
};
const params = { ...paramDefaults, ...paramOverrides };
return serializeFetchParams(
params.requestFetchParams,
Promise,
params.sessionId,
params.config,
params.timeout,
);
}
describe('when indexList is not empty', () => {
test('includes the index', () => {
const requestFetchParams = [
{
index: ['logstash-123'],
type: 'blah',
search_type: 'blah2',
body: { foo: 'bar', $foo: 'bar' }
}
];
return serializeFetchParamsWithDefaults({ requestFetchParams }).then(value => {
expect(_.includes(value, '"index":["logstash-123"]')).toBe(true);
});
});
});
describe('headers', () => {
const requestFetchParams = [
{
index: ['logstash-123'],
type: 'blah',
search_type: 'blah2',
body: { foo: 'bar' }
}
];
const getHeader = async (paramOverrides) => {
const request = await serializeFetchParamsWithDefaults(paramOverrides);
const requestParts = request.split('\n');
if (requestParts.length < 2) {
      throw new Error('fetch Body does not contain expected format: header newline body.');
}
return JSON.parse(requestParts[0]);
};
describe('search request preference', () => {
test('should be set to sessionId when courier:setRequestPreference is "sessionId"', async () => {
const config = {
get: () => {
return 'sessionId';
}
};
const header = await getHeader({ requestFetchParams, config });
expect(header.preference).toBe(DEFAULT_SESSION_ID);
});
test('should be set to custom string when courier:setRequestPreference is "custom"', async () => {
const CUSTOM_PREFERENCE = '_local';
const config = {
get: (key) => {
if (key === 'courier:setRequestPreference') {
return 'custom';
} else if (key === 'courier:customRequestPreference') {
return CUSTOM_PREFERENCE;
}
}
};
const header = await getHeader({ requestFetchParams, config });
expect(header.preference).toBe(CUSTOM_PREFERENCE);
});
test('should not be set when courier:setRequestPreference is "none"', async () => {
const config = {
get: () => {
return 'none';
}
};
const header = await getHeader({ requestFetchParams, config });
expect(header.preference).toBe(undefined);
});
});
});
describe('body', () => {
const requestFetchParams = [
{
index: ['logstash-123'],
type: 'blah',
search_type: 'blah2',
body: { foo: 'bar' }
}
];
const getBody = async (paramOverrides) => {
const request = await serializeFetchParamsWithDefaults(paramOverrides);
const requestParts = request.split('\n');
if (requestParts.length < 2) {
throw new Error('fetch Body does not contain expected format: header newline body.');
}
return JSON.parse(requestParts[1]);
};
describe('timeout', () => {
test('should set a timeout as specified', async () => {
const request = await getBody({ requestFetchParams, timeout: 200 });
expect(request).toHaveProperty('timeout', '200ms');
});
test('should not set a timeout when timeout is 0', async () => {
const request = await getBody({ requestFetchParams, timeout: 0 });
expect(request.timeout).toBe(undefined);
});
});
});

View file

@ -1,31 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { serializeFetchParams } from './serialize_fetch_params';
export function SerializeFetchParamsProvider(Promise, sessionId, config, esShardTimeout) {
return (fetchParams) => (
serializeFetchParams(
fetchParams,
Promise,
sessionId,
config,
esShardTimeout)
);
}

View file

@ -17,7 +17,7 @@
* under the License.
*/
export { SearchSourceProvider } from './search_source';
export { SearchSource } from './search_source';
export {
addSearchStrategy,

View file

@ -19,98 +19,50 @@
import _ from 'lodash';
import { fatalError } from '../../notify';
import '../../promises';
import { searchRequestQueue } from '../search_request_queue';
import { FetchSoonProvider } from '../fetch';
import { timefilter } from 'ui/timefilter';
export function SearchPollProvider(Private, Promise) {
const fetchSoon = Private(FetchSoonProvider);
class SearchPoll {
constructor() {
this._isPolling = false;
this._intervalInMs = undefined;
this._timerId = null;
this._searchPromise = null;
this._isIntervalFasterThanSearch = false;
}
setIntervalInMs = intervalInMs => {
this._intervalInMs = _.parseInt(intervalInMs);
};
resume = () => {
this._isPolling = true;
this.resetTimer();
};
pause = () => {
this._isPolling = false;
this.clearTimer();
};
resetTimer = () => {
// Cancel the pending search and schedule a new one.
this.clearTimer();
if (this._isPolling) {
this._timerId = setTimeout(this._search, this._intervalInMs);
}
};
clearTimer = () => {
// Cancel the pending search, if there is one.
if (this._timerId) {
clearTimeout(this._timerId);
this._timerId = null;
}
};
_search = () => {
// If our interval is faster than the rate at which searches return results, then trigger
// a new search as soon as the results come back.
if (this._searchPromise) {
this._isIntervalFasterThanSearch = true;
return;
}
// Schedule another search.
this.resetTimer();
// We use resolve() here instead of try() because the latter won't trigger a $digest
// when the promise resolves.
this._searchPromise = Promise.resolve().then(() => {
timefilter.notifyShouldFetch();
const requests = searchRequestQueue.getInactive();
// The promise returned from fetchSearchRequests() only resolves when the requests complete.
// We want to continue even if the requests abort so we return a different promise.
fetchSoon.fetchSearchRequests(requests);
return Promise.all(
requests.map(request => request.getCompleteOrAbortedPromise())
);
})
.then(() => {
this._searchPromise = null;
// If the search response comes back before the interval fires, then we'll wait
// for the interval and let it kick off the next search. But if the interval fires before
// the search returns results, then we'll need to wait for the search to return results
// and then kick off another search again. A new search will also reset the interval.
if (this._isIntervalFasterThanSearch) {
this._isIntervalFasterThanSearch = false;
this._search();
}
})
.catch(err => {
// If there was a problem, then kill Kibana.
fatalError(err);
});
};
export class SearchPoll {
constructor() {
this._isPolling = false;
this._intervalInMs = undefined;
this._timerId = null;
}
return new SearchPoll();
setIntervalInMs = intervalInMs => {
this._intervalInMs = _.parseInt(intervalInMs);
};
resume = () => {
this._isPolling = true;
this.resetTimer();
};
pause = () => {
this._isPolling = false;
this.clearTimer();
};
resetTimer = () => {
// Cancel the pending search and schedule a new one.
this.clearTimer();
if (this._isPolling) {
this._timerId = setTimeout(this._search, this._intervalInMs);
}
};
clearTimer = () => {
// Cancel the pending search, if there is one.
if (this._timerId) {
clearTimeout(this._timerId);
this._timerId = null;
}
};
_search = () => {
// Schedule another search.
this.resetTimer();
timefilter.notifyShouldFetch();
};
}
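A minimal sketch of driving the slimmed-down SearchPoll (the import path and the interval value are illustrative assumptions): it no longer tracks in-flight search promises and simply nudges the timefilter on every tick, leaving consumers to re-fetch in response.

import { SearchPoll } from './search_poll'; // path assumed for illustration

const searchPoll = new SearchPoll();
searchPoll.setIntervalInMs(10000); // poll every 10 seconds
searchPoll.resume();               // each tick calls timefilter.notifyShouldFetch()

// later, e.g. when auto-refresh is switched off:
searchPoll.pause();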

View file

@ -1,57 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import ngMock from 'ng_mock';
import expect from '@kbn/expect';
import sinon from 'sinon';
import { searchRequestQueue } from '../search_request_queue';
describe('Courier Request Queue', function () {
beforeEach(ngMock.module('kibana'));
beforeEach(() => searchRequestQueue.removeAll());
after(() => searchRequestQueue.removeAll());
class MockReq {
constructor(startable = true) {
this.source = {};
this.canStart = sinon.stub().returns(startable);
}
}
describe('#getStartable()', function () {
it('returns only startable requests', function () {
searchRequestQueue.add(new MockReq(false));
searchRequestQueue.add(new MockReq(true));
expect(searchRequestQueue.getStartable()).to.have.length(1);
});
});
// Note: I'm not convinced this discrepancy between how we calculate startable vs inactive requests makes any sense.
  // I'm only testing here that the current (very old) code continues to behave how it always did, but it may turn out
// that we can clean this up, or remove this.
describe('#getInactive()', function () {
it('returns only requests with started = false', function () {
searchRequestQueue.add({ started: true });
searchRequestQueue.add({ started: false });
searchRequestQueue.add({ started: true });
expect(searchRequestQueue.getInactive()).to.have.length(1);
});
});
});

View file

@ -1,20 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { searchRequestQueue } from './search_request_queue';

View file

@ -1,70 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
class SearchRequestQueue {
constructor() {
    // Queue of pending requests; requests are removed as they are processed by fetch.[sourceType]().
this._searchRequests = [];
}
getCount() {
return this._searchRequests.length;
}
add(searchRequest) {
this._searchRequests.push(searchRequest);
}
remove(searchRequest) {
// Remove all matching search requests.
this._searchRequests = this._searchRequests.filter(
existingSearchRequest => existingSearchRequest !== searchRequest
);
}
removeAll() {
this._searchRequests.length = 0;
}
abortAll() {
this._searchRequests.forEach(searchRequest => searchRequest.abort());
}
getAll() {
return this._searchRequests;
}
getSearchRequestAt(index) {
return this._searchRequests[index];
}
getInactive() {
return this._searchRequests.filter(searchRequest => !searchRequest.started);
}
getStartable() {
return this._searchRequests.filter(searchRequest => searchRequest.canStart());
}
getPending() {
return this._searchRequests.filter(searchRequest => searchRequest.isFetchRequestedAndPending());
}
}
export const searchRequestQueue = new SearchRequestQueue();

View file

@ -20,18 +20,17 @@
import '../../../private';
import ngMock from 'ng_mock';
import expect from '@kbn/expect';
import { NormalizeSortRequestProvider } from '../_normalize_sort_request';
import { normalizeSortRequest } from '../_normalize_sort_request';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
import _ from 'lodash';
describe('SearchSource#normalizeSortRequest', function () {
let normalizeSortRequest;
let indexPattern;
let normalizedSort;
const defaultSortOptions = { unmapped_type: 'boolean' };
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private) {
normalizeSortRequest = Private(NormalizeSortRequestProvider);
indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
normalizedSort = [{
@ -44,7 +43,7 @@ describe('SearchSource#normalizeSortRequest', function () {
it('should return an array', function () {
const sortable = { someField: 'desc' };
const result = normalizeSortRequest(sortable, indexPattern);
const result = normalizeSortRequest(sortable, indexPattern, defaultSortOptions);
expect(result).to.be.an(Array);
expect(result).to.eql(normalizedSort);
// ensure object passed in is not mutated
@ -53,7 +52,7 @@ describe('SearchSource#normalizeSortRequest', function () {
});
it('should make plain string sort into the more verbose format', function () {
const result = normalizeSortRequest([{ someField: 'desc' }], indexPattern);
const result = normalizeSortRequest([{ someField: 'desc' }], indexPattern, defaultSortOptions);
expect(result).to.eql(normalizedSort);
});
@ -64,7 +63,7 @@ describe('SearchSource#normalizeSortRequest', function () {
unmapped_type: 'boolean'
}
}];
const result = normalizeSortRequest(sortState, indexPattern);
const result = normalizeSortRequest(sortState, indexPattern, defaultSortOptions);
expect(result).to.eql(normalizedSort);
});
@ -86,11 +85,11 @@ describe('SearchSource#normalizeSortRequest', function () {
}
};
let result = normalizeSortRequest(sortState, indexPattern);
let result = normalizeSortRequest(sortState, indexPattern, defaultSortOptions);
expect(result).to.eql([normalizedSort]);
sortState[fieldName] = { order: direction };
result = normalizeSortRequest([sortState], indexPattern);
result = normalizeSortRequest([sortState], indexPattern, defaultSortOptions);
expect(result).to.eql([normalizedSort]);
});
@ -105,7 +104,7 @@ describe('SearchSource#normalizeSortRequest', function () {
order: direction,
unmapped_type: 'boolean'
};
const result = normalizeSortRequest([sortState], indexPattern);
const result = normalizeSortRequest([sortState], indexPattern, defaultSortOptions);
expect(result).to.eql([normalizedSort]);
});
@ -118,7 +117,7 @@ describe('SearchSource#normalizeSortRequest', function () {
}
}];
const result = normalizeSortRequest(sortable, indexPattern);
const result = normalizeSortRequest(sortable, indexPattern, defaultSortOptions);
expect(_.isEqual(result, expected)).to.be.ok();
});

View file

@ -1,351 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import ngMock from 'ng_mock';
import expect from '@kbn/expect';
import sinon from 'sinon';
import { searchRequestQueue } from '../../search_request_queue';
import { SearchSourceProvider } from '../search_source';
import StubIndexPattern from 'test_utils/stub_index_pattern';
function timeout() {
return new Promise(resolve => {
setTimeout(resolve);
});
}
describe('SearchSource', function () {
require('test_utils/no_digest_promises').activateForSuite();
let config;
let SearchSource;
let indexPattern;
let indexPattern2;
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (Private, _config_) {
config = _config_;
SearchSource = Private(SearchSourceProvider);
indexPattern = new StubIndexPattern('test-*', cfg => cfg, null, []);
indexPattern2 = new StubIndexPattern('test2-*', cfg => cfg, null, []);
expect(indexPattern).to.not.be(indexPattern2);
}));
beforeEach(() => searchRequestQueue.removeAll());
after(() => searchRequestQueue.removeAll());
describe('#onResults()', function () {
it('adds a request to the searchRequestQueue', function () {
const searchSource = new SearchSource();
expect(searchRequestQueue.getCount()).to.be(0);
searchSource.onResults();
expect(searchRequestQueue.getCount()).to.be(1);
});
it('returns a promise that is resolved with the results', function () {
const searchSource = new SearchSource();
const fakeResults = {};
const promise = searchSource.onResults().then((results) => {
expect(results).to.be(fakeResults);
});
const searchRequest = searchRequestQueue.getSearchRequestAt(0);
searchRequest.defer.resolve(fakeResults);
return promise;
});
});
describe('#destroy()', function () {
it('aborts all startable requests', function () {
const searchSource = new SearchSource();
searchSource.onResults();
const searchRequest = searchRequestQueue.getSearchRequestAt(0);
sinon.stub(searchRequest, 'canStart').returns(true);
searchSource.destroy();
expect(searchRequestQueue.getCount()).to.be(0);
});
it('aborts all non-startable requests', function () {
const searchSource = new SearchSource();
searchSource.onResults();
const searchRequest = searchRequestQueue.getSearchRequestAt(0);
sinon.stub(searchRequest, 'canStart').returns(false);
searchSource.destroy();
expect(searchRequestQueue.getCount()).to.be(0);
});
});
describe('#setField()', function () {
it('sets the value for the property', function () {
const searchSource = new SearchSource();
searchSource.setField('aggs', 5);
expect(searchSource.getField('aggs')).to.be(5);
});
it('throws an error if the property is not accepted', function () {
const searchSource = new SearchSource();
expect(() => searchSource.setField('index', 5)).to.throwError();
});
});
describe('#getField()', function () {
it('gets the value for the property', function () {
const searchSource = new SearchSource();
searchSource.setField('aggs', 5);
expect(searchSource.getField('aggs')).to.be(5);
});
it('throws an error if the property is not accepted', function () {
const searchSource = new SearchSource();
expect(() => searchSource.getField('unacceptablePropName')).to.throwError();
});
});
describe(`#setField('index')`, function () {
describe('auto-sourceFiltering', function () {
describe('new index pattern assigned', function () {
it('generates a searchSource filter', function () {
const searchSource = new SearchSource();
expect(searchSource.getField('index')).to.be(undefined);
expect(searchSource.getField('source')).to.be(undefined);
searchSource.setField('index', indexPattern);
expect(searchSource.getField('index')).to.be(indexPattern);
expect(searchSource.getField('source')).to.be.a('function');
});
it('removes created searchSource filter on removal', function () {
const searchSource = new SearchSource();
searchSource.setField('index', indexPattern);
searchSource.setField('index', null);
expect(searchSource.getField('index')).to.be(undefined);
expect(searchSource.getField('source')).to.be(undefined);
});
});
describe('new index pattern assigned over another', function () {
it('replaces searchSource filter with new', function () {
const searchSource = new SearchSource();
searchSource.setField('index', indexPattern);
const searchSourceFilter1 = searchSource.getField('source');
searchSource.setField('index', indexPattern2);
expect(searchSource.getField('index')).to.be(indexPattern2);
expect(searchSource.getField('source')).to.be.a('function');
expect(searchSource.getField('source')).to.not.be(searchSourceFilter1);
});
it('removes created searchSource filter on removal', function () {
const searchSource = new SearchSource();
searchSource.setField('index', indexPattern);
searchSource.setField('index', indexPattern2);
searchSource.setField('index', null);
expect(searchSource.getField('index')).to.be(undefined);
expect(searchSource.getField('source')).to.be(undefined);
});
});
describe('ip assigned before custom searchSource filter', function () {
it('custom searchSource filter becomes new searchSource', function () {
const searchSource = new SearchSource();
const football = {};
searchSource.setField('index', indexPattern);
expect(searchSource.getField('source')).to.be.a('function');
searchSource.setField('source', football);
expect(searchSource.getField('index')).to.be(indexPattern);
expect(searchSource.getField('source')).to.be(football);
});
it('custom searchSource stays after removal', function () {
const searchSource = new SearchSource();
const football = {};
searchSource.setField('index', indexPattern);
searchSource.setField('source', football);
searchSource.setField('index', null);
expect(searchSource.getField('index')).to.be(undefined);
expect(searchSource.getField('source')).to.be(football);
});
});
describe('ip assigned after custom searchSource filter', function () {
it('leaves the custom filter in place', function () {
const searchSource = new SearchSource();
const football = {};
searchSource.setField('source', football);
searchSource.setField('index', indexPattern);
expect(searchSource.getField('index')).to.be(indexPattern);
expect(searchSource.getField('source')).to.be(football);
});
it('custom searchSource stays after removal', function () {
const searchSource = new SearchSource();
const football = {};
searchSource.setField('source', football);
searchSource.setField('index', indexPattern);
searchSource.setField('index', null);
expect(searchSource.getField('index')).to.be(undefined);
expect(searchSource.getField('source')).to.be(football);
});
});
});
});
describe('#onRequestStart()', () => {
it('should be called when starting a request', async () => {
const searchSource = new SearchSource();
const fn = sinon.spy();
searchSource.onRequestStart(fn);
const request = {};
searchSource.requestIsStarting(request);
await timeout();
expect(fn.calledWith(searchSource, request)).to.be(true);
});
it('should not be called on parent searchSource', async () => {
const parent = new SearchSource();
const searchSource = new SearchSource().setParent(parent);
const fn = sinon.spy();
searchSource.onRequestStart(fn);
const parentFn = sinon.spy();
parent.onRequestStart(parentFn);
const request = {};
searchSource.requestIsStarting(request);
await timeout();
expect(fn.calledWith(searchSource, request)).to.be(true);
expect(parentFn.notCalled).to.be(true);
});
it('should be called on parent searchSource if callParentStartHandlers is true', async () => {
const parent = new SearchSource();
const searchSource = new SearchSource().setParent(parent, { callParentStartHandlers: true });
const fn = sinon.spy();
searchSource.onRequestStart(fn);
const parentFn = sinon.spy();
parent.onRequestStart(parentFn);
const request = {};
searchSource.requestIsStarting(request);
await timeout();
expect(fn.calledWith(searchSource, request)).to.be(true);
expect(parentFn.calledWith(searchSource, request)).to.be(true);
});
});
describe('#_mergeProp', function () {
describe('filter', function () {
let searchSource;
let state;
beforeEach(function () {
searchSource = new SearchSource();
state = {};
});
[null, undefined].forEach(falsyValue => {
it(`ignores ${falsyValue} filter`, function () {
searchSource._mergeProp(state, falsyValue, 'filter');
expect(state.filters).to.be(undefined);
});
});
[false, 0, '', NaN].forEach(falsyValue => {
it(`doesn't add ${falsyValue} filter`, function () {
searchSource._mergeProp(state, falsyValue, 'filter');
expect(state.filters).to.be.empty();
});
});
it('adds "meta.disabled: undefined" filter', function () {
const filter = {
meta: {}
};
searchSource._mergeProp(state, filter, 'filter');
expect(state.filters).to.eql([filter]);
});
it('adds "meta.disabled: false" filter', function () {
const filter = {
meta: {
disabled: false
}
};
searchSource._mergeProp(state, filter, 'filter');
expect(state.filters).to.eql([filter]);
});
it(`doesn't add "meta.disabled: true" filter`, function () {
const filter = {
meta: {
disabled: true
}
};
searchSource._mergeProp(state, filter, 'filter');
expect(state.filters).to.be.empty();
});
describe('when courier:ignoreFilterIfFieldNotInIndex is false', function () {
it('adds filter for non-existent field', function () {
config.set('courier:ignoreFilterIfFieldNotInIndex', false);
const filter = {
meta: {
key: 'bar'
}
};
state.index = {
fields: []
};
searchSource._mergeProp(state, filter, 'filter');
expect(state.filters).to.eql([ filter ]);
});
});
describe('when courier:ignoreFilterIfFieldNotInIndex is true', function () {
it(`doesn't add filter for non-existent field`, function () {
config.set('courier:ignoreFilterIfFieldNotInIndex', true);
const filter = {
meta: {
key: 'bar'
}
};
state.index = {
fields: []
};
searchSource._mergeProp(state, filter, 'filter');
expect(state.filters).to.be.empty();
});
it(`adds filter for existent field`, function () {
config.set('courier:ignoreFilterIfFieldNotInIndex', true);
const filter = {
meta: {
key: 'bar'
}
};
state.index = {
fields: [{ name: 'bar' }]
};
searchSource._mergeProp(state, filter, 'filter');
expect(state.filters).to.eql([ filter ]);
});
});
});
});
});

View file

@ -19,59 +19,55 @@
import _ from 'lodash';
export function NormalizeSortRequestProvider(config) {
const defaultSortOptions = config.get('sort:options');
/**
/**
* Decorate queries with default parameters
* @param {query} query object
* @returns {object}
*/
return function (sortObject, indexPattern) {
// [].concat({}) -> [{}], [].concat([{}]) -> [{}]
return [].concat(sortObject).map(function (sortable) {
return normalize(sortable, indexPattern);
});
};
export function normalizeSortRequest(sortObject, indexPattern, defaultSortOptions) {
// [].concat({}) -> [{}], [].concat([{}]) -> [{}]
return [].concat(sortObject).map(function (sortable) {
return normalize(sortable, indexPattern, defaultSortOptions);
});
}
/*
/*
Normalize the sort description to the more verbose format:
{ someField: "desc" } into { someField: { "order": "desc"}}
*/
function normalize(sortable, indexPattern) {
const normalized = {};
let sortField = _.keys(sortable)[0];
let sortValue = sortable[sortField];
const indexField = indexPattern.fields.getByName(sortField);
function normalize(sortable, indexPattern, defaultSortOptions) {
const normalized = {};
let sortField = _.keys(sortable)[0];
let sortValue = sortable[sortField];
const indexField = indexPattern.fields.getByName(sortField);
if (indexField && indexField.scripted && indexField.sortable) {
let direction;
if (_.isString(sortValue)) direction = sortValue;
if (_.isObject(sortValue) && sortValue.order) direction = sortValue.order;
if (indexField && indexField.scripted && indexField.sortable) {
let direction;
if (_.isString(sortValue)) direction = sortValue;
if (_.isObject(sortValue) && sortValue.order) direction = sortValue.order;
sortField = '_script';
sortValue = {
script: {
source: indexField.script,
lang: indexField.lang
},
type: castSortType(indexField.type),
order: direction
};
} else {
if (_.isString(sortValue)) {
sortValue = { order: sortValue };
}
sortValue = _.defaults({}, sortValue, defaultSortOptions);
if (sortField === '_score') {
delete sortValue.unmapped_type;
}
sortField = '_script';
sortValue = {
script: {
source: indexField.script,
lang: indexField.lang
},
type: castSortType(indexField.type),
order: direction
};
} else {
if (_.isString(sortValue)) {
sortValue = { order: sortValue };
}
sortValue = _.defaults({}, sortValue, defaultSortOptions);
normalized[sortField] = sortValue;
return normalized;
if (sortField === '_score') {
delete sortValue.unmapped_type;
}
}
normalized[sortField] = sortValue;
return normalized;
}
// The ES API only supports sort scripts of type 'number' and 'string'
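For orientation, a hedged sketch of what the now-exported normalizeSortRequest returns; indexPatternStub is an assumed test double whose fields.getByName() finds no scripted field, and the defaultSortOptions value mirrors a typical 'sort:options' setting:

// Illustrative only, not part of this diff.
const defaultSortOptions = { unmapped_type: 'boolean' };

normalizeSortRequest({ timestamp: 'desc' }, indexPatternStub, defaultSortOptions);
// -> [{ timestamp: { order: 'desc', unmapped_type: 'boolean' } }]

normalizeSortRequest({ _score: 'desc' }, indexPatternStub, defaultSortOptions);
// -> [{ _score: { order: 'desc' } }]  (unmapped_type is stripped for _score)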

View file

@ -17,4 +17,4 @@
* under the License.
*/
export { SearchSourceProvider } from './search_source';
export { SearchSource } from './search_source';

View file

@ -0,0 +1,58 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export const searchSourceMock = {
setPreferredSearchStrategyId: jest.fn(),
getPreferredSearchStrategyId: jest.fn(),
setFields: jest.fn(),
setField: jest.fn(),
getId: jest.fn(),
getFields: jest.fn(),
getField: jest.fn(),
getOwnField: jest.fn(),
create: jest.fn(),
createCopy: jest.fn(),
createChild: jest.fn(),
setParent: jest.fn(),
getParent: jest.fn(),
fetch: jest.fn(),
onRequestStart: jest.fn(),
getSearchRequestBody: jest.fn(),
destroy: jest.fn(),
history: [],
};
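A minimal sketch of how this mock might be consumed from a consumer's Jest test; the import path and loadDashboardPanel are assumptions for illustration, not part of this change:

import { searchSourceMock } from 'ui/courier/search_source/mocks'; // path assumed

beforeEach(() => {
  jest.clearAllMocks();
  searchSourceMock.fetch.mockResolvedValue({ hits: { hits: [], total: 0 } });
});

it('configures and fetches the search source', async () => {
  await loadDashboardPanel(searchSourceMock); // hypothetical unit under test, injected with the mock
  expect(searchSourceMock.setField).toHaveBeenCalledWith('size', 0);
  expect(searchSourceMock.fetch).toHaveBeenCalled();
});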

View file

@ -17,4 +17,23 @@
* under the License.
*/
export type SearchSource = any;
export declare class SearchSource {
setPreferredSearchStrategyId: (searchStrategyId: string) => void;
getPreferredSearchStrategyId: () => string;
setFields: (newFields: any) => SearchSource;
setField: (field: string, value: any) => SearchSource;
getId: () => string;
getFields: () => any;
getField: (field: string) => any;
getOwnField: () => any;
create: () => SearchSource;
createCopy: () => SearchSource;
createChild: (options?: any) => SearchSource;
setParent: (parent: SearchSource | boolean) => SearchSource;
getParent: () => SearchSource | undefined;
fetch: (options?: any) => Promise<any>;
onRequestStart: (handler: (searchSource: SearchSource, options: any) => void) => void;
getSearchRequestBody: () => any;
destroy: () => void;
history: any[];
}
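A hedged usage sketch of the class-based API declared above; indexPattern and query are assumed to be supplied by the caller, and the options bag passed to fetch() is forwarded to the search strategy:

// Sketch only, not taken verbatim from this change.
const abortController = new AbortController();
const searchSource = new SearchSource()
  .setField('index', indexPattern)
  .setField('size', 100)
  .setField('query', query);

async function fetchPanelData() {
  try {
    return await searchSource.fetch({ abortSignal: abortController.signal });
  } catch (error) {
    if (error.name === 'AbortError') return; // an aborted fetch is not surfaced as an error
    throw error;
  }
}

// later, e.g. on unmount:
abortController.abort();
searchSource.destroy();

Compared to the removed cancelQueued(), cancellation is now driven entirely by the AbortSignal passed through the fetch options.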

View file

@ -71,16 +71,16 @@
import _ from 'lodash';
import angular from 'angular';
import { buildEsQuery, getEsQueryConfig, filterMatchesIndex } from '@kbn/es-query';
import { buildEsQuery, getEsQueryConfig } from '@kbn/es-query';
import { createDefer } from 'ui/promises';
import { NormalizeSortRequestProvider } from './_normalize_sort_request';
import { SearchRequestProvider } from '../fetch/request';
import { normalizeSortRequest } from './_normalize_sort_request';
import { searchRequestQueue } from '../search_request_queue';
import { FetchSoonProvider } from '../fetch';
import { FieldWildcardProvider } from '../../field_wildcard';
import { fetchSoon } from '../fetch';
import { fieldWildcardFilter } from '../../field_wildcard';
import { getHighlightRequest } from '../../../../../plugins/data/common/field_formats';
import { npSetup } from 'ui/new_platform';
import chrome from '../../chrome';
import { RequestFailure } from '../fetch/errors';
import { filterDocvalueFields } from './filter_docvalue_fields';
const FIELDS = [
@ -114,327 +114,242 @@ function isIndexPattern(val) {
return Boolean(val && typeof val.title === 'string');
}
export function SearchSourceProvider(Promise, Private, config) {
const SearchRequest = Private(SearchRequestProvider);
const normalizeSortRequest = Private(NormalizeSortRequestProvider);
const fetchSoon = Private(FetchSoonProvider);
const { fieldWildcardFilter } = Private(FieldWildcardProvider);
const getConfig = (...args) => config.get(...args);
const esShardTimeout = npSetup.core.injectedMetadata.getInjectedVar('esShardTimeout');
const config = npSetup.core.uiSettings;
const getConfig = (...args) => config.get(...args);
const forIp = Symbol('for which index pattern?');
const forIp = Symbol('for which index pattern?');
export class SearchSource {
constructor(initialFields) {
this._id = _.uniqueId('data_source');
class SearchSource {
constructor(initialFields) {
this._id = _.uniqueId('data_source');
this._searchStrategyId = undefined;
this._fields = parseInitialFields(initialFields);
this._parent = undefined;
this._searchStrategyId = undefined;
this._fields = parseInitialFields(initialFields);
this._parent = undefined;
this.history = [];
this._requestStartHandlers = [];
this._inheritOptions = {};
}
this.history = [];
this._requestStartHandlers = [];
this._inheritOptions = {};
this._filterPredicates = [
(filter) => {
// remove null/undefined filters
return filter;
},
(filter) => {
const disabled = _.get(filter, 'meta.disabled');
return disabled === undefined || disabled === false;
},
(filter, data) => {
const index = data.index || this.getField('index');
return !config.get('courier:ignoreFilterIfFieldNotInIndex') || filterMatchesIndex(filter, index);
}
];
}
/*****
/*****
* PUBLIC API
*****/
setPreferredSearchStrategyId(searchStrategyId) {
this._searchStrategyId = searchStrategyId;
setPreferredSearchStrategyId(searchStrategyId) {
this._searchStrategyId = searchStrategyId;
}
getPreferredSearchStrategyId() {
return this._searchStrategyId;
}
setFields(newFields) {
this._fields = newFields;
return this;
}
setField(field, value) {
if (!FIELDS.includes(field)) {
throw new Error(`Can't set field '${field}' on SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
}
getPreferredSearchStrategyId() {
return this._searchStrategyId;
}
if (field === 'index') {
const fields = this._fields;
setFields(newFields) {
this._fields = newFields;
return this;
}
setField = (field, value) => {
if (!FIELDS.includes(field)) {
throw new Error(`Can't set field '${field}' on SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
const hasSource = fields.source;
const sourceCameFromIp = hasSource && fields.source.hasOwnProperty(forIp);
const sourceIsForOurIp = sourceCameFromIp && fields.source[forIp] === fields.index;
if (sourceIsForOurIp) {
delete fields.source;
}
if (field === 'index') {
const fields = this._fields;
const hasSource = fields.source;
const sourceCameFromIp = hasSource && fields.source.hasOwnProperty(forIp);
const sourceIsForOurIp = sourceCameFromIp && fields.source[forIp] === fields.index;
if (sourceIsForOurIp) {
delete fields.source;
}
if (value === null || value === undefined) {
delete fields.index;
return this;
}
if (!isIndexPattern(value)) {
throw new TypeError('expected indexPattern to be an IndexPattern duck.');
}
fields[field] = value;
if (!fields.source) {
// imply source filtering based on the index pattern, but allow overriding
// it by simply setting another field for "source". When the index is changed,
// this implied source filter is removed or replaced (see the check above).
fields.source = function () {
return value.getSourceFiltering();
};
fields.source[forIp] = value;
}
if (value === null || value === undefined) {
delete fields.index;
return this;
}
if (value == null) {
delete this._fields[field];
return this;
if (!isIndexPattern(value)) {
throw new TypeError('expected indexPattern to be an IndexPattern duck.');
}
fields[field] = value;
if (!fields.source) {
// imply source filtering based on the index pattern, but allow overriding
// it by simply setting another field for "source". When the index is changed,
// this implied source filter is removed or replaced (see the check above).
fields.source = function () {
return value.getSourceFiltering();
};
fields.source[forIp] = value;
}
this._fields[field] = value;
return this;
};
getId() {
return this._id;
}
getFields() {
return _.clone(this._fields);
if (value == null) {
delete this._fields[field];
return this;
}
/**
* Get the value for a field, checking this source first and then walking up the parent chain
*/
getField = field => {
if (!FIELDS.includes(field)) {
throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
}
this._fields[field] = value;
return this;
}
let searchSource = this;
getId() {
return this._id;
}
while (searchSource) {
const value = searchSource._fields[field];
if (value !== void 0) {
return value;
}
getFields() {
return _.clone(this._fields);
}
searchSource = searchSource.getParent();
}
};
/**
* Get the value for a field, checking this source first and then walking up the parent chain
*/
getField(field) {
if (!FIELDS.includes(field)) {
throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
}
/**
* Get the field from our own fields, don't traverse up the chain
*/
getOwnField(field) {
if (!FIELDS.includes(field)) {
throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
}
let searchSource = this;
const value = this._fields[field];
while (searchSource) {
const value = searchSource._fields[field];
if (value !== void 0) {
return value;
}
searchSource = searchSource.getParent();
}
}
/**
* Get the field from our own fields, don't traverse up the chain
*/
getOwnField(field) {
if (!FIELDS.includes(field)) {
throw new Error(`Can't get field '${field}' from SearchSource. Acceptable fields are: ${FIELDS.join(', ')}.`);
}
create() {
return new SearchSource();
const value = this._fields[field];
if (value !== void 0) {
return value;
}
}
createCopy() {
const json = angular.toJson(this._fields);
const newSearchSource = new SearchSource(json);
// when serializing the internal fields we lose the internal classes used in the index
// pattern, so we have to set it again to work around this behavior
newSearchSource.setField('index', this.getField('index'));
newSearchSource.setParent(this.getParent());
return newSearchSource;
}
create() {
return new SearchSource();
}
createChild(options = {}) {
const childSearchSource = new SearchSource();
childSearchSource.setParent(this, options);
return childSearchSource;
}
createCopy() {
const json = angular.toJson(this._fields);
const newSearchSource = new SearchSource(json);
// when serializing the internal fields we lose the internal classes used in the index
// pattern, so we have to set it again to work around this behavior
newSearchSource.setField('index', this.getField('index'));
newSearchSource.setParent(this.getParent());
return newSearchSource;
}
/**
createChild(options = {}) {
const childSearchSource = new SearchSource();
childSearchSource.setParent(this, options);
return childSearchSource;
}
/**
* Set a searchSource that this source should inherit from
* @param {SearchSource} searchSource - the parent searchSource
* @return {this} - chainable
*/
setParent(parent, options = {}) {
this._parent = parent;
this._inheritOptions = options;
return this;
}
setParent(parent, options = {}) {
this._parent = parent;
this._inheritOptions = options;
return this;
}
/**
/**
* Get the parent of this SearchSource
* @return {undefined|searchSource}
*/
getParent() {
return this._parent || undefined;
}
getParent() {
return this._parent || undefined;
}
/**
/**
* Fetch this source and reject the returned Promise on error
*
* @async
*/
fetch() {
const self = this;
let req = _.first(self._myStartableQueued());
async fetch(options) {
const $injector = await chrome.dangerouslyGetActiveInjector();
const es = $injector.get('es');
if (!req) {
const errorHandler = (request, error) => {
request.defer.reject(error);
request.abort();
};
req = self._createRequest({ errorHandler });
}
await this.requestIsStarting(options);
fetchSoon.fetchSearchRequests([req]);
return req.getCompletePromise();
const searchRequest = await this._flatten();
this.history = [searchRequest];
const response = await fetchSoon(searchRequest, {
...(this._searchStrategyId && { searchStrategyId: this._searchStrategyId }),
...options,
}, { es, config, esShardTimeout });
if (response.error) {
throw new RequestFailure(null, response);
}
/**
* Fetch all pending requests for this source ASAP
* @async
*/
fetchQueued() {
return fetchSoon.fetchSearchRequests(this._myStartableQueued());
}
return response;
}
/**
* Cancel all pending requests for this searchSource
* @return {undefined}
*/
cancelQueued() {
searchRequestQueue.getAll()
.filter(req => req.source === this)
.forEach(req => req.abort());
}
/**
/**
* Add a handler that will be notified whenever requests start
* @param {Function} handler
* @return {undefined}
*/
onRequestStart(handler) {
this._requestStartHandlers.push(handler);
}
onRequestStart(handler) {
this._requestStartHandlers.push(handler);
}
/**
/**
* Called by requests of this search source when they are started
* @param {Courier.Request} request
* @param options
* @return {Promise<undefined>}
*/
requestIsStarting(request) {
this.activeFetchCount = (this.activeFetchCount || 0) + 1;
this.history = [request];
const handlers = [...this._requestStartHandlers];
// If callParentStartHandlers has been set to true, we also call all
// handlers of parent search sources.
if (this._inheritOptions.callParentStartHandlers) {
let searchSource = this.getParent();
while (searchSource) {
handlers.push(...searchSource._requestStartHandlers);
searchSource = searchSource.getParent();
}
requestIsStarting(options) {
const handlers = [...this._requestStartHandlers];
// If callParentStartHandlers has been set to true, we also call all
// handlers of parent search sources.
if (this._inheritOptions.callParentStartHandlers) {
let searchSource = this.getParent();
while (searchSource) {
handlers.push(...searchSource._requestStartHandlers);
searchSource = searchSource.getParent();
}
return Promise
.map(handlers, fn => fn(this, request))
.then(_.noop);
}
/**
* Put a request into the courier that this Source should
* be fetched on the next run of the courier
* @return {Promise}
*/
onResults() {
const self = this;
return Promise.all(handlers.map(fn => fn(this, options)));
}
return new Promise(function (resolve, reject) {
const defer = createDefer(Promise);
defer.promise.then(resolve, reject);
async getSearchRequestBody() {
const searchRequest = await this._flatten();
return searchRequest.body;
}
const errorHandler = (request, error) => {
reject(error);
request.abort();
};
self._createRequest({ defer, errorHandler });
});
}
async getSearchRequestBody() {
const searchRequest = await this._flatten();
return searchRequest.body;
}
/**
* Called by requests of this search source when they are done
* @param {Courier.Request} request
* @return {undefined}
*/
requestIsStopped() {
this.activeFetchCount -= 1;
}
/**
/**
* Completely destroy the SearchSource.
* @return {undefined}
*/
destroy() {
this.cancelQueued();
this._requestStartHandlers.length = 0;
}
destroy() {
this._requestStartHandlers.length = 0;
}
/******
/******
* PRIVATE APIS
******/
_myStartableQueued() {
return searchRequestQueue
.getStartable()
.filter(req => req.source === this);
}
/**
* Create a common search request object, which should
* be put into the pending request queue, for this search
* source
*
* @param {Deferred} defer - the deferred object that should be resolved
* when the request is complete
* @return {SearchRequest}
*/
_createRequest({ defer, errorHandler }) {
return new SearchRequest({ source: this, defer, errorHandler });
}
/**
/**
* Used to merge properties into the data within ._flatten().
* The data is passed in and modified by the function
*
@ -443,192 +358,184 @@ export function SearchSourceProvider(Promise, Private, config) {
* @param {*} key - The key of `val`
* @return {undefined}
*/
_mergeProp(data, val, key) {
if (typeof val === 'function') {
const source = this;
return Promise.cast(val(this))
.then(function (newVal) {
return source._mergeProp(data, newVal, key);
});
}
_mergeProp(data, val, key) {
if (typeof val === 'function') {
const source = this;
return Promise.resolve(val(this))
.then(function (newVal) {
return source._mergeProp(data, newVal, key);
});
}
if (val == null || !key || !_.isString(key)) return;
if (val == null || !key || !_.isString(key)) return;
switch (key) {
case 'filter':
let filters = Array.isArray(val) ? val : [val];
filters = filters.filter(filter => {
return this._filterPredicates.every(predicate => predicate(filter, data));
});
data.filters = [...(data.filters || []), ...filters];
return;
case 'index':
case 'type':
case 'id':
case 'highlightAll':
if (key && data[key] == null) {
data[key] = val;
}
return;
case 'searchAfter':
key = 'search_after';
addToBody();
break;
case 'source':
key = '_source';
addToBody();
break;
case 'sort':
val = normalizeSortRequest(val, this.getField('index'));
addToBody();
break;
case 'query':
data.query = (data.query || []).concat(val);
break;
case 'fields':
data[key] = _.uniq([...(data[key] || []), ...val]);
break;
default:
addToBody();
}
/**
* Add the key and val to the body of the request
*/
function addToBody() {
data.body = data.body || {};
// ignore if we already have a value
if (data.body[key] == null) {
data.body[key] = val;
switch (key) {
case 'filter':
const filters = Array.isArray(val) ? val : [val];
data.filters = [...(data.filters || []), ...filters];
return;
case 'index':
case 'type':
case 'id':
case 'highlightAll':
if (key && data[key] == null) {
data[key] = val;
}
}
return;
case 'searchAfter':
key = 'search_after';
addToBody();
break;
case 'source':
key = '_source';
addToBody();
break;
case 'sort':
val = normalizeSortRequest(val, this.getField('index'), config.get('sort:options'));
addToBody();
break;
case 'query':
data.query = (data.query || []).concat(val);
break;
case 'fields':
data[key] = _.uniq([...(data[key] || []), ...val]);
break;
default:
addToBody();
}
/**
* Add the key and val to the body of the request
*/
function addToBody() {
data.body = data.body || {};
// ignore if we already have a value
if (data.body[key] == null) {
data.body[key] = val;
}
}
}
/**
* Walk the inheritance chain of a source and return its
* flat representation (taking into account merging rules)
* @returns {Promise}
* @resolved {Object|null} - the flat data of the SearchSource
*/
_flatten() {
// the merged data of this dataSource and its ancestors
const flatData = {};
_flatten() {
// the merged data of this dataSource and its ancestors
const flatData = {};
// function used to write each property from each data object in the chain to flat data
const root = this;
// function used to write each property from each data object in the chain to flat data
const root = this;
// start the chain at this source
let current = this;
// start the chain at this source
let current = this;
// call the ittr and return its promise
return (function ittr() {
// iterate the _fields object (not array) and
// pass each key:value pair to source._mergeProp. if _mergeProp
// returns a promise, then wait for it to complete and call _mergeProp again
return Promise.all(_.map(current._fields, function ittr(value, key) {
if (Promise.is(value)) {
return value.then(function (value) {
return ittr(value, key);
});
}
const prom = root._mergeProp(flatData, value, key);
return Promise.is(prom) ? prom : null;
}))
.then(function () {
// move to this sources parent
const parent = current.getParent();
// keep calling until we reach the top parent
if (parent) {
current = parent;
return ittr();
}
// call the ittr and return its promise
return (function ittr() {
// iterate the _fields object (not array) and
// pass each key:value pair to source._mergeProp. if _mergeProp
// returns a promise, then wait for it to complete and call _mergeProp again
return Promise.all(_.map(current._fields, function ittr(value, key) {
if (value instanceof Promise) {
return value.then(function (value) {
return ittr(value, key);
});
}())
}
const prom = root._mergeProp(flatData, value, key);
return prom instanceof Promise ? prom : null;
}))
.then(function () {
// This is down here to prevent the circular dependency
flatData.body = flatData.body || {};
const computedFields = flatData.index.getComputedFields();
flatData.body.stored_fields = computedFields.storedFields;
flatData.body.script_fields = flatData.body.script_fields || {};
_.extend(flatData.body.script_fields, computedFields.scriptFields);
const defaultDocValueFields = computedFields.docvalueFields ? computedFields.docvalueFields : [];
flatData.body.docvalue_fields = flatData.body.docvalue_fields || defaultDocValueFields;
if (flatData.body._source) {
// exclude source fields for this index pattern specified by the user
const filter = fieldWildcardFilter(flatData.body._source.excludes);
flatData.body.docvalue_fields = flatData.body.docvalue_fields.filter(
docvalueField => filter(docvalueField.field)
);
// move to this sources parent
const parent = current.getParent();
// keep calling until we reach the top parent
if (parent) {
current = parent;
return ittr();
}
});
}())
.then(function () {
// This is down here to prevent the circular dependency
flatData.body = flatData.body || {};
// if we only want to search for certain fields
const fields = flatData.fields;
if (fields) {
// filter out the docvalue_fields, and script_fields to only include those that we are concerned with
flatData.body.docvalue_fields = filterDocvalueFields(flatData.body.docvalue_fields, fields);
flatData.body.script_fields = _.pick(flatData.body.script_fields, fields);
const computedFields = flatData.index.getComputedFields();
// request the remaining fields from both stored_fields and _source
const remainingFields = _.difference(fields, _.keys(flatData.body.script_fields));
flatData.body.stored_fields = remainingFields;
_.set(flatData.body, '_source.includes', remainingFields);
flatData.body.stored_fields = computedFields.storedFields;
flatData.body.script_fields = flatData.body.script_fields || {};
_.extend(flatData.body.script_fields, computedFields.scriptFields);
const defaultDocValueFields = computedFields.docvalueFields ? computedFields.docvalueFields : [];
flatData.body.docvalue_fields = flatData.body.docvalue_fields || defaultDocValueFields;
if (flatData.body._source) {
// exclude source fields for this index pattern specified by the user
const filter = fieldWildcardFilter(flatData.body._source.excludes, config.get('metaFields'));
flatData.body.docvalue_fields = flatData.body.docvalue_fields.filter(
docvalueField => filter(docvalueField.field)
);
}
// if we only want to search for certain fields
const fields = flatData.fields;
if (fields) {
// filter out the docvalue_fields, and script_fields to only include those that we are concerned with
flatData.body.docvalue_fields = filterDocvalueFields(flatData.body.docvalue_fields, fields);
flatData.body.script_fields = _.pick(flatData.body.script_fields, fields);
// request the remaining fields from both stored_fields and _source
const remainingFields = _.difference(fields, _.keys(flatData.body.script_fields));
flatData.body.stored_fields = remainingFields;
_.set(flatData.body, '_source.includes', remainingFields);
}
const esQueryConfigs = getEsQueryConfig(config);
flatData.body.query = buildEsQuery(flatData.index, flatData.query, flatData.filters, esQueryConfigs);
if (flatData.highlightAll != null) {
if (flatData.highlightAll && flatData.body.query) {
flatData.body.highlight = getHighlightRequest(flatData.body.query, getConfig);
}
delete flatData.highlightAll;
}
const esQueryConfigs = getEsQueryConfig(config);
flatData.body.query = buildEsQuery(flatData.index, flatData.query, flatData.filters, esQueryConfigs);
if (flatData.highlightAll != null) {
if (flatData.highlightAll && flatData.body.query) {
flatData.body.highlight = getHighlightRequest(flatData.body.query, getConfig);
}
delete flatData.highlightAll;
}
/**
/**
* Translate a filter into a query to support es 3+
* @param {Object} filter - The filter to translate
* @return {Object} the query version of that filter
*/
const translateToQuery = function (filter) {
if (!filter) return;
const translateToQuery = function (filter) {
if (!filter) return;
if (filter.query) {
return filter.query;
if (filter.query) {
return filter.query;
}
return filter;
};
// re-write filters within filter aggregations
(function recurse(aggBranch) {
if (!aggBranch) return;
Object.keys(aggBranch).forEach(function (id) {
const agg = aggBranch[id];
if (agg.filters) {
// translate filters aggregations
const filters = agg.filters.filters;
Object.keys(filters).forEach(function (filterId) {
filters[filterId] = translateToQuery(filters[filterId]);
});
}
return filter;
};
recurse(agg.aggs || agg.aggregations);
});
}(flatData.body.aggs || flatData.body.aggregations));
// re-write filters within filter aggregations
(function recurse(aggBranch) {
if (!aggBranch) return;
Object.keys(aggBranch).forEach(function (id) {
const agg = aggBranch[id];
if (agg.filters) {
// translate filters aggregations
const filters = agg.filters.filters;
Object.keys(filters).forEach(function (filterId) {
filters[filterId] = translateToQuery(filters[filterId]);
});
}
recurse(agg.aggs || agg.aggregations);
});
}(flatData.body.aggs || flatData.body.aggregations));
return flatData;
});
}
return flatData;
});
}
return SearchSource;
}
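To make the inheritance rules above concrete, a hedged sketch of a parent/child pair; indexPattern, timeRangeFilter and the query object are illustrative:

// Sketch only.
const rootSource = new SearchSource();
rootSource.setField('index', indexPattern);        // also implies a 'source' filter from the index pattern
rootSource.setField('filter', [timeRangeFilter]);  // merged into data.filters by _mergeProp during _flatten()

const childSource = rootSource.createChild({ callParentStartHandlers: true });
childSource.setField('query', { language: 'kuery', query: 'response:200' });

// getField walks up the chain, so the child inherits the parent's index:
childSource.getField('index');    // === indexPattern
childSource.getOwnField('index'); // undefined, only the child's own fields are consulted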

View file

@ -0,0 +1,193 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { SearchSource } from '../search_source';
jest.mock('ui/new_platform', () => ({
npSetup: {
core: {
injectedMetadata: {
getInjectedVar: () => 0,
}
}
}
}));
jest.mock('../fetch', () => ({
fetchSoon: jest.fn(),
}));
const indexPattern = { title: 'foo' };
const indexPattern2 = { title: 'foo' };
describe('SearchSource', function () {
describe('#setField()', function () {
it('sets the value for the property', function () {
const searchSource = new SearchSource();
searchSource.setField('aggs', 5);
expect(searchSource.getField('aggs')).toBe(5);
});
it('throws an error if the property is not accepted', function () {
const searchSource = new SearchSource();
expect(() => searchSource.setField('index', 5)).toThrow();
});
});
describe('#getField()', function () {
it('gets the value for the property', function () {
const searchSource = new SearchSource();
searchSource.setField('aggs', 5);
expect(searchSource.getField('aggs')).toBe(5);
});
it('throws an error if the property is not accepted', function () {
const searchSource = new SearchSource();
expect(() => searchSource.getField('unacceptablePropName')).toThrow();
});
});
describe(`#setField('index')`, function () {
describe('auto-sourceFiltering', function () {
describe('new index pattern assigned', function () {
it('generates a searchSource filter', function () {
const searchSource = new SearchSource();
expect(searchSource.getField('index')).toBe(undefined);
expect(searchSource.getField('source')).toBe(undefined);
searchSource.setField('index', indexPattern);
expect(searchSource.getField('index')).toBe(indexPattern);
expect(typeof searchSource.getField('source')).toBe('function');
});
it('removes created searchSource filter on removal', function () {
const searchSource = new SearchSource();
searchSource.setField('index', indexPattern);
searchSource.setField('index', null);
expect(searchSource.getField('index')).toBe(undefined);
expect(searchSource.getField('source')).toBe(undefined);
});
});
describe('new index pattern assigned over another', function () {
it('replaces searchSource filter with new', function () {
const searchSource = new SearchSource();
searchSource.setField('index', indexPattern);
const searchSourceFilter1 = searchSource.getField('source');
searchSource.setField('index', indexPattern2);
expect(searchSource.getField('index')).toBe(indexPattern2);
expect(typeof searchSource.getField('source')).toBe('function');
expect(searchSource.getField('source')).not.toBe(searchSourceFilter1);
});
it('removes created searchSource filter on removal', function () {
const searchSource = new SearchSource();
searchSource.setField('index', indexPattern);
searchSource.setField('index', indexPattern2);
searchSource.setField('index', null);
expect(searchSource.getField('index')).toBe(undefined);
expect(searchSource.getField('source')).toBe(undefined);
});
});
describe('ip assigned before custom searchSource filter', function () {
it('custom searchSource filter becomes new searchSource', function () {
const searchSource = new SearchSource();
const football = {};
searchSource.setField('index', indexPattern);
expect(typeof searchSource.getField('source')).toBe('function');
searchSource.setField('source', football);
expect(searchSource.getField('index')).toBe(indexPattern);
expect(searchSource.getField('source')).toBe(football);
});
it('custom searchSource stays after removal', function () {
const searchSource = new SearchSource();
const football = {};
searchSource.setField('index', indexPattern);
searchSource.setField('source', football);
searchSource.setField('index', null);
expect(searchSource.getField('index')).toBe(undefined);
expect(searchSource.getField('source')).toBe(football);
});
});
describe('ip assigned after custom searchSource filter', function () {
it('leaves the custom filter in place', function () {
const searchSource = new SearchSource();
const football = {};
searchSource.setField('source', football);
searchSource.setField('index', indexPattern);
expect(searchSource.getField('index')).toBe(indexPattern);
expect(searchSource.getField('source')).toBe(football);
});
it('custom searchSource stays after removal', function () {
const searchSource = new SearchSource();
const football = {};
searchSource.setField('source', football);
searchSource.setField('index', indexPattern);
searchSource.setField('index', null);
expect(searchSource.getField('index')).toBe(undefined);
expect(searchSource.getField('source')).toBe(football);
});
});
});
});
describe('#onRequestStart()', () => {
it('should be called when starting a request', () => {
const searchSource = new SearchSource();
const fn = jest.fn();
searchSource.onRequestStart(fn);
const options = {};
searchSource.requestIsStarting(options);
expect(fn).toBeCalledWith(searchSource, options);
});
it('should not be called on parent searchSource', () => {
const parent = new SearchSource();
const searchSource = new SearchSource().setParent(parent);
const fn = jest.fn();
searchSource.onRequestStart(fn);
const parentFn = jest.fn();
parent.onRequestStart(parentFn);
const options = {};
searchSource.requestIsStarting(options);
expect(fn).toBeCalledWith(searchSource, options);
expect(parentFn).not.toBeCalled();
});
it('should be called on parent searchSource if callParentStartHandlers is true', () => {
const parent = new SearchSource();
const searchSource = new SearchSource().setParent(parent, { callParentStartHandlers: true });
const fn = jest.fn();
searchSource.onRequestStart(fn);
const parentFn = jest.fn();
parent.onRequestStart(parentFn);
const options = {};
searchSource.requestIsStarting(options);
expect(fn).toBeCalledWith(searchSource, options);
expect(parentFn).toBeCalledWith(searchSource, options);
});
});
});

View file

@ -19,48 +19,13 @@
import { addSearchStrategy } from './search_strategy_registry';
import { isDefaultTypeIndexPattern } from './is_default_type_index_pattern';
import { SearchError } from './search_error';
import { getSearchParams, getMSearchParams } from '../fetch/get_search_params';
function getAllFetchParams(searchRequests, Promise) {
return Promise.map(searchRequests, (searchRequest) => {
return Promise.try(searchRequest.getFetchParams, void 0, searchRequest)
.then((fetchParams) => {
return (searchRequest.fetchParams = fetchParams);
})
.then(value => ({ resolved: value }))
.catch(error => ({ rejected: error }));
});
}
async function serializeAllFetchParams(fetchParams, searchRequests, serializeFetchParams) {
const searchRequestsWithFetchParams = [];
const failedSearchRequests = [];
// Gather the fetch param responses from all the successful requests.
fetchParams.forEach((result, index) => {
if (result.resolved) {
searchRequestsWithFetchParams.push(result.resolved);
} else {
const searchRequest = searchRequests[index];
searchRequest.handleFailure(result.rejected);
failedSearchRequests.push(searchRequest);
}
});
return {
serializedFetchParams: await serializeFetchParams(searchRequestsWithFetchParams),
failedSearchRequests,
};
}
import { getSearchParams, getMSearchParams, getPreference, getTimeout } from '../fetch/get_search_params';
export const defaultSearchStrategy = {
id: 'default',
search: params => {
const { config } = params;
return config.get('courier:batchSearches') ? msearch(params) : search(params);
return params.config.get('courier:batchSearches') ? msearch(params) : search(params);
},
isViable: (indexPattern) => {
@ -72,79 +37,43 @@ export const defaultSearchStrategy = {
},
};
async function msearch({ searchRequests, es, Promise, serializeFetchParams, config }) {
// Flatten the searchSource within each searchRequest to get the fetch params,
// e.g. body, filters, index pattern, query.
const allFetchParams = await getAllFetchParams(searchRequests, Promise);
// Serialize the fetch params into a format suitable for the body of an ES query.
const {
serializedFetchParams,
failedSearchRequests,
} = await serializeAllFetchParams(allFetchParams, searchRequests, serializeFetchParams);
if (serializedFetchParams.trim() === '') {
return {
failedSearchRequests,
function msearch({ searchRequests, es, config, esShardTimeout }) {
const inlineRequests = searchRequests.map(({ index, body, search_type: searchType }) => {
const inlineHeader = {
index: index.title || index,
search_type: searchType,
ignore_unavailable: true,
preference: getPreference(config)
};
}
const msearchParams = {
const inlineBody = {
...body,
timeout: getTimeout(esShardTimeout)
};
return `${JSON.stringify(inlineHeader)}\n${JSON.stringify(inlineBody)}`;
});
const searching = es.msearch({
...getMSearchParams(config),
body: serializedFetchParams,
};
const searching = es.msearch(msearchParams);
body: `${inlineRequests.join('\n')}\n`,
});
return {
// Munge data into shape expected by consumer.
searching: new Promise((resolve, reject) => {
// Unwrap the responses object returned by the ES client.
searching.then(({ responses }) => {
resolve(responses);
}).catch(error => {
// Format ES client error as a SearchError.
const { statusCode, displayName, message, path } = error;
const searchError = new SearchError({
status: statusCode,
title: displayName,
message,
path,
});
reject(searchError);
});
}),
abort: searching.abort,
failedSearchRequests,
searching: searching.then(({ responses }) => responses),
abort: searching.abort
};
}
function search({ searchRequests, es, Promise, config, sessionId, esShardTimeout }) {
const failedSearchRequests = [];
function search({ searchRequests, es, config, esShardTimeout }) {
const abortController = new AbortController();
const searchParams = getSearchParams(config, sessionId, esShardTimeout);
const promises = searchRequests.map(async searchRequest => {
return searchRequest.getFetchParams()
.then(fetchParams => {
const { index, body } = searchRequest.fetchParams = fetchParams;
const promise = es.search({ index: index.title || index, body, ...searchParams });
abortController.signal.addEventListener('abort', promise.abort);
return promise;
}, error => {
searchRequest.handleFailure(error);
failedSearchRequests.push(searchRequest);
})
.catch(({ response }) => {
// Copying the _msearch behavior where the errors for individual requests are returned
// instead of thrown
return JSON.parse(response);
});
const searchParams = getSearchParams(config, esShardTimeout);
const promises = searchRequests.map(({ index, body }) => {
const searching = es.search({ index: index.title || index, body, ...searchParams })
.catch(({ response }) => JSON.parse(response));
abortController.signal.addEventListener('abort', searching.abort);
return searching;
});
return {
searching: Promise.all(promises),
abort: () => abortController.abort(),
failedSearchRequests
};
}
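For clarity, a hedged sketch of the per-request serialization the inlined msearch above produces; the concrete index, preference and timeout values are illustrative:

// Illustrative only.
const inlineHeader = { index: 'logstash-*', ignore_unavailable: true, preference: 'abc123' };
const inlineBody = { query: { match_all: {} }, timeout: '30000ms' };
const line = `${JSON.stringify(inlineHeader)}\n${JSON.stringify(inlineBody)}`;
// all such lines are joined with '\n' and a trailing '\n' is appended to form the es.msearch body

Inlining this serialization is what lets the strategy drop its Promise and serializeFetchParams dependencies.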

View file

@ -18,7 +18,6 @@
*/
import { defaultSearchStrategy } from './default_search_strategy';
import Bluebird from 'bluebird';
const { search } = defaultSearchStrategy;
@ -29,14 +28,12 @@ function getConfigStub(config = {}) {
}
describe('defaultSearchStrategy', function () {
describe('search', function () {
let searchArgs;
beforeEach(() => {
const msearchMock = jest.fn().mockReturnValue(Bluebird.resolve([]));
const searchMock = jest.fn().mockReturnValue(Bluebird.resolve([]));
const msearchMock = jest.fn().mockReturnValue(Promise.resolve([]));
const searchMock = jest.fn().mockReturnValue(Promise.resolve([]));
searchArgs = {
searchRequests: [],
@ -44,8 +41,6 @@ describe('defaultSearchStrategy', function () {
msearch: msearchMock,
search: searchMock,
},
Promise: Bluebird,
serializeFetchParams: () => Bluebird.resolve('pretend this is a valid request body'),
};
});
@ -78,7 +73,5 @@ describe('defaultSearchStrategy', function () {
await search(searchArgs);
expect(searchArgs.es.msearch.mock.calls[0][0]).toHaveProperty('ignore_throttled', false);
});
});
});

View file

@ -18,9 +18,10 @@
*/
export {
assignSearchRequestsToSearchStrategies,
addSearchStrategy,
hasSearchStategyForIndexPattern,
getSearchStrategyById,
getSearchStrategyForSearchRequest,
} from './search_strategy_registry';
export { isDefaultTypeIndexPattern } from './is_default_type_index_pattern';

View file

@ -19,7 +19,7 @@
import { noOpSearchStrategy } from './no_op_search_strategy';
const searchStrategies = [];
export const searchStrategies = [];
export const addSearchStrategy = searchStrategy => {
if (searchStrategies.includes(searchStrategy)) {
@ -29,28 +29,26 @@ export const addSearchStrategy = searchStrategy => {
searchStrategies.push(searchStrategy);
};
const getSearchStrategyByViability = indexPattern => {
export const getSearchStrategyByViability = indexPattern => {
return searchStrategies.find(searchStrategy => {
return searchStrategy.isViable(indexPattern);
});
};
const getSearchStrategyById = searchStrategyId => {
export const getSearchStrategyById = searchStrategyId => {
return searchStrategies.find(searchStrategy => {
return searchStrategy.id === searchStrategyId;
});
};
const getSearchStrategyForSearchRequest = searchRequest => {
export const getSearchStrategyForSearchRequest = (searchRequest, { searchStrategyId } = {}) => {
// Allow the searchSource to declare the correct strategy with which to execute its searches.
const preferredSearchStrategyId = searchRequest.source.getPreferredSearchStrategyId();
if (preferredSearchStrategyId != null) {
return getSearchStrategyById(preferredSearchStrategyId);
if (searchStrategyId != null) {
return getSearchStrategyById(searchStrategyId);
}
// Otherwise try to match it to a strategy.
const indexPattern = searchRequest.source.getField('index');
const viableSearchStrategy = getSearchStrategyByViability(indexPattern);
const viableSearchStrategy = getSearchStrategyByViability(searchRequest.index);
if (viableSearchStrategy) {
return viableSearchStrategy;
@ -60,47 +58,6 @@ const getSearchStrategyForSearchRequest = searchRequest => {
return noOpSearchStrategy;
};
/**
* Build a structure like this:
*
* [{
* searchStrategy: rollupSearchStrategy,
* searchRequests: []<SearchRequest>,
* }, {
* searchStrategy: defaultSearchStrategy,
* searchRequests: []<SearchRequest>,
* }]
*
* We use an array of objects to preserve the order of the search requests, which we use to
* deterministically associate each response with the originating request.
*/
export const assignSearchRequestsToSearchStrategies = searchRequests => {
const searchStrategiesWithRequests = [];
const searchStrategyById = {};
searchRequests.forEach(searchRequest => {
const matchingSearchStrategy = getSearchStrategyForSearchRequest(searchRequest);
const { id } = matchingSearchStrategy;
let searchStrategyWithRequest = searchStrategyById[id];
// Create the data structure if we don't already have it.
if (!searchStrategyWithRequest) {
searchStrategyWithRequest = {
searchStrategy: matchingSearchStrategy,
searchRequests: [],
};
searchStrategyById[id] = searchStrategyWithRequest;
searchStrategiesWithRequests.push(searchStrategyWithRequest);
}
searchStrategyWithRequest.searchRequests.push(searchRequest);
});
return searchStrategiesWithRequests;
};
export const hasSearchStategyForIndexPattern = indexPattern => {
return Boolean(getSearchStrategyByViability(indexPattern));
};
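A hedged sketch of registering and resolving a strategy against the slimmed-down registry; the rollup strategy object and index pattern shapes are illustrative:

// Illustrative only.
const rollupSearchStrategy = {
  id: 'rollup',
  search: () => ({ searching: Promise.resolve([]), abort: () => {} }),
  isViable: indexPattern => Boolean(indexPattern) && indexPattern.type === 'rollup',
};

addSearchStrategy(rollupSearchStrategy);

// An explicit searchStrategyId wins over viability matching:
getSearchStrategyForSearchRequest({}, { searchStrategyId: 'rollup' }); // rollupSearchStrategy

// Otherwise the request's index pattern is matched via isViable:
getSearchStrategyForSearchRequest({ index: { type: 'rollup' } });      // rollupSearchStrategy
getSearchStrategyForSearchRequest({ index: { type: 'other' } });       // noOpSearchStrategy (fallback)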

View file

@ -17,79 +17,98 @@
* under the License.
*/
import { noOpSearchStrategy } from './no_op_search_strategy';
import {
assignSearchRequestsToSearchStrategies,
searchStrategies,
addSearchStrategy,
getSearchStrategyByViability,
getSearchStrategyById,
getSearchStrategyForSearchRequest,
hasSearchStategyForIndexPattern
} from './search_strategy_registry';
import { noOpSearchStrategy } from './no_op_search_strategy';
const mockSearchStrategies = [{
id: 0,
isViable: index => index === 0
}, {
id: 1,
isViable: index => index === 1
}];
describe('SearchStrategyRegistry', () => {
describe('assignSearchRequestsToSearchStrategies', () => {
test('associates search requests with valid search strategies', () => {
const searchStrategyA = {
id: 'a',
isViable: indexPattern => {
return indexPattern === 'a';
},
};
describe('Search strategy registry', () => {
beforeEach(() => {
searchStrategies.length = 0;
});
addSearchStrategy(searchStrategyA);
const searchStrategyB = {
id: 'b',
isViable: indexPattern => {
return indexPattern === 'b';
},
};
addSearchStrategy(searchStrategyB);
const searchRequest0 = {
id: 0,
source: { getField: () => 'b', getPreferredSearchStrategyId: () => {} },
};
const searchRequest1 = {
id: 1,
source: { getField: () => 'a', getPreferredSearchStrategyId: () => {} },
};
const searchRequest2 = {
id: 2,
source: { getField: () => 'a', getPreferredSearchStrategyId: () => {} },
};
const searchRequest3 = {
id: 3,
source: { getField: () => 'b', getPreferredSearchStrategyId: () => {} },
};
const searchRequests = [ searchRequest0, searchRequest1, searchRequest2, searchRequest3];
const searchStrategiesWithSearchRequests = assignSearchRequestsToSearchStrategies(searchRequests);
expect(searchStrategiesWithSearchRequests).toEqual([{
searchStrategy: searchStrategyB,
searchRequests: [ searchRequest0, searchRequest3 ],
}, {
searchStrategy: searchStrategyA,
searchRequests: [ searchRequest1, searchRequest2 ],
}]);
describe('addSearchStrategy', () => {
it('adds a search strategy', () => {
addSearchStrategy(mockSearchStrategies[0]);
expect(searchStrategies.length).toBe(1);
});
test(`associates search requests with noOpSearchStrategy when a viable one can't be found`, () => {
const searchRequest0 = {
id: 0,
source: { getField: () => {}, getPreferredSearchStrategyId: () => {} },
};
it('does not add a search strategy if it is already included', () => {
addSearchStrategy(mockSearchStrategies[0]);
addSearchStrategy(mockSearchStrategies[0]);
expect(searchStrategies.length).toBe(1);
});
});
const searchRequests = [ searchRequest0 ];
const searchStrategiesWithSearchRequests = assignSearchRequestsToSearchStrategies(searchRequests);
describe('getSearchStrategyByViability', () => {
beforeEach(() => {
mockSearchStrategies.forEach(addSearchStrategy);
});
expect(searchStrategiesWithSearchRequests).toEqual([{
searchStrategy: noOpSearchStrategy,
searchRequests: [ searchRequest0 ],
}]);
it('returns the viable strategy', () => {
expect(getSearchStrategyByViability(0)).toBe(mockSearchStrategies[0]);
expect(getSearchStrategyByViability(1)).toBe(mockSearchStrategies[1]);
});
it('returns undefined if there is no viable strategy', () => {
expect(getSearchStrategyByViability(-1)).toBe(undefined);
});
});
describe('getSearchStrategyById', () => {
beforeEach(() => {
mockSearchStrategies.forEach(addSearchStrategy);
});
it('returns the strategy by ID', () => {
expect(getSearchStrategyById(0)).toBe(mockSearchStrategies[0]);
expect(getSearchStrategyById(1)).toBe(mockSearchStrategies[1]);
});
it('returns undefined if there is no strategy with that ID', () => {
expect(getSearchStrategyById(-1)).toBe(undefined);
});
});
describe('getSearchStrategyForSearchRequest', () => {
beforeEach(() => {
mockSearchStrategies.forEach(addSearchStrategy);
});
it('returns the strategy by ID if provided', () => {
expect(getSearchStrategyForSearchRequest({}, { searchStrategyId: 1 })).toBe(mockSearchStrategies[1]);
});
it('returns the strategy by viability if there is one', () => {
expect(getSearchStrategyForSearchRequest({ index: 1 })).toBe(mockSearchStrategies[1]);
});
it('returns the no op strategy if there is no viable strategy', () => {
expect(getSearchStrategyForSearchRequest({ index: 3 })).toBe(noOpSearchStrategy);
});
});
describe('hasSearchStategyForIndexPattern', () => {
beforeEach(() => {
mockSearchStrategies.forEach(addSearchStrategy);
});
it('returns whether there is a search strategy for this index pattern', () => {
expect(hasSearchStategyForIndexPattern(0)).toBe(true);
expect(hasSearchStategyForIndexPattern(-1)).toBe(false);
});
});
});

View file

@ -1,3 +0,0 @@
.kbnError--multi-allow-explicit-index {
padding: $euiSizeL;
}

View file

@ -1 +0,0 @@
@import './error_allow_explicit_index';

View file

@ -1,48 +0,0 @@
<div class="app-container kbnError--multi-allow-explicit-index euiText">
<h3>
<icon aria-hidden="true" type="'alert'" color="'danger'"></icon>
<span
i18n-id="common.ui.errorAllowExplicitIndex.errorTitle"
i18n-default-message="Oh no!">
</span>
</h3>
<p
i18n-id="common.ui.errorAllowExplicitIndex.errorDescription"
i18n-default-message="It looks like your Elasticsearch cluster has the
{allowExplicitIndexConfig} setting set to {allowExplicitIndexValue}, which
prevents Kibana from making search requests. We use this ability to send a single request to
Elasticsearch that searches multiple indexes so that when there are many panels on a dashboard
they will load quickly and uniformly."
i18n-values="{
html_allowExplicitIndexConfig: '<code>rest.action.multi.allow_explicit_index</code>',
html_allowExplicitIndexValue: '<code>false</code>'
}"
></p>
<p
i18n-id="common.ui.errorAllowExplicitIndex.errorDisclaimer"
i18n-default-message="Unfortunately, until this issue is fixed you won't be able to use certain apps in Kibana, like
Discover, Visualize and Dashboard."
></p>
<h3
i18n-id="common.ui.errorAllowExplicitIndex.howToFixErrorTitle"
i18n-default-message="Ok, how do I fix this?"
></h3>
<ol>
<li
i18n-id="common.ui.errorAllowExplicitIndex.howToFix.removeConfigItemText"
i18n-default-message="Remove {allowExplicitIndexConfig} from your Elasticsearch config file."
i18n-values="{ html_allowExplicitIndexConfig: '<code>rest.action.multi.allow_explicit_index: false</code>' }"
></li>
<li
i18n-id="common.ui.errorAllowExplicitIndex.howToFix.restartText"
i18n-default-message="Restart Elasticsearch."
></li>
<li
i18n-id="common.ui.errorAllowExplicitIndex.howToFix.goBackText"
i18n-default-message="Use the browser's back button to return to what you were doing."
></li>
</ol>
</div>

View file

@ -1,57 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { i18n } from '@kbn/i18n';
import { get } from 'lodash';
import uiRoutes from '../routes';
import { KbnUrlProvider } from '../url';
import template from './error_allow_explicit_index.html';
uiRoutes
.when('/error/multi.allow_explicit_index', {
template,
k7Breadcrumbs: () => [{ text: i18n.translate('common.ui.errorAllowExplicitIndex.breadcrumbs.errorText', { defaultMessage: 'Error' }) }],
});
export function ErrorAllowExplicitIndexProvider(Private, Promise) {
const kbnUrl = Private(KbnUrlProvider);
return new (class ErrorAllowExplicitIndex {
test(error) {
if (!error || error.status !== 400) {
return false;
}
const type = get(error, 'body.error.type');
const reason = get(error, 'body.error.reason');
return (
type === 'illegal_argument_exception' &&
String(reason).includes('explicit index')
);
}
takeover() {
kbnUrl.change('/error/multi.allow_explicit_index');
return Promise.halt();
}
});
}

View file

@ -1,20 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { ErrorAllowExplicitIndexProvider } from './error_allow_explicit_index';

View file

@ -20,19 +20,12 @@
import expect from '@kbn/expect';
import ngMock from 'ng_mock';
import { FieldWildcardProvider } from '../../field_wildcard';
import { fieldWildcardFilter, makeRegEx } from '../../field_wildcard';
describe('fieldWildcard', function () {
let fieldWildcardFilter;
let makeRegEx;
const metaFields = ['_id', '_type', '_source'];
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.inject(function (config, Private) {
config.set('metaFields', ['_id', '_type', '_source']);
const fieldWildcard = Private(FieldWildcardProvider);
fieldWildcardFilter = fieldWildcard.fieldWildcardFilter;
makeRegEx = fieldWildcard.makeRegEx;
}));
describe('makeRegEx', function () {
it('matches * in any position', function () {
@ -70,7 +63,7 @@ describe('fieldWildcard', function () {
});
it('filters nothing when given an empty array', function () {
const filter = fieldWildcardFilter([]);
const filter = fieldWildcardFilter([], metaFields);
const original = [
'foo',
'bar',
@ -82,7 +75,7 @@ describe('fieldWildcard', function () {
});
it('does not filter metaFields', function () {
const filter = fieldWildcardFilter([ '_*' ]);
const filter = fieldWildcardFilter([ '_*' ], metaFields);
const original = [
'_id',
@ -97,7 +90,7 @@ describe('fieldWildcard', function () {
const filter = fieldWildcardFilter([
'f*',
'*4'
]);
], metaFields);
const original = [
'foo',
@ -114,7 +107,7 @@ describe('fieldWildcard', function () {
'f*',
'*4',
'undefined'
]);
], metaFields);
const original = [
'foo',

View file

@ -19,31 +19,25 @@
import { escapeRegExp, memoize } from 'lodash';
export function FieldWildcardProvider(config) {
const metaFields = config.get('metaFields');
export const makeRegEx = memoize(function makeRegEx(glob) {
return new RegExp('^' + glob.split('*').map(escapeRegExp).join('.*') + '$');
});
const makeRegEx = memoize(function makeRegEx(glob) {
return new RegExp('^' + glob.split('*').map(escapeRegExp).join('.*') + '$');
});
// Note that this will return an essentially noop function if globs is undefined.
function fieldWildcardMatcher(globs = []) {
return function matcher(val) {
// do not test metaFields or keyword
if (metaFields.indexOf(val) !== -1) {
return false;
}
return globs.some(p => makeRegEx(p).test(val));
};
}
// Note that this will return an essentially noop function if globs is undefined.
function fieldWildcardFilter(globs = []) {
const matcher = fieldWildcardMatcher(globs);
return function filter(val) {
return !matcher(val);
};
}
return { makeRegEx, fieldWildcardMatcher, fieldWildcardFilter };
// Note that this will return an essentially noop function if globs is undefined.
export function fieldWildcardMatcher(globs = [], metaFields) {
return function matcher(val) {
// do not test metaFields or keyword
if (metaFields.indexOf(val) !== -1) {
return false;
}
return globs.some(p => makeRegEx(p).test(val));
};
}
// Note that this will return an essentially noop function if globs is undefined.
export function fieldWildcardFilter(globs = [], metaFields = []) {
const matcher = fieldWildcardMatcher(globs, metaFields);
return function filter(val) {
return !matcher(val);
};
}
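
With the Angular provider gone, callers import the helpers directly and pass metaFields in explicitly instead of relying on the injected config service. A minimal usage sketch, assuming the ui/field_wildcard public path; the globs and field names are illustrative.

import { fieldWildcardFilter } from 'ui/field_wildcard';

const metaFields = ['_id', '_type', '_source'];
const filter = fieldWildcardFilter(['_*', 'user.*'], metaFields);

// '_id' is kept because it is a meta field, even though the '_*' glob matches it.
['_id', '_version', 'user.name', 'bytes'].filter(filter); // ['_id', 'bytes']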

View file

@ -17,4 +17,4 @@
* under the License.
*/
export { FieldWildcardProvider } from './field_wildcard';
export * from './field_wildcard';

View file

@ -64,7 +64,6 @@ export const configureAppAngularModule = (angularModule: IModule) => {
.value('buildNum', legacyMetadata.buildNum)
.value('buildSha', legacyMetadata.buildSha)
.value('serverName', legacyMetadata.serverName)
.value('sessionId', Date.now())
.value('esUrl', getEsUrl(newPlatform))
.value('uiCapabilities', capabilities.get())
.config(setupCompileProvider(newPlatform))

View file

@ -34,7 +34,7 @@ import _ from 'lodash';
import { InvalidJSONProperty, SavedObjectNotFound, expandShorthand } from '../../../../plugins/kibana_utils/public';
import { SearchSourceProvider } from '../courier/search_source';
import { SearchSource } from '../courier';
import { findObjectByTitle } from './find_object_by_title';
import { SavedObjectsClientProvider } from './saved_objects_client_provider';
import { migrateLegacyQuery } from '../utils/migrate_legacy_query';
@ -68,7 +68,6 @@ function isErrorNonFatal(error) {
export function SavedObjectProvider(Promise, Private, confirmModalPromise, indexPatterns) {
const savedObjectsClient = Private(SavedObjectsClientProvider);
const SearchSource = Private(SearchSourceProvider);
/**
* The SavedObject class is a base class for saved objects loaded from the server and
@ -527,11 +526,7 @@ export function SavedObjectProvider(Promise, Private, confirmModalPromise, index
});
};
this.destroy = () => {
if (this.searchSource) {
this.searchSource.cancelQueued();
}
};
this.destroy = () => {};
/**
* Delete this object from Elasticsearch

View file

@ -33,14 +33,13 @@ import '../render_complete/directive';
import { AggConfigs } from '../agg_types/agg_configs';
import { PersistedState } from '../persisted_state';
import { updateVisualizationConfig } from './vis_update';
import { SearchSourceProvider } from '../courier/search_source';
import { SearchSource } from '../courier';
import { start as visualizations } from '../../../core_plugins/visualizations/public/np_ready/public/legacy';
import '../directives/bind';
export function VisProvider(Private, getAppState) {
const visTypes = visualizations.types;
const SearchSource = Private(SearchSourceProvider);
class Vis extends EventEmitter {
constructor(indexPattern, visState) {

View file

@ -18,6 +18,7 @@
*/
jest.mock('ui/new_platform');
import { searchSourceMock } from '../../courier/search_source/mocks';
import { mockDataLoaderFetch, timefilter } from './embedded_visualize_handler.test.mocks';
import _ from 'lodash';
@ -85,7 +86,7 @@ describe('EmbeddedVisualizeHandler', () => {
inspectorAdapters: {},
query: undefined,
queryFilter: null,
searchSource: undefined,
searchSource: searchSourceMock,
timeRange: undefined,
uiState: undefined,
};
@ -96,7 +97,7 @@ describe('EmbeddedVisualizeHandler', () => {
{
vis: mockVis,
title: 'My Vis',
searchSource: undefined,
searchSource: searchSourceMock,
destroy: () => ({}),
copyOnSave: false,
save: () => Promise.resolve('123'),
@ -128,7 +129,7 @@ describe('EmbeddedVisualizeHandler', () => {
{
vis: mockVis,
title: 'My Vis',
searchSource: undefined,
searchSource: searchSourceMock,
destroy: () => ({}),
copyOnSave: false,
save: () => Promise.resolve('123'),

View file

@ -518,9 +518,9 @@ export class EmbeddedVisualizeHandler {
// If the data loader was aborted then no need to surface this error in the UI
if (error && error.name === 'AbortError') return;
// TODO: come up with a general way to cancel execution of pipeline expressions.
if (this.dataLoaderParams.searchSource && this.dataLoaderParams.searchSource.cancelQueued) {
this.dataLoaderParams.searchSource.cancelQueued();
// Cancel execution of pipeline expressions
if (this.abortController) {
this.abortController.abort();
}
this.vis.requestError = error;

View file

@ -28,7 +28,7 @@ import {
} from './build_pipeline';
import { Vis, VisState } from 'ui/vis';
import { AggConfig } from 'ui/agg_types/agg_config';
import { SearchSource } from 'ui/courier';
import { searchSourceMock } from 'ui/courier/search_source/mocks';
jest.mock('ui/new_platform');
jest.mock('ui/agg_types/buckets/date_histogram', () => ({
@ -348,10 +348,7 @@ describe('visualize loader pipeline helpers: build pipeline', () => {
toExpression: () => 'testing custom expressions',
},
};
const searchSource: SearchSource = {
getField: () => null,
};
const expression = await buildPipeline(vis, { searchSource });
const expression = await buildPipeline(vis, { searchSource: searchSourceMock });
expect(expression).toMatchSnapshot();
});
});

View file

@ -442,18 +442,9 @@ export const buildVislibDimensions = async (
} else if (xAgg.type.name === 'histogram') {
const intervalParam = xAgg.type.paramByName('interval');
const output = { params: {} as any };
const searchRequest = {
whenAborted: (fn: any) => {
if (params.abortSignal) {
params.abortSignal.addEventListener('abort', fn);
}
},
};
await intervalParam.modifyAggConfigOnSearchRequestStart(
xAgg,
params.searchSource,
searchRequest
);
await intervalParam.modifyAggConfigOnSearchRequestStart(xAgg, params.searchSource, {
abortSignal: params.abortSignal,
});
intervalParam.write(xAgg, output);
dimensions.x.params.interval = output.params.interval;
}
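
On the receiving side, a hook can now read the standard AbortSignal straight off the options object rather than registering a callback through searchRequest.whenAborted. The hook below is a hedged, hypothetical sketch of that shape, not the actual date histogram implementation.

const intervalParam = {
  async modifyAggConfigOnSearchRequestStart(aggConfig, searchSource, { abortSignal } = {}) {
    let aborted = false;
    if (abortSignal) {
      // Previously wired up via searchRequest.whenAborted(fn).
      abortSignal.addEventListener('abort', () => { aborted = true; });
    }

    // ...perform whatever async work the param needs, then bail out if aborted...
    if (!aborted) {
      aggConfig.params.interval = '1h'; // illustrative value
    }
  },
};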

View file

@ -4,17 +4,21 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { uiModules } from 'ui/modules';
import { SearchSourceProvider } from 'ui/courier';
import { getRequestInspectorStats, getResponseInspectorStats } from 'ui/courier/utils/courier_inspector_utils';
export { xpackInfo } from 'plugins/xpack_main/services/xpack_info';
import { start as data } from '../../../../../src/legacy/core_plugins/data/public/legacy';
export { SearchSource } from 'ui/courier';
export const indexPatternService = data.indexPatterns.indexPatterns;
export let SearchSource;
export async function fetchSearchSourceAndRecordWithInspector({ searchSource, requestId, requestName, requestDesc, inspectorAdapters }) {
export async function fetchSearchSourceAndRecordWithInspector({
searchSource,
requestId,
requestName,
requestDesc,
inspectorAdapters,
abortSignal,
}) {
const inspectorRequest = inspectorAdapters.requests.start(
requestName,
{ id: requestId, description: requestDesc });
@ -24,7 +28,7 @@ export async function fetchSearchSourceAndRecordWithInspector({ searchSource, re
searchSource.getSearchRequestBody().then(body => {
inspectorRequest.json(body);
});
resp = await searchSource.fetch();
resp = await searchSource.fetch({ abortSignal });
inspectorRequest
.stats(getResponseInspectorStats(searchSource, resp))
.ok({ json: resp });
@ -35,8 +39,3 @@ export async function fetchSearchSourceAndRecordWithInspector({ searchSource, re
return resp;
}
uiModules.get('app/maps').run(($injector) => {
const Private = $injector.get('Private');
SearchSource = Private(SearchSourceProvider);
});
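
Because SearchSource is now re-exported statically from ui/courier, map code no longer has to wait for the Angular injector to assign it at module run time. A brief sketch of direct construction, assuming the setField API on the de-angularized class; the function and argument names are illustrative.

import { SearchSource } from 'ui/courier';

// Build and run a search without going through Angular's Private() service.
async function runSearch(indexPattern, abortSignal) {
  const searchSource = new SearchSource();
  searchSource.setField('index', indexPattern);
  searchSource.setField('size', 0);
  return await searchSource.fetch({ abortSignal });
}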

View file

@ -133,10 +133,8 @@ export class AbstractESSource extends AbstractVectorSource {
async _runEsQuery(requestName, searchSource, registerCancelCallback, requestDescription) {
const cancel = () => {
searchSource.cancelQueued();
};
registerCancelCallback(cancel);
const abortController = new AbortController();
registerCancelCallback(() => abortController.abort());
try {
return await fetchSearchSourceAndRecordWithInspector({
@ -144,7 +142,8 @@ export class AbstractESSource extends AbstractVectorSource {
searchSource,
requestName,
requestId: this.getId(),
requestDesc: requestDescription
requestDesc: requestDescription,
abortSignal: abortController.signal,
});
} catch(error) {
if (error.name === 'AbortError') {

View file

@ -4,10 +4,12 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { searchSourceMock } from '../../../../../../../../src/legacy/ui/public/courier/search_source/mocks';
export const savedSearchMock = {
id: 'the-saved-search-id',
title: 'the-saved-search-title',
searchSource: {},
searchSource: searchSourceMock,
columns: [],
sort: [],
destroy: () => {},

View file

@ -7,43 +7,8 @@
import { kfetch } from 'ui/kfetch';
import { SearchError, getSearchErrorType } from 'ui/courier';
function getAllFetchParams(searchRequests, Promise) {
return Promise.map(searchRequests, (searchRequest) => {
return Promise.try(searchRequest.getFetchParams, void 0, searchRequest)
.then((fetchParams) => {
return (searchRequest.fetchParams = fetchParams);
})
.then(value => ({ resolved: value }))
.catch(error => ({ rejected: error }));
});
}
function serializeAllFetchParams(fetchParams, searchRequests) {
const searchRequestsWithFetchParams = [];
const failedSearchRequests = [];
// Gather the fetch param responses from all the successful requests.
fetchParams.forEach((result, index) => {
if (result.resolved) {
searchRequestsWithFetchParams.push(result.resolved);
} else {
const searchRequest = searchRequests[index];
searchRequest.handleFailure(result.rejected);
failedSearchRequests.push(searchRequest);
}
});
const serializedFetchParams = serializeFetchParams(searchRequestsWithFetchParams);
return {
serializedFetchParams,
failedSearchRequests,
};
}
function serializeFetchParams(searchRequestsWithFetchParams) {
return JSON.stringify(searchRequestsWithFetchParams.map(searchRequestWithFetchParams => {
function serializeFetchParams(searchRequests) {
return JSON.stringify(searchRequests.map(searchRequestWithFetchParams => {
const indexPattern = searchRequestWithFetchParams.index.title || searchRequestWithFetchParams.index;
const {
body: {
@ -84,16 +49,9 @@ function shimHitsInFetchResponse(response) {
export const rollupSearchStrategy = {
id: 'rollup',
search: async ({ searchRequests, Promise }) => {
// Flatten the searchSource within each searchRequest to get the fetch params,
// e.g. body, filters, index pattern, query.
const allFetchParams = await getAllFetchParams(searchRequests, Promise);
search: ({ searchRequests, Promise }) => {
// Serialize the fetch params into a format suitable for the body of an ES query.
const {
serializedFetchParams,
failedSearchRequests,
} = await serializeAllFetchParams(allFetchParams, searchRequests);
const serializedFetchParams = serializeFetchParams(searchRequests);
const controller = new AbortController();
const promise = kfetch({
@ -124,7 +82,6 @@ export const rollupSearchStrategy = {
return Promise.reject(searchError);
}),
abort: () => controller.abort(),
failedSearchRequests,
};
},
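
A hedged sketch of driving the simplified strategy from a caller's point of view. The searchRequests shape is abbreviated to the fields serializeFetchParams reads above, the import path is assumed, and the name of the returned promise property (searching) is assumed to mirror the default search strategy; only abort and the removal of failedSearchRequests are visible in this hunk.

import { rollupSearchStrategy } from './rollup_search_strategy';

const { searching, abort } = rollupSearchStrategy.search({
  searchRequests: [
    {
      index: { title: 'rollup-logs' }, // illustrative rollup index pattern title
      body: { size: 0, query: { match_all: {} }, aggs: {} },
    },
  ],
  Promise,
});

// Errors now surface through the returned promise instead of a separate
// failedSearchRequests array.
searching.catch(error => console.error(error));

// Aborting cancels the underlying kfetch to the rollup search endpoint.
abort();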

View file

@ -261,13 +261,8 @@
"common.ui.aggTypes.timeInterval.scaledHelpText": "現在 {bucketDescription} にスケーリングされています",
"common.ui.aggTypes.timeInterval.selectIntervalPlaceholder": "間隔を選択",
"common.ui.aggTypes.timeInterval.selectOptionHelpText": "オプションを選択するかカスタム値を作成します。例30s、20m、24h、2d、1w、1M",
"common.ui.courier.fetch.failedToClearRequestErrorMessage": "返答から未完全または重複のリクエストを消去できませんでした。",
"common.ui.courier.fetch.requestTimedOutNotificationMessage": "リクエストがタイムアウトしたため、データが不完全な可能性があります",
"common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage": "リクエストが 2 度中断されましたか?",
"common.ui.courier.fetch.requireErrorHandlerErrorMessage": "{errorHandler} が必要です",
"common.ui.courier.fetch.shardsFailedNotificationMessage": "{shardsTotal} 件中 {shardsFailed} 件のシャードでエラーが発生しました",
"common.ui.courier.fetch.unableContinueRequestErrorMessage": "{type} リクエストを続行できません",
"common.ui.courier.fetch.unableStartRequestErrorMessage": "既に開始済みのためリクエストは開始できません",
"common.ui.courier.hitsDescription": "クエリにより返されたドキュメントの数です。",
"common.ui.courier.hitsLabel": "ヒット数",
"common.ui.courier.hitsTotalDescription": "クエリに一致するドキュメントの数です。",
@ -298,14 +293,6 @@
"common.ui.dualRangeControl.mustSetBothErrorMessage": "下と上の値の両方を設定する必要があります",
"common.ui.dualRangeControl.outsideOfRangeErrorMessage": "値は {min} と {max} の間でなければなりません",
"common.ui.dualRangeControl.upperValidErrorMessage": "上の値は下の値以上でなければなりません",
"common.ui.errorAllowExplicitIndex.breadcrumbs.errorText": "エラー",
"common.ui.errorAllowExplicitIndex.errorDescription": "ご使用の Elasticsearch クラスターの {allowExplicitIndexConfig} 設定が {allowExplicitIndexValue} に設定されているようです。これにより Kibana が検索リクエストを行うことができません。この機能は、ダッシュボードに多数のパネルがある際に素早く一貫して読み込まれるように、Elasticsearch に複数インデックスを検索する単独のリクエストを送るのに使用します。",
"common.ui.errorAllowExplicitIndex.errorDisclaimer": "申し訳ございませんが、この問題が解決されるまでディスカバリ、可視化、ダッシュボードなどの Kibana の特定のアプリはご利用いただけません。",
"common.ui.errorAllowExplicitIndex.errorTitle": "おっと!",
"common.ui.errorAllowExplicitIndex.howToFix.goBackText": "ブラウザの戻るボタンで前の画面に戻ります。",
"common.ui.errorAllowExplicitIndex.howToFix.removeConfigItemText": "Elasticsearch の構成ファイルから {allowExplicitIndexConfig} を削除します。",
"common.ui.errorAllowExplicitIndex.howToFix.restartText": "Elasticsearch を再起動します。",
"common.ui.errorAllowExplicitIndex.howToFixErrorTitle": "どうすれば良いのでしょう?",
"common.ui.errorAutoCreateIndex.breadcrumbs.errorText": "エラー",
"common.ui.errorAutoCreateIndex.errorDescription": "Elasticsearch クラスターの {autoCreateIndexActionConfig} 設定が原因で、Kibana が保存されたオブジェクトを格納するインデックスを自動的に作成できないようです。Kibana は、保存されたオブジェクトインデックスが適切なマッピング/スキーマを使用し Kibana から Elasticsearch へのポーリングの回数を減らすための最適な手段であるため、この Elasticsearch の機能を使用します。",
"common.ui.errorAutoCreateIndex.errorDisclaimer": "申し訳ございませんが、この問題が解決されるまで Kibana で何も保存することができません。",

View file

@ -261,13 +261,8 @@
"common.ui.aggTypes.timeInterval.scaledHelpText": "当前缩放至 {bucketDescription}",
"common.ui.aggTypes.timeInterval.selectIntervalPlaceholder": "选择时间间隔",
"common.ui.aggTypes.timeInterval.selectOptionHelpText": "选择选项或创建定制值示例:30s、20m、24h、2d、1w、1M",
"common.ui.courier.fetch.failedToClearRequestErrorMessage": "无法从响应中清除不完整或重复的请求。",
"common.ui.courier.fetch.requestTimedOutNotificationMessage": "由于您的请求超时,因此数据可能不完整",
"common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage": "请求已中止两次?",
"common.ui.courier.fetch.requireErrorHandlerErrorMessage": "“{errorHandler}” 必填",
"common.ui.courier.fetch.shardsFailedNotificationMessage": "{shardsTotal} 个分片有 {shardsFailed} 个失败",
"common.ui.courier.fetch.unableContinueRequestErrorMessage": "无法继续 {type} 请求",
"common.ui.courier.fetch.unableStartRequestErrorMessage": "无法启动请求,因此其已启动",
"common.ui.courier.hitsDescription": "查询返回的文档数目。",
"common.ui.courier.hitsLabel": "命中",
"common.ui.courier.hitsTotalDescription": "匹配查询的文档数目。",
@ -298,14 +293,6 @@
"common.ui.dualRangeControl.mustSetBothErrorMessage": "下限值和上限值都须设置",
"common.ui.dualRangeControl.outsideOfRangeErrorMessage": "值必须是在 {min} 到 {max} 的范围内",
"common.ui.dualRangeControl.upperValidErrorMessage": "上限值必须大于或等于下限值",
"common.ui.errorAllowExplicitIndex.breadcrumbs.errorText": "错误",
"common.ui.errorAllowExplicitIndex.errorDescription": "似乎您的 Elasticsearch 集群已将设置 {allowExplicitIndexConfig} 设置为 {allowExplicitIndexValue},这使 Kibana 无法执行搜索请求。使用此功能,我们可以向 Elasticsearch 发送单个请求来搜索多个索引,这样,当仪表板上有多个面板时,面板可快速且一致地加载。",
"common.ui.errorAllowExplicitIndex.errorDisclaimer": "但是,只有解决了此问题后,您才能使用 Kibana 中的某些应用,如 Discover、Visualize 和仪表板。",
"common.ui.errorAllowExplicitIndex.errorTitle": "糟糕!",
"common.ui.errorAllowExplicitIndex.howToFix.goBackText": "使用浏览器的后退按钮返回您之前正做的工作。",
"common.ui.errorAllowExplicitIndex.howToFix.removeConfigItemText": "从 Elasticsearch 配置文件中删除 {allowExplicitIndexConfig}",
"common.ui.errorAllowExplicitIndex.howToFix.restartText": "重新启动 Elasticsearch。",
"common.ui.errorAllowExplicitIndex.howToFixErrorTitle": "那么,我如何解决此问题?",
"common.ui.errorAutoCreateIndex.breadcrumbs.errorText": "错误",
"common.ui.errorAutoCreateIndex.errorDescription": "似乎 Elasticsearch 集群的 {autoCreateIndexActionConfig} 设置使 Kibana 无法自动创建用于存储已保存对象的索引。Kibana 将使用此 Elasticsearch 功能,因为这是确保已保存对象索引使用正确映射/架构的最好方式,而且其允许 Kibana 较少地轮询 Elasticsearch。",
"common.ui.errorAutoCreateIndex.errorDisclaimer": "但是,只有解决了此问题后,您才能在 Kibana 保存内容。",