mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
Merge branch 'master' of github.com:elastic/kibana into renameAllFiles
This commit is contained in:
commit
7a1e0554db
51 changed files with 728 additions and 650 deletions
|
@ -221,6 +221,7 @@ Remember, someone is blocked by a pull awaiting review, make it count. Be thorou
|
|||
1. **Understand the issue** that is being fixed, or the feature being added. Check the description on the pull, and check out the related issue. If you don't understand something, ask the person the submitter for clarification.
|
||||
1. **Reproduce the bug** (or the lack of feature I guess?) in the destination branch, usually `master`. The referenced issue will help you here. If you're unable to reproduce the issue, contact the issue submitter for clarification
|
||||
1. **Check out the pull** and test it. Is the issue fixed? Does it have nasty side effects? Try to create suspect inputs. If it operates on the value of a field try things like: strings (including an empty string), null, numbers, dates. Try to think of edge cases that might break the code.
|
||||
1. **Merge the target branch**. It is possible that tests or the linter have been updated in the target branch since the pull was submitted. Merging the pull could cause core to start failing.
|
||||
1. **Read the code**. Understanding the changes will help you find additional things to test. Contact the submitter if you don't understand something.
|
||||
1. **Go line-by-line**. Are there [style guide](https://github.com/elastic/kibana/blob/master/STYLEGUIDE.md) violations? Strangely named variables? Magic numbers? Do the abstractions make sense to you? Are things arranged in a testable way?
|
||||
1. **Speaking of tests** Are they there? If a new function was added does it have tests? Do the tests, well, TEST anything? Do they just run the function or do they properly check the output?
|
||||
|
|
|
@ -89,7 +89,7 @@
|
|||
"boom": "2.8.0",
|
||||
"bootstrap": "3.3.5",
|
||||
"brace": "0.5.1",
|
||||
"bunyan": "1.4.0",
|
||||
"bunyan": "1.7.1",
|
||||
"clipboard": "1.5.5",
|
||||
"commander": "2.8.1",
|
||||
"css-loader": "0.17.0",
|
||||
|
@ -146,7 +146,7 @@
|
|||
"angular-mocks": "1.4.7",
|
||||
"auto-release-sinon": "1.0.3",
|
||||
"babel-eslint": "4.1.8",
|
||||
"chokidar": "1.0.5",
|
||||
"chokidar": "1.4.3",
|
||||
"eslint": "1.10.3",
|
||||
"eslint-plugin-mocha": "1.1.0",
|
||||
"expect.js": "0.3.1",
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import cluster from 'cluster';
|
||||
const { join } = require('path');
|
||||
const { join, resolve } = require('path');
|
||||
const { format: formatUrl } = require('url');
|
||||
import Hapi from 'hapi';
|
||||
const { debounce, compact, get, invoke, bindAll, once, sample } = require('lodash');
|
||||
const { debounce, compact, get, invoke, bindAll, once, sample, uniq } = require('lodash');
|
||||
|
||||
import Log from '../log';
|
||||
import Worker from './worker';
|
||||
|
@ -85,14 +85,19 @@ module.exports = class ClusterManager {
|
|||
const chokidar = require('chokidar');
|
||||
const fromRoot = require('../../utils/from_root');
|
||||
|
||||
this.watcher = chokidar.watch([
|
||||
'src/plugins',
|
||||
'src/server',
|
||||
'src/ui',
|
||||
'src/utils',
|
||||
'config',
|
||||
...extraPaths
|
||||
], {
|
||||
const watchPaths = uniq(
|
||||
[
|
||||
fromRoot('src/plugins'),
|
||||
fromRoot('src/server'),
|
||||
fromRoot('src/ui'),
|
||||
fromRoot('src/utils'),
|
||||
fromRoot('config'),
|
||||
...extraPaths
|
||||
]
|
||||
.map(path => resolve(path))
|
||||
);
|
||||
|
||||
this.watcher = chokidar.watch(watchPaths, {
|
||||
cwd: fromRoot('.'),
|
||||
ignored: /[\\\/](\..*|node_modules|bower_components|public|__tests__)[\\\/]/
|
||||
});
|
||||
|
|
|
@ -54,7 +54,7 @@ module.exports = function ({ Plugin }) {
|
|||
return reply.continue();
|
||||
}
|
||||
|
||||
function noCreateIndex({ path }, reply) {
|
||||
function noDirectIndex({ path }, reply) {
|
||||
const requestPath = trimRight(trim(path), '/');
|
||||
const matchPath = createPath(kibanaIndex);
|
||||
|
||||
|
@ -75,7 +75,7 @@ module.exports = function ({ Plugin }) {
|
|||
['PUT', 'POST', 'DELETE'],
|
||||
`/${kibanaIndex}/{paths*}`,
|
||||
{
|
||||
pre: [ noCreateIndex, noBulkCheck ]
|
||||
pre: [ noDirectIndex, noBulkCheck ]
|
||||
}
|
||||
);
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
import SetupError from './setup_error';
|
||||
import { format } from 'util';
|
||||
import { mappings } from './kibana_index_mappings';
|
||||
|
||||
module.exports = function (server) {
|
||||
const client = server.plugins.elasticsearch.client;
|
||||
const index = server.config().get('kibana.index');
|
||||
|
@ -16,16 +18,7 @@ module.exports = function (server) {
|
|||
settings: {
|
||||
number_of_shards: 1
|
||||
},
|
||||
mappings: {
|
||||
config: {
|
||||
properties: {
|
||||
buildNum: {
|
||||
type: 'string',
|
||||
index: 'not_analyzed'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
mappings
|
||||
}
|
||||
})
|
||||
.catch(handleError('Unable to create Kibana index "<%= kibana.index %>"'))
|
||||
|
|
|
@ -9,6 +9,19 @@ import callWithRequest from './call_with_request';
|
|||
module.exports = function (server) {
|
||||
const config = server.config();
|
||||
|
||||
class ElasticsearchClientLogging {
|
||||
error(err) {
|
||||
server.log(['error', 'elasticsearch'], err);
|
||||
}
|
||||
warning(message) {
|
||||
server.log(['warning', 'elasticsearch'], message);
|
||||
}
|
||||
info() {}
|
||||
debug() {}
|
||||
trace() {}
|
||||
close() {}
|
||||
}
|
||||
|
||||
function createClient(options) {
|
||||
options = _.defaults(options || {}, {
|
||||
url: config.get('elasticsearch.url'),
|
||||
|
@ -52,18 +65,7 @@ module.exports = function (server) {
|
|||
defer: function () {
|
||||
return Bluebird.defer();
|
||||
},
|
||||
log: function () {
|
||||
this.error = function (err) {
|
||||
server.log(['error', 'elasticsearch'], err);
|
||||
};
|
||||
this.warning = function (message) {
|
||||
server.log(['warning', 'elasticsearch'], message);
|
||||
};
|
||||
this.info = _.noop;
|
||||
this.debug = _.noop;
|
||||
this.trace = _.noop;
|
||||
this.close = _.noop;
|
||||
}
|
||||
log: ElasticsearchClientLogging
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -73,6 +75,7 @@ module.exports = function (server) {
|
|||
const noAuthClient = createClient({ auth: false });
|
||||
server.on('close', _.bindKey(noAuthClient, 'close'));
|
||||
|
||||
server.expose('ElasticsearchClientLogging', ElasticsearchClientLogging);
|
||||
server.expose('client', client);
|
||||
server.expose('createClient', createClient);
|
||||
server.expose('callWithRequestFactory', callWithRequest);
|
||||
|
|
10
src/plugins/elasticsearch/lib/kibana_index_mappings.js
Normal file
10
src/plugins/elasticsearch/lib/kibana_index_mappings.js
Normal file
|
@ -0,0 +1,10 @@
|
|||
export const mappings = {
|
||||
config: {
|
||||
properties: {
|
||||
buildNum: {
|
||||
type: 'string',
|
||||
index: 'not_analyzed'
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
|
@ -1,4 +1,5 @@
|
|||
import upgrade from './upgrade_config';
|
||||
import { mappings } from './kibana_index_mappings';
|
||||
|
||||
module.exports = function (server) {
|
||||
const config = server.config();
|
||||
|
@ -8,11 +9,16 @@ module.exports = function (server) {
|
|||
type: 'config',
|
||||
body: {
|
||||
size: 1000,
|
||||
sort: [ { buildNum: { order: 'desc', ignore_unmapped: true } } ]
|
||||
sort: [
|
||||
{
|
||||
buildNum: {
|
||||
order: 'desc',
|
||||
unmapped_type: mappings.config.properties.buildNum.type
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
return client.search(options).then(upgrade(server));
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -32,7 +32,7 @@ export default function configDefaultsProvider() {
|
|||
type: 'json',
|
||||
value:
|
||||
'[\n' +
|
||||
' ["", "hh:mm:ss.SSS"],\n' +
|
||||
' ["", "HH:mm:ss.SSS"],\n' +
|
||||
' ["PT1S", "HH:mm:ss"],\n' +
|
||||
' ["PT1M", "HH:mm"],\n' +
|
||||
' ["PT1H",\n' +
|
||||
|
|
|
@ -1,22 +1,22 @@
|
|||
import sinon from 'auto-release-sinon';
|
||||
import expect from 'expect.js';
|
||||
import ngMock from 'ng_mock';
|
||||
import CourierDataSourceDocSourceProvider from 'ui/courier/data_source/doc_source';
|
||||
import CourierFetchRequestDocProvider from 'ui/courier/fetch/request/doc';
|
||||
|
||||
import DocSourceProvider from '../../data_source/doc_source';
|
||||
import DocRequestProvider from '../request/doc';
|
||||
|
||||
describe('Courier DocFetchRequest class', function () {
|
||||
let storage;
|
||||
let source;
|
||||
let defer;
|
||||
let req;
|
||||
|
||||
var storage;
|
||||
var source;
|
||||
var defer;
|
||||
var req;
|
||||
|
||||
var setVersion;
|
||||
let setVersion;
|
||||
|
||||
beforeEach(ngMock.module('kibana'));
|
||||
beforeEach(ngMock.inject(function (Private, Promise, $injector) {
|
||||
var DocSource = Private(CourierDataSourceDocSourceProvider);
|
||||
var DocFetchRequest = Private(CourierFetchRequestDocProvider);
|
||||
const DocSource = Private(DocSourceProvider);
|
||||
const DocFetchRequest = Private(DocRequestProvider);
|
||||
|
||||
storage =
|
||||
$injector.get('localStorage').store =
|
||||
|
|
|
@ -2,11 +2,12 @@ import ngMock from 'ng_mock';
|
|||
import expect from 'expect.js';
|
||||
import sinon from 'auto-release-sinon';
|
||||
|
||||
import FetchProvider from 'ui/courier/fetch';
|
||||
import IndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
|
||||
import searchResp from 'fixtures/search_response';
|
||||
import CourierDataSourceDocSourceProvider from 'ui/courier/data_source/doc_source';
|
||||
import CourierDataSourceSearchSourceProvider from 'ui/courier/data_source/search_source';
|
||||
|
||||
import FetchProvider from '../fetch';
|
||||
import DocSourceProvider from '../../data_source/doc_source';
|
||||
import SearchSourceProvider from '../../data_source/search_source';
|
||||
|
||||
describe('Fetch service', function () {
|
||||
require('test_utils/no_digest_promises').activateForSuite();
|
||||
|
@ -24,8 +25,8 @@ describe('Fetch service', function () {
|
|||
Promise = $injector.get('Promise');
|
||||
fetch = Private(FetchProvider);
|
||||
indexPattern = Private(IndexPatternProvider);
|
||||
DocSource = Private(CourierDataSourceDocSourceProvider);
|
||||
SearchSource = Private(CourierDataSourceSearchSourceProvider);
|
||||
DocSource = Private(DocSourceProvider);
|
||||
SearchSource = Private(SearchSourceProvider);
|
||||
}));
|
||||
|
||||
describe('#doc(docSource)', function () {
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
import _ from 'lodash';
|
||||
import sinon from 'auto-release-sinon';
|
||||
import expect from 'expect.js';
|
||||
import ngMock from 'ng_mock';
|
||||
import CourierFetchFetchTheseProvider from 'ui/courier/fetch/_fetch_these';
|
||||
|
||||
import FetchTheseProvider from '../fetch_these';
|
||||
|
||||
describe('ui/courier/fetch/_fetch_these', () => {
|
||||
|
||||
let Promise;
|
||||
|
@ -22,15 +23,15 @@ describe('ui/courier/fetch/_fetch_these', () => {
|
|||
return fakeResponses;
|
||||
}
|
||||
|
||||
PrivateProvider.swap(require('ui/courier/fetch/_call_client'), FakeResponsesProvider);
|
||||
PrivateProvider.swap(require('ui/courier/fetch/_call_response_handlers'), FakeResponsesProvider);
|
||||
PrivateProvider.swap(require('ui/courier/fetch/_continue_incomplete'), FakeResponsesProvider);
|
||||
PrivateProvider.swap(require('ui/courier/fetch/call_client'), FakeResponsesProvider);
|
||||
PrivateProvider.swap(require('ui/courier/fetch/call_response_handlers'), FakeResponsesProvider);
|
||||
PrivateProvider.swap(require('ui/courier/fetch/continue_incomplete'), FakeResponsesProvider);
|
||||
}));
|
||||
|
||||
beforeEach(ngMock.inject((Private, $injector) => {
|
||||
$rootScope = $injector.get('$rootScope');
|
||||
Promise = $injector.get('Promise');
|
||||
fetchThese = Private(CourierFetchFetchTheseProvider);
|
||||
fetchThese = Private(FetchTheseProvider);
|
||||
request = mockRequest();
|
||||
requests = [ request ];
|
||||
}));
|
||||
|
|
|
@ -1,9 +0,0 @@
|
|||
import CourierFetchRequestRequestProvider from 'ui/courier/fetch/request/request';
|
||||
|
||||
export default function CourierFetchIsRequestProvider(Private) {
|
||||
var AbstractRequest = Private(CourierFetchRequestRequestProvider);
|
||||
|
||||
return function isRequest(obj) {
|
||||
return obj instanceof AbstractRequest;
|
||||
};
|
||||
};
|
|
@ -1,29 +1,31 @@
|
|||
import _ from 'lodash';
|
||||
import CourierFetchIsRequestProvider from 'ui/courier/fetch/_is_request';
|
||||
import CourierFetchMergeDuplicateRequestsProvider from 'ui/courier/fetch/_merge_duplicate_requests';
|
||||
import CourierFetchReqStatusProvider from 'ui/courier/fetch/_req_status';
|
||||
|
||||
import IsRequestProvider from './is_request';
|
||||
import MergeDuplicatesRequestProvider from './merge_duplicate_requests';
|
||||
import ReqStatusProvider from './req_status';
|
||||
|
||||
export default function CourierFetchCallClient(Private, Promise, es, esShardTimeout, sessionId) {
|
||||
|
||||
var isRequest = Private(CourierFetchIsRequestProvider);
|
||||
var mergeDuplicateRequests = Private(CourierFetchMergeDuplicateRequestsProvider);
|
||||
const isRequest = Private(IsRequestProvider);
|
||||
const mergeDuplicateRequests = Private(MergeDuplicatesRequestProvider);
|
||||
|
||||
var ABORTED = Private(CourierFetchReqStatusProvider).ABORTED;
|
||||
var DUPLICATE = Private(CourierFetchReqStatusProvider).DUPLICATE;
|
||||
const ABORTED = Private(ReqStatusProvider).ABORTED;
|
||||
const DUPLICATE = Private(ReqStatusProvider).DUPLICATE;
|
||||
|
||||
function callClient(strategy, requests) {
|
||||
// merging docs can change status to DUPLICATE, capture new statuses
|
||||
var statuses = mergeDuplicateRequests(requests);
|
||||
const statuses = mergeDuplicateRequests(requests);
|
||||
|
||||
// get the actual list of requests that we will be fetching
|
||||
var executable = statuses.filter(isRequest);
|
||||
var execCount = executable.length;
|
||||
const executable = statuses.filter(isRequest);
|
||||
let execCount = executable.length;
|
||||
|
||||
// resolved by respond()
|
||||
var esPromise;
|
||||
var defer = Promise.defer();
|
||||
let esPromise;
|
||||
const defer = Promise.defer();
|
||||
|
||||
// for each respond with either the response or ABORTED
|
||||
var respond = function (responses) {
|
||||
const respond = function (responses) {
|
||||
responses = responses || [];
|
||||
return Promise.map(requests, function (req, i) {
|
||||
switch (statuses[i]) {
|
||||
|
@ -43,7 +45,7 @@ export default function CourierFetchCallClient(Private, Promise, es, esShardTime
|
|||
|
||||
|
||||
// handle a request being aborted while being fetched
|
||||
var requestWasAborted = Promise.method(function (req, i) {
|
||||
const requestWasAborted = Promise.method(function (req, i) {
|
||||
if (statuses[i] === ABORTED) {
|
||||
defer.reject(new Error('Request was aborted twice?'));
|
||||
}
|
|
@ -1,12 +1,12 @@
|
|||
import { SearchTimeout } from 'ui/errors';
|
||||
import { RequestFailure } from 'ui/errors';
|
||||
import { ShardFailure } from 'ui/errors';
|
||||
import CourierFetchReqStatusProvider from 'ui/courier/fetch/_req_status';
|
||||
import CourierFetchNotifierProvider from 'ui/courier/fetch/_notifier';
|
||||
import { RequestFailure, SearchTimeout, ShardFailure } from 'ui/errors';
|
||||
|
||||
import ReqStatusProvider from './req_status';
|
||||
import NotifierProvider from './notifier';
|
||||
|
||||
export default function CourierFetchCallResponseHandlers(Private, Promise) {
|
||||
var ABORTED = Private(CourierFetchReqStatusProvider).ABORTED;
|
||||
var INCOMPLETE = Private(CourierFetchReqStatusProvider).INCOMPLETE;
|
||||
var notify = Private(CourierFetchNotifierProvider);
|
||||
const ABORTED = Private(ReqStatusProvider).ABORTED;
|
||||
const INCOMPLETE = Private(ReqStatusProvider).INCOMPLETE;
|
||||
const notify = Private(NotifierProvider);
|
||||
|
||||
|
||||
function callResponseHandlers(requests, responses) {
|
||||
|
@ -15,7 +15,7 @@ export default function CourierFetchCallResponseHandlers(Private, Promise) {
|
|||
return ABORTED;
|
||||
}
|
||||
|
||||
var resp = responses[i];
|
||||
let resp = responses[i];
|
||||
|
||||
if (resp.timed_out) {
|
||||
notify.warning(new SearchTimeout());
|
|
@ -1,10 +1,10 @@
|
|||
import CourierFetchReqStatusProvider from 'ui/courier/fetch/_req_status';
|
||||
import ReqStatusProvider from './req_status';
|
||||
|
||||
export default function CourierFetchContinueIncompleteRequests(Private) {
|
||||
var INCOMPLETE = Private(CourierFetchReqStatusProvider).INCOMPLETE;
|
||||
const INCOMPLETE = Private(ReqStatusProvider).INCOMPLETE;
|
||||
|
||||
function continueIncompleteRequests(strategy, requests, responses, fetchWithStrategy) {
|
||||
var incomplete = [];
|
||||
const incomplete = [];
|
||||
|
||||
responses.forEach(function (resp, i) {
|
||||
if (resp === INCOMPLETE) {
|
|
@ -1,18 +1,20 @@
|
|||
import _ from 'lodash';
|
||||
import CourierRequestQueueProvider from 'ui/courier/_request_queue';
|
||||
import CourierFetchFetchTheseProvider from 'ui/courier/fetch/_fetch_these';
|
||||
import CourierFetchCallResponseHandlersProvider from 'ui/courier/fetch/_call_response_handlers';
|
||||
import CourierFetchReqStatusProvider from 'ui/courier/fetch/_req_status';
|
||||
|
||||
import RequestQueueProvider from '../_request_queue';
|
||||
import FetchTheseProvider from './fetch_these';
|
||||
import CallResponseHandlersProvider from './call_response_handlers';
|
||||
import ReqStatusProvider from './req_status';
|
||||
|
||||
export default function fetchService(Private, Promise) {
|
||||
|
||||
var requestQueue = Private(CourierRequestQueueProvider);
|
||||
var fetchThese = Private(CourierFetchFetchTheseProvider);
|
||||
const requestQueue = Private(RequestQueueProvider);
|
||||
const fetchThese = Private(FetchTheseProvider);
|
||||
|
||||
var callResponseHandlers = Private(CourierFetchCallResponseHandlersProvider);
|
||||
var INCOMPLETE = Private(CourierFetchReqStatusProvider).INCOMPLETE;
|
||||
const callResponseHandlers = Private(CallResponseHandlersProvider);
|
||||
const INCOMPLETE = Private(ReqStatusProvider).INCOMPLETE;
|
||||
|
||||
function fetchQueued(strategy) {
|
||||
var requests = requestQueue.getStartable(strategy);
|
||||
const requests = requestQueue.getStartable(strategy);
|
||||
if (!requests.length) return Promise.resolve();
|
||||
else return fetchThese(requests);
|
||||
}
|
||||
|
@ -20,7 +22,7 @@ export default function fetchService(Private, Promise) {
|
|||
this.fetchQueued = fetchQueued;
|
||||
|
||||
function fetchASource(source, strategy) {
|
||||
var defer = Promise.defer();
|
||||
const defer = Promise.defer();
|
||||
|
||||
fetchThese([
|
||||
source._createRequest(defer)
|
||||
|
|
|
@ -1,22 +1,22 @@
|
|||
import CourierFetchNotifierProvider from 'ui/courier/fetch/_notifier';
|
||||
import CourierFetchForEachStrategyProvider from 'ui/courier/fetch/_for_each_strategy';
|
||||
import CourierFetchCallClientProvider from 'ui/courier/fetch/_call_client';
|
||||
import CourierFetchCallResponseHandlersProvider from 'ui/courier/fetch/_call_response_handlers';
|
||||
import CourierFetchContinueIncompleteProvider from 'ui/courier/fetch/_continue_incomplete';
|
||||
import CourierFetchReqStatusProvider from 'ui/courier/fetch/_req_status';
|
||||
import NotifierProvider from './notifier';
|
||||
import ForEachStrategyProvider from './for_each_strategy';
|
||||
import CallClientProvider from './call_client';
|
||||
import CallResponseHandlersProvider from './call_response_handlers';
|
||||
import ContinueIncompleteProvider from './continue_incomplete';
|
||||
import ReqStatusProvider from './req_status';
|
||||
|
||||
export default function FetchTheseProvider(Private, Promise) {
|
||||
var notify = Private(CourierFetchNotifierProvider);
|
||||
var forEachStrategy = Private(CourierFetchForEachStrategyProvider);
|
||||
const notify = Private(NotifierProvider);
|
||||
const forEachStrategy = Private(ForEachStrategyProvider);
|
||||
|
||||
// core tasks
|
||||
var callClient = Private(CourierFetchCallClientProvider);
|
||||
var callResponseHandlers = Private(CourierFetchCallResponseHandlersProvider);
|
||||
var continueIncomplete = Private(CourierFetchContinueIncompleteProvider);
|
||||
const callClient = Private(CallClientProvider);
|
||||
const callResponseHandlers = Private(CallResponseHandlersProvider);
|
||||
const continueIncomplete = Private(ContinueIncompleteProvider);
|
||||
|
||||
var ABORTED = Private(CourierFetchReqStatusProvider).ABORTED;
|
||||
var DUPLICATE = Private(CourierFetchReqStatusProvider).DUPLICATE;
|
||||
var INCOMPLETE = Private(CourierFetchReqStatusProvider).INCOMPLETE;
|
||||
const ABORTED = Private(ReqStatusProvider).ABORTED;
|
||||
const DUPLICATE = Private(ReqStatusProvider).DUPLICATE;
|
||||
const INCOMPLETE = Private(ReqStatusProvider).INCOMPLETE;
|
||||
|
||||
function fetchThese(requests) {
|
||||
return forEachStrategy(requests, function (strategy, reqsForStrategy) {
|
||||
|
@ -66,7 +66,7 @@ export default function FetchTheseProvider(Private, Promise) {
|
|||
}
|
||||
|
||||
return new Promise(function (resolve) {
|
||||
var action = req.started ? req.continue : req.start;
|
||||
const action = req.started ? req.continue : req.start;
|
||||
resolve(action.call(req));
|
||||
})
|
||||
.catch(err => req.handleFailure(err));
|
|
@ -1,13 +1,13 @@
|
|||
import _ from 'lodash';
|
||||
export default function FetchForEachRequestStrategy(Private, Promise) {
|
||||
|
||||
export default function FetchForEachRequestStrategy(Private, Promise) {
|
||||
function forEachStrategy(requests, block) {
|
||||
block = Promise.method(block);
|
||||
var sets = [];
|
||||
const sets = [];
|
||||
|
||||
requests.forEach(function (req) {
|
||||
var strategy = req.strategy;
|
||||
var set = _.find(sets, { 0: strategy });
|
||||
const strategy = req.strategy;
|
||||
const set = _.find(sets, { 0: strategy });
|
||||
if (set) set[1].push(req);
|
||||
else sets.push([strategy, [req]]);
|
||||
});
|
9
src/ui/public/courier/fetch/is_request.js
Normal file
9
src/ui/public/courier/fetch/is_request.js
Normal file
|
@ -0,0 +1,9 @@
|
|||
import AbstractRequestProvider from './request';
|
||||
|
||||
export default function IsRequestProvider(Private) {
|
||||
const AbstractRequest = Private(AbstractRequestProvider);
|
||||
|
||||
return function isRequest(obj) {
|
||||
return obj instanceof AbstractRequest;
|
||||
};
|
||||
};
|
|
@ -1,17 +1,17 @@
|
|||
import CourierFetchIsRequestProvider from 'ui/courier/fetch/_is_request';
|
||||
import CourierFetchReqStatusProvider from 'ui/courier/fetch/_req_status';
|
||||
import IsRequestProvider from './is_request';
|
||||
import ReqStatusProvider from './req_status';
|
||||
|
||||
export default function FetchMergeDuplicateRequests(Private) {
|
||||
var isRequest = Private(CourierFetchIsRequestProvider);
|
||||
var DUPLICATE = Private(CourierFetchReqStatusProvider).DUPLICATE;
|
||||
const isRequest = Private(IsRequestProvider);
|
||||
const DUPLICATE = Private(ReqStatusProvider).DUPLICATE;
|
||||
|
||||
function mergeDuplicateRequests(requests) {
|
||||
// dedupe requests
|
||||
var index = {};
|
||||
const index = {};
|
||||
return requests.map(function (req) {
|
||||
if (!isRequest(req)) return req;
|
||||
|
||||
var iid = req.source._instanceid;
|
||||
const iid = req.source._instanceid;
|
||||
if (!index[iid]) {
|
||||
// this request is unique so far
|
||||
index[iid] = req;
|
|
@ -1,10 +1,11 @@
|
|||
import sinon from 'auto-release-sinon';
|
||||
import expect from 'expect.js';
|
||||
import ngMock from 'ng_mock';
|
||||
import CourierFetchRequestSegmentedProvider from 'ui/courier/fetch/request/segmented';
|
||||
import CourierFetchRequestSearchProvider from 'ui/courier/fetch/request/search';
|
||||
describe('ui/courier/fetch/request/segmented', () => {
|
||||
|
||||
import SegmentedRequestProvider from '../segmented';
|
||||
import SearchRequestProvider from '../search';
|
||||
|
||||
describe('ui/courier/fetch/request/segmented', () => {
|
||||
let Promise;
|
||||
let $rootScope;
|
||||
let SegmentedReq;
|
||||
|
@ -16,8 +17,8 @@ describe('ui/courier/fetch/request/segmented', () => {
|
|||
beforeEach(ngMock.inject((Private, $injector) => {
|
||||
Promise = $injector.get('Promise');
|
||||
$rootScope = $injector.get('$rootScope');
|
||||
SegmentedReq = Private(CourierFetchRequestSegmentedProvider);
|
||||
searchReqStart = sinon.spy(Private(CourierFetchRequestSearchProvider).prototype, 'start');
|
||||
SegmentedReq = Private(SegmentedRequestProvider);
|
||||
searchReqStart = sinon.spy(Private(SearchRequestProvider).prototype, 'start');
|
||||
}));
|
||||
|
||||
describe('#start()', () => {
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
import sinon from 'auto-release-sinon';
|
||||
import expect from 'expect.js';
|
||||
import ngMock from 'ng_mock';
|
||||
|
||||
import StubbedSearchSourceProvider from 'fixtures/stubbed_search_source';
|
||||
import CourierFetchRequestSegmentedProvider from 'ui/courier/fetch/request/segmented';
|
||||
|
||||
import SegmentedRequestProvider from '../segmented';
|
||||
|
||||
describe('ui/courier/fetch/request/segmented/_createQueue', () => {
|
||||
|
||||
let Promise;
|
||||
|
@ -16,7 +19,7 @@ describe('ui/courier/fetch/request/segmented/_createQueue', () => {
|
|||
beforeEach(ngMock.inject((Private, $injector) => {
|
||||
Promise = $injector.get('Promise');
|
||||
$rootScope = $injector.get('$rootScope');
|
||||
SegmentedReq = Private(CourierFetchRequestSegmentedProvider);
|
||||
SegmentedReq = Private(SegmentedRequestProvider);
|
||||
|
||||
MockSource = class {
|
||||
constructor() {
|
||||
|
@ -29,7 +32,7 @@ describe('ui/courier/fetch/request/segmented/_createQueue', () => {
|
|||
const req = new SegmentedReq(new MockSource());
|
||||
req._queueCreated = null;
|
||||
|
||||
var promise = req._createQueue();
|
||||
const promise = req._createQueue();
|
||||
expect(req._queueCreated).to.be(false);
|
||||
await promise;
|
||||
expect(req._queueCreated).to.be(true);
|
||||
|
|
|
@ -6,7 +6,8 @@ import sinon from 'auto-release-sinon';
|
|||
import HitSortFnProv from 'plugins/kibana/discover/_hit_sort_fn';
|
||||
import NoDigestPromises from 'test_utils/no_digest_promises';
|
||||
import StubbedSearchSourceProvider from 'fixtures/stubbed_search_source';
|
||||
import CourierFetchRequestSegmentedProvider from 'ui/courier/fetch/request/segmented';
|
||||
|
||||
import SegmentedRequestProvider from '../segmented';
|
||||
|
||||
describe('Segmented Request Index Selection', function () {
|
||||
let Promise;
|
||||
|
@ -22,7 +23,7 @@ describe('Segmented Request Index Selection', function () {
|
|||
Promise = $injector.get('Promise');
|
||||
HitSortFn = Private(HitSortFnProv);
|
||||
$rootScope = $injector.get('$rootScope');
|
||||
SegmentedReq = Private(CourierFetchRequestSegmentedProvider);
|
||||
SegmentedReq = Private(SegmentedRequestProvider);
|
||||
|
||||
MockSource = class {
|
||||
constructor() {
|
||||
|
|
|
@ -6,7 +6,8 @@ import sinon from 'auto-release-sinon';
|
|||
import HitSortFnProv from 'plugins/kibana/discover/_hit_sort_fn';
|
||||
import NoDigestPromises from 'test_utils/no_digest_promises';
|
||||
import StubbedSearchSourceProvider from 'fixtures/stubbed_search_source';
|
||||
import CourierFetchRequestSegmentedProvider from 'ui/courier/fetch/request/segmented';
|
||||
|
||||
import SegmentedRequestProvider from '../segmented';
|
||||
|
||||
describe('Segmented Request Size Picking', function () {
|
||||
let Promise;
|
||||
|
@ -22,7 +23,7 @@ describe('Segmented Request Size Picking', function () {
|
|||
Promise = $injector.get('Promise');
|
||||
HitSortFn = Private(HitSortFnProv);
|
||||
$rootScope = $injector.get('$rootScope');
|
||||
SegmentedReq = Private(CourierFetchRequestSegmentedProvider);
|
||||
SegmentedReq = Private(SegmentedRequestProvider);
|
||||
|
||||
MockSource = class {
|
||||
constructor() {
|
||||
|
|
|
@ -1,29 +0,0 @@
|
|||
import _ from 'lodash';
|
||||
import EventsProvider from 'ui/events';
|
||||
export default function CourierSegmentedReqHandle(Private) {
|
||||
var Events = Private(EventsProvider);
|
||||
|
||||
|
||||
/**
|
||||
* Simple class for creating an object to send to the
|
||||
* requester of a SegmentedRequest. Since the SegmentedRequest
|
||||
* extends AbstractRequest, it wasn't able to be the event
|
||||
* emitter it was born to be. This provides a channel for
|
||||
* setting values on the segmented request, and an event
|
||||
* emitter for the request to speak outwardly
|
||||
*
|
||||
* @param {SegmentedRequest} - req - the requst this handle relates to
|
||||
*/
|
||||
_.class(SegmentedHandle).inherits(Events);
|
||||
function SegmentedHandle(req) {
|
||||
SegmentedHandle.Super.call(this);
|
||||
|
||||
// export a couple methods from the request
|
||||
this.setDirection = _.bindKey(req, 'setDirection');
|
||||
this.setSize = _.bindKey(req, 'setSize');
|
||||
this.setMaxSegments = _.bindKey(req, 'setMaxSegments');
|
||||
this.setSortFn = _.bindKey(req, 'setSortFn');
|
||||
}
|
||||
|
||||
return SegmentedHandle;
|
||||
};
|
|
@ -1,42 +1,43 @@
|
|||
import _ from 'lodash';
|
||||
import CourierFetchStrategyDocProvider from 'ui/courier/fetch/strategy/doc';
|
||||
import CourierFetchRequestRequestProvider from 'ui/courier/fetch/request/request';
|
||||
import DocStrategyProvider from '../strategy/doc';
|
||||
import AbstractRequestProvider from './request';
|
||||
|
||||
export default function DocRequestProvider(Private) {
|
||||
|
||||
var docStrategy = Private(CourierFetchStrategyDocProvider);
|
||||
var AbstractRequest = Private(CourierFetchRequestRequestProvider);
|
||||
const docStrategy = Private(DocStrategyProvider);
|
||||
const AbstractRequest = Private(AbstractRequestProvider);
|
||||
|
||||
_.class(DocRequest).inherits(AbstractRequest);
|
||||
function DocRequest(source, defer) {
|
||||
DocRequest.Super.call(this, source, defer);
|
||||
class DocRequest extends AbstractRequest {
|
||||
constructor(...args) {
|
||||
super(...args);
|
||||
|
||||
this.type = 'doc';
|
||||
this.strategy = docStrategy;
|
||||
}
|
||||
|
||||
DocRequest.prototype.canStart = function () {
|
||||
var parent = DocRequest.Super.prototype.canStart.call(this);
|
||||
if (!parent) return false;
|
||||
|
||||
var version = this.source._version;
|
||||
var storedVersion = this.source._getStoredVersion();
|
||||
|
||||
// conditions that equal "fetch This DOC!"
|
||||
var unknown = !version && !storedVersion;
|
||||
var mismatch = version !== storedVersion;
|
||||
|
||||
return Boolean(mismatch || (unknown && !this.started));
|
||||
};
|
||||
|
||||
DocRequest.prototype.handleResponse = function (resp) {
|
||||
if (resp.found) {
|
||||
this.source._storeVersion(resp._version);
|
||||
} else {
|
||||
this.source._clearVersion();
|
||||
this.type = 'doc';
|
||||
this.strategy = docStrategy;
|
||||
}
|
||||
|
||||
return DocRequest.Super.prototype.handleResponse.call(this, resp);
|
||||
};
|
||||
canStart() {
|
||||
const parent = super.canStart();
|
||||
if (!parent) return false;
|
||||
|
||||
const version = this.source._version;
|
||||
const storedVersion = this.source._getStoredVersion();
|
||||
|
||||
// conditions that equal "fetch This DOC!"
|
||||
const unknown = !version && !storedVersion;
|
||||
const mismatch = version !== storedVersion;
|
||||
|
||||
return Boolean(mismatch || (unknown && !this.started));
|
||||
}
|
||||
|
||||
handleResponse(resp) {
|
||||
if (resp.found) {
|
||||
this.source._storeVersion(resp._version);
|
||||
} else {
|
||||
this.source._clearVersion();
|
||||
}
|
||||
|
||||
return super.handleResponse(resp);
|
||||
}
|
||||
}
|
||||
|
||||
return DocRequest;
|
||||
};
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
import CourierErrorHandlersProvider from 'ui/courier/_error_handlers';
|
||||
import Notifier from 'ui/notify/notifier';
|
||||
|
||||
export default function RequestErrorHandlerFactory(Private) {
|
||||
var errHandlers = Private(CourierErrorHandlersProvider);
|
||||
import ErrorHandlersProvider from '../../_error_handlers';
|
||||
|
||||
var notify = new Notifier({
|
||||
export default function RequestErrorHandlerFactory(Private) {
|
||||
const errHandlers = Private(ErrorHandlersProvider);
|
||||
|
||||
const notify = new Notifier({
|
||||
location: 'Courier Fetch Error'
|
||||
});
|
||||
|
||||
function handleError(req, error) {
|
||||
var myHandlers = [];
|
||||
const myHandlers = [];
|
||||
|
||||
errHandlers.splice(0).forEach(function (handler) {
|
||||
(handler.source === req.source ? myHandlers : errHandlers).push(handler);
|
|
@ -1,115 +1,110 @@
|
|||
import _ from 'lodash';
|
||||
import moment from 'moment';
|
||||
|
||||
import errors from 'ui/errors';
|
||||
import CourierRequestQueueProvider from 'ui/courier/_request_queue';
|
||||
import CourierFetchRequestErrorHandlerProvider from 'ui/courier/fetch/request/_error_handler';
|
||||
|
||||
import RequestQueueProvider from '../../_request_queue';
|
||||
import ErrorHandlerRequestProvider from './error_handler';
|
||||
|
||||
export default function AbstractReqProvider(Private, Promise) {
|
||||
var requestQueue = Private(CourierRequestQueueProvider);
|
||||
var requestErrorHandler = Private(CourierFetchRequestErrorHandlerProvider);
|
||||
const requestQueue = Private(RequestQueueProvider);
|
||||
const requestErrorHandler = Private(ErrorHandlerRequestProvider);
|
||||
|
||||
function AbstractReq(source, defer) {
|
||||
if (!(this instanceof AbstractReq) || !this.constructor || this.constructor === AbstractReq) {
|
||||
throw new Error('The AbstractReq class should not be called directly');
|
||||
return class AbstractReq {
|
||||
constructor(source, defer) {
|
||||
this.source = source;
|
||||
this.defer = defer || Promise.defer();
|
||||
this._whenAbortedHandlers = [];
|
||||
|
||||
requestQueue.push(this);
|
||||
}
|
||||
|
||||
this.source = source;
|
||||
this.defer = defer || Promise.defer();
|
||||
|
||||
requestQueue.push(this);
|
||||
}
|
||||
|
||||
AbstractReq.prototype.canStart = function () {
|
||||
return Boolean(!this.stopped && !this.source._fetchDisabled);
|
||||
};
|
||||
|
||||
AbstractReq.prototype.start = function () {
|
||||
if (this.started) {
|
||||
throw new TypeError('Unable to start request because it has already started');
|
||||
canStart() {
|
||||
return Boolean(!this.stopped && !this.source._fetchDisabled);
|
||||
}
|
||||
|
||||
this.started = true;
|
||||
this.moment = moment();
|
||||
start() {
|
||||
if (this.started) {
|
||||
throw new TypeError('Unable to start request because it has already started');
|
||||
}
|
||||
|
||||
var source = this.source;
|
||||
if (source.activeFetchCount) {
|
||||
source.activeFetchCount += 1;
|
||||
} else {
|
||||
source.activeFetchCount = 1;
|
||||
this.started = true;
|
||||
this.moment = moment();
|
||||
|
||||
const source = this.source;
|
||||
if (source.activeFetchCount) {
|
||||
source.activeFetchCount += 1;
|
||||
} else {
|
||||
source.activeFetchCount = 1;
|
||||
}
|
||||
|
||||
source.history = [this];
|
||||
}
|
||||
|
||||
source.history = [this];
|
||||
};
|
||||
getFetchParams() {
|
||||
return this.source._flatten();
|
||||
}
|
||||
|
||||
AbstractReq.prototype.getFetchParams = function () {
|
||||
return this.source._flatten();
|
||||
};
|
||||
transformResponse(resp) {
|
||||
return resp;
|
||||
}
|
||||
|
||||
AbstractReq.prototype.transformResponse = function (resp) {
|
||||
return resp;
|
||||
};
|
||||
filterError(resp) {
|
||||
return false;
|
||||
}
|
||||
|
||||
AbstractReq.prototype.filterError = function (resp) {
|
||||
return false;
|
||||
};
|
||||
handleResponse(resp) {
|
||||
this.success = true;
|
||||
this.resp = resp;
|
||||
}
|
||||
|
||||
AbstractReq.prototype.handleResponse = function (resp) {
|
||||
this.success = true;
|
||||
this.resp = resp;
|
||||
};
|
||||
handleFailure(error) {
|
||||
this.success = false;
|
||||
this.resp = error && error.resp;
|
||||
this.retry();
|
||||
return requestErrorHandler(this, error);
|
||||
}
|
||||
|
||||
AbstractReq.prototype.handleFailure = function (error) {
|
||||
this.success = false;
|
||||
this.resp = error && error.resp;
|
||||
this.retry();
|
||||
return requestErrorHandler(this, error);
|
||||
};
|
||||
isIncomplete() {
|
||||
return false;
|
||||
}
|
||||
|
||||
AbstractReq.prototype.isIncomplete = function () {
|
||||
return false;
|
||||
};
|
||||
continue() {
|
||||
throw new Error('Unable to continue ' + this.type + ' request');
|
||||
}
|
||||
|
||||
AbstractReq.prototype.continue = function () {
|
||||
throw new Error('Unable to continue ' + this.type + ' request');
|
||||
};
|
||||
retry() {
|
||||
const clone = this.clone();
|
||||
this.abort();
|
||||
return clone;
|
||||
}
|
||||
|
||||
AbstractReq.prototype.retry = function () {
|
||||
var clone = this.clone();
|
||||
this.abort();
|
||||
return clone;
|
||||
};
|
||||
|
||||
// don't want people overriding this, so it becomes a natural
|
||||
// part of .abort() and .complete()
|
||||
function stop(then) {
|
||||
return function () {
|
||||
_markStopped() {
|
||||
if (this.stopped) return;
|
||||
|
||||
this.stopped = true;
|
||||
this.source.activeFetchCount -= 1;
|
||||
_.pull(requestQueue, this);
|
||||
then.call(this);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
AbstractReq.prototype.abort = stop(function () {
|
||||
this.defer = null;
|
||||
this.aborted = true;
|
||||
if (this._whenAborted) _.callEach(this._whenAborted);
|
||||
});
|
||||
abort() {
|
||||
this._markStopped();
|
||||
this.defer = null;
|
||||
this.aborted = true;
|
||||
_.callEach(this._whenAbortedHandlers);
|
||||
}
|
||||
|
||||
AbstractReq.prototype.whenAborted = function (cb) {
|
||||
this._whenAborted = (this._whenAborted || []);
|
||||
this._whenAborted.push(cb);
|
||||
whenAborted(cb) {
|
||||
this._whenAbortedHandlers.push(cb);
|
||||
}
|
||||
|
||||
complete() {
|
||||
this._markStopped();
|
||||
this.ms = this.moment.diff() * -1;
|
||||
this.defer.resolve(this.resp);
|
||||
}
|
||||
|
||||
clone() {
|
||||
return new this.constructor(this.source, this.defer);
|
||||
}
|
||||
};
|
||||
|
||||
AbstractReq.prototype.complete = stop(function () {
|
||||
this.ms = this.moment.diff() * -1;
|
||||
this.defer.resolve(this.resp);
|
||||
});
|
||||
|
||||
AbstractReq.prototype.clone = function () {
|
||||
return new this.constructor(this.source, this.defer);
|
||||
};
|
||||
|
||||
return AbstractReq;
|
||||
};
|
||||
|
|
|
@ -1,19 +1,17 @@
|
|||
import _ from 'lodash';
|
||||
import CourierFetchStrategySearchProvider from 'ui/courier/fetch/strategy/search';
|
||||
import CourierFetchRequestRequestProvider from 'ui/courier/fetch/request/request';
|
||||
import SearchStrategyProvider from '../strategy/search';
|
||||
import AbstractRequestProvider from './request';
|
||||
|
||||
export default function SearchReqProvider(Private) {
|
||||
|
||||
var searchStrategy = Private(CourierFetchStrategySearchProvider);
|
||||
var AbstractRequest = Private(CourierFetchRequestRequestProvider);
|
||||
const searchStrategy = Private(SearchStrategyProvider);
|
||||
const AbstractRequest = Private(AbstractRequestProvider);
|
||||
|
||||
_.class(SearchReq).inherits(AbstractRequest);
|
||||
var Super = SearchReq.Super;
|
||||
function SearchReq(source, defer) {
|
||||
Super.call(this, source, defer);
|
||||
return class SearchReq extends AbstractRequest {
|
||||
constructor(...args) {
|
||||
super(...args);
|
||||
|
||||
this.type = 'search';
|
||||
this.strategy = searchStrategy;
|
||||
}
|
||||
|
||||
return SearchReq;
|
||||
this.type = 'search';
|
||||
this.strategy = searchStrategy;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
|
|
@ -1,334 +1,337 @@
|
|||
import { isNumber } from 'lodash';
|
||||
import _ from 'lodash';
|
||||
import CourierFetchRequestSearchProvider from 'ui/courier/fetch/request/search';
|
||||
import CourierFetchRequestSegmentedHandleProvider from 'ui/courier/fetch/request/_segmented_handle';
|
||||
export default function CourierSegmentedReqProvider(es, Private, Promise, Notifier, timefilter, config) {
|
||||
var SearchReq = Private(CourierFetchRequestSearchProvider);
|
||||
var SegmentedHandle = Private(CourierFetchRequestSegmentedHandleProvider);
|
||||
import { isNumber } from 'lodash';
|
||||
|
||||
var notify = new Notifier({
|
||||
import Notifier from 'ui/notify/notifier';
|
||||
|
||||
import SearchRequestProvider from './search';
|
||||
import SegmentedHandleProvider from './segmented_handle';
|
||||
|
||||
export default function SegmentedReqProvider(es, Private, Promise, timefilter, config) {
|
||||
const SearchReq = Private(SearchRequestProvider);
|
||||
const SegmentedHandle = Private(SegmentedHandleProvider);
|
||||
|
||||
const notify = new Notifier({
|
||||
location: 'Segmented Fetch'
|
||||
});
|
||||
|
||||
_.class(SegmentedReq).inherits(SearchReq);
|
||||
function SegmentedReq(source, defer, initFn) {
|
||||
SearchReq.call(this, source, defer);
|
||||
class SegmentedReq extends SearchReq {
|
||||
constructor(source, defer, initFn) {
|
||||
super(source, defer);
|
||||
|
||||
this.type = 'segmented';
|
||||
this.type = 'segmented';
|
||||
|
||||
// segmented request specific state
|
||||
this._initFn = initFn;
|
||||
// segmented request specific state
|
||||
this._initFn = initFn;
|
||||
|
||||
this._desiredSize = null;
|
||||
this._maxSegments = config.get('courier:maxSegmentCount');
|
||||
this._direction = 'desc';
|
||||
this._sortFn = null;
|
||||
this._queueCreated = false;
|
||||
this._handle = new SegmentedHandle(this);
|
||||
this._desiredSize = null;
|
||||
this._maxSegments = config.get('courier:maxSegmentCount');
|
||||
this._direction = 'desc';
|
||||
this._sortFn = null;
|
||||
this._queueCreated = false;
|
||||
this._handle = new SegmentedHandle(this);
|
||||
|
||||
this._hitWindow = null;
|
||||
this._hitWindow = null;
|
||||
|
||||
// prevent the source from changing between requests,
|
||||
// all calls will return the same promise
|
||||
this._getFlattenedSource = _.once(this._getFlattenedSource);
|
||||
}
|
||||
|
||||
/*********
|
||||
** SearchReq overrides
|
||||
*********/
|
||||
|
||||
SegmentedReq.prototype.start = function () {
|
||||
var self = this;
|
||||
|
||||
this._complete = [];
|
||||
this._active = null;
|
||||
this._segments = [];
|
||||
this._all = [];
|
||||
this._queue = [];
|
||||
|
||||
this._mergedResp = {
|
||||
took: 0,
|
||||
hits: {
|
||||
hits: [],
|
||||
total: 0,
|
||||
max_score: 0
|
||||
}
|
||||
};
|
||||
|
||||
// give the request consumer a chance to receive each segment and set
|
||||
// parameters via the handle
|
||||
if (_.isFunction(this._initFn)) this._initFn(this._handle);
|
||||
return this._createQueue().then(function (queue) {
|
||||
self._all = queue.slice(0);
|
||||
|
||||
// Send the initial fetch status
|
||||
self._reportStatus();
|
||||
|
||||
return SearchReq.prototype.start.call(self);
|
||||
});
|
||||
};
|
||||
|
||||
SegmentedReq.prototype.continue = function () {
|
||||
return this._reportStatus();
|
||||
};
|
||||
|
||||
SegmentedReq.prototype.getFetchParams = function () {
|
||||
var self = this;
|
||||
|
||||
return self._getFlattenedSource().then(function (flatSource) {
|
||||
var params = _.cloneDeep(flatSource);
|
||||
|
||||
// calculate the number of indices to fetch in this request in order to prevent
|
||||
// more than self._maxSegments requests. We use Math.max(1, n) to ensure that each request
|
||||
// has at least one index pattern, and Math.floor() to make sure that if the
|
||||
// number of indices does not round out evenly the extra index is tacked onto the last
|
||||
// request, making sure the first request returns faster.
|
||||
var remainingSegments = self._maxSegments - self._segments.length;
|
||||
var indexCount = Math.max(1, Math.floor(self._queue.length / remainingSegments));
|
||||
|
||||
var indices = self._active = self._queue.splice(0, indexCount);
|
||||
params.index = _.pluck(indices, 'index');
|
||||
|
||||
if (isNumber(self._desiredSize)) {
|
||||
params.body.size = self._pickSizeForIndices(indices);
|
||||
}
|
||||
|
||||
return params;
|
||||
});
|
||||
};
|
||||
|
||||
SegmentedReq.prototype.handleResponse = function (resp) {
|
||||
return this._consumeSegment(resp);
|
||||
};
|
||||
|
||||
SegmentedReq.prototype.filterError = function (resp) {
|
||||
if (/ClusterBlockException.*index\sclosed/.test(resp.error)) {
|
||||
this._consumeSegment(false);
|
||||
return true;
|
||||
// prevent the source from changing between requests,
|
||||
// all calls will return the same promise
|
||||
this._getFlattenedSource = _.once(this._getFlattenedSource);
|
||||
}
|
||||
};
|
||||
|
||||
SegmentedReq.prototype.isIncomplete = function () {
|
||||
var queueNotCreated = !this._queueCreated;
|
||||
var queueNotEmpty = this._queue.length > 0;
|
||||
return queueNotCreated || queueNotEmpty;
|
||||
};
|
||||
/*********
|
||||
** SearchReq overrides
|
||||
*********/
|
||||
|
||||
SegmentedReq.prototype.clone = function () {
|
||||
return new SegmentedReq(this.source, this.defer, this._initFn);
|
||||
};
|
||||
start() {
|
||||
this._complete = [];
|
||||
this._active = null;
|
||||
this._segments = [];
|
||||
this._all = [];
|
||||
this._queue = [];
|
||||
|
||||
SegmentedReq.prototype.complete = function () {
|
||||
this._reportStatus();
|
||||
this._handle.emit('complete');
|
||||
return SearchReq.prototype.complete.call(this);
|
||||
};
|
||||
this._mergedResp = {
|
||||
took: 0,
|
||||
hits: {
|
||||
hits: [],
|
||||
total: 0,
|
||||
max_score: 0
|
||||
}
|
||||
};
|
||||
|
||||
/*********
|
||||
** SegmentedReq specific methods
|
||||
*********/
|
||||
// give the request consumer a chance to receive each segment and set
|
||||
// parameters via the handle
|
||||
if (_.isFunction(this._initFn)) this._initFn(this._handle);
|
||||
return this._createQueue().then((queue) => {
|
||||
this._all = queue.slice(0);
|
||||
|
||||
/**
|
||||
* Set the sort total number of segments to emit
|
||||
*
|
||||
* @param {number}
|
||||
*/
|
||||
SegmentedReq.prototype.setMaxSegments = function (maxSegments) {
|
||||
this._maxSegments = Math.max(_.parseInt(maxSegments), 1);
|
||||
};
|
||||
// Send the initial fetch status
|
||||
this._reportStatus();
|
||||
|
||||
/**
|
||||
* Set the sort direction for the request.
|
||||
*
|
||||
* @param {string} dir - one of 'asc' or 'desc'
|
||||
*/
|
||||
SegmentedReq.prototype.setDirection = function (dir) {
|
||||
switch (dir) {
|
||||
case 'asc':
|
||||
case 'desc':
|
||||
return (this._direction = dir);
|
||||
default:
|
||||
throw new TypeError('unknown sort direction "' + dir + '"');
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the function that will be used to sort the rows
|
||||
*
|
||||
* @param {fn}
|
||||
*/
|
||||
SegmentedReq.prototype.setSortFn = function (sortFn) {
|
||||
this._sortFn = sortFn;
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the sort total number of documents to
|
||||
* emit
|
||||
*
|
||||
* Setting to false will not limit the documents,
|
||||
* if a number is set the size of the request to es
|
||||
* will be updated on each new request
|
||||
*
|
||||
* @param {number|false}
|
||||
*/
|
||||
SegmentedReq.prototype.setSize = function (totalSize) {
|
||||
this._desiredSize = _.parseInt(totalSize);
|
||||
if (isNaN(this._desiredSize)) this._desiredSize = null;
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._createQueue = function () {
|
||||
var self = this;
|
||||
var timeBounds = timefilter.getBounds();
|
||||
var indexPattern = self.source.get('index');
|
||||
self._queueCreated = false;
|
||||
|
||||
return indexPattern.toDetailedIndexList(timeBounds.min, timeBounds.max, self._direction)
|
||||
.then(function (queue) {
|
||||
if (!_.isArray(queue)) queue = [queue];
|
||||
|
||||
self._queue = queue;
|
||||
self._queueCreated = true;
|
||||
|
||||
return queue;
|
||||
});
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._reportStatus = function () {
|
||||
return this._handle.emit('status', {
|
||||
total: this._queueCreated ? this._all.length : NaN,
|
||||
complete: this._queueCreated ? this._complete.length : NaN,
|
||||
remaining: this._queueCreated ? this._queue.length : NaN,
|
||||
hitCount: this._queueCreated ? this._mergedResp.hits.hits.length : NaN
|
||||
});
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._getFlattenedSource = function () {
|
||||
return this.source._flatten();
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._consumeSegment = function (seg) {
|
||||
var index = this._active;
|
||||
this._complete.push(index);
|
||||
if (!seg) return; // segment was ignored/filtered, don't store it
|
||||
|
||||
var hadHits = _.get(this._mergedResp, 'hits.hits.length') > 0;
|
||||
var gotHits = _.get(seg, 'hits.hits.length') > 0;
|
||||
var firstHits = !hadHits && gotHits;
|
||||
var haveHits = hadHits || gotHits;
|
||||
|
||||
this._mergeSegment(seg);
|
||||
this.resp = _.omit(this._mergedResp, '_bucketIndex');
|
||||
|
||||
if (firstHits) this._handle.emit('first', seg);
|
||||
if (gotHits) this._handle.emit('segment', seg);
|
||||
if (haveHits) this._handle.emit('mergedSegment', this.resp);
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._mergeHits = function (hits) {
|
||||
var mergedHits = this._mergedResp.hits.hits;
|
||||
var desiredSize = this._desiredSize;
|
||||
var sortFn = this._sortFn;
|
||||
|
||||
_.pushAll(hits, mergedHits);
|
||||
|
||||
if (sortFn) {
|
||||
notify.event('resort rows', function () {
|
||||
mergedHits.sort(sortFn);
|
||||
return super.start();
|
||||
});
|
||||
}
|
||||
|
||||
if (isNumber(desiredSize)) {
|
||||
mergedHits = this._mergedResp.hits.hits = mergedHits.slice(0, desiredSize);
|
||||
}
|
||||
};
|
||||
|
||||
SegmentedReq.prototype._mergeSegment = notify.timed('merge response segment', function (seg) {
|
||||
var merged = this._mergedResp;
|
||||
|
||||
this._segments.push(seg);
|
||||
|
||||
merged.took += seg.took;
|
||||
merged.hits.total += seg.hits.total;
|
||||
merged.hits.max_score = Math.max(merged.hits.max_score, seg.hits.max_score);
|
||||
|
||||
if (_.size(seg.hits.hits)) {
|
||||
this._mergeHits(seg.hits.hits);
|
||||
this._detectHitsWindow(merged.hits.hits);
|
||||
continue() {
|
||||
return this._reportStatus();
|
||||
}
|
||||
|
||||
if (!seg.aggregations) return;
|
||||
getFetchParams() {
|
||||
return this._getFlattenedSource().then(flatSource => {
|
||||
const params = _.cloneDeep(flatSource);
|
||||
|
||||
Object.keys(seg.aggregations).forEach(function (aggKey) {
|
||||
// calculate the number of indices to fetch in this request in order to prevent
|
||||
// more than this._maxSegments requests. We use Math.max(1, n) to ensure that each request
|
||||
// has at least one index pattern, and Math.floor() to make sure that if the
|
||||
// number of indices does not round out evenly the extra index is tacked onto the last
|
||||
// request, making sure the first request returns faster.
|
||||
const remainingSegments = this._maxSegments - this._segments.length;
|
||||
const indexCount = Math.max(1, Math.floor(this._queue.length / remainingSegments));
|
||||
|
||||
if (!merged.aggregations) {
|
||||
// start merging aggregations
|
||||
merged.aggregations = {};
|
||||
merged._bucketIndex = {};
|
||||
}
|
||||
const indices = this._active = this._queue.splice(0, indexCount);
|
||||
params.index = _.pluck(indices, 'index');
|
||||
|
||||
if (!merged.aggregations[aggKey]) {
|
||||
merged.aggregations[aggKey] = {
|
||||
buckets: []
|
||||
};
|
||||
}
|
||||
|
||||
seg.aggregations[aggKey].buckets.forEach(function (bucket) {
|
||||
var mbucket = merged._bucketIndex[bucket.key];
|
||||
if (mbucket) {
|
||||
mbucket.doc_count += bucket.doc_count;
|
||||
return;
|
||||
if (isNumber(this._desiredSize)) {
|
||||
params.body.size = this._pickSizeForIndices(indices);
|
||||
}
|
||||
|
||||
mbucket = merged._bucketIndex[bucket.key] = bucket;
|
||||
merged.aggregations[aggKey].buckets.push(mbucket);
|
||||
return params;
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
SegmentedReq.prototype._detectHitsWindow = function (hits) {
|
||||
hits = hits || [];
|
||||
var indexPattern = this.source.get('index');
|
||||
var desiredSize = this._desiredSize;
|
||||
|
||||
var size = _.size(hits);
|
||||
if (!isNumber(desiredSize) || size < desiredSize) {
|
||||
this._hitWindow = {
|
||||
size: size,
|
||||
min: -Infinity,
|
||||
max: Infinity
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
let min;
|
||||
let max;
|
||||
handleResponse(resp) {
|
||||
return this._consumeSegment(resp);
|
||||
}
|
||||
|
||||
hits.forEach(function (deepHit) {
|
||||
var hit = indexPattern.flattenHit(deepHit);
|
||||
var time = hit[indexPattern.timeFieldName];
|
||||
if (min == null || time < min) min = time;
|
||||
if (max == null || time > max) max = time;
|
||||
});
|
||||
filterError(resp) {
|
||||
if (/ClusterBlockException.*index\sclosed/.test(resp.error)) {
|
||||
this._consumeSegment(false);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
this._hitWindow = { size, min, max };
|
||||
};
|
||||
isIncomplete() {
|
||||
const queueNotCreated = !this._queueCreated;
|
||||
const queueNotEmpty = this._queue.length > 0;
|
||||
return queueNotCreated || queueNotEmpty;
|
||||
}
|
||||
|
||||
SegmentedReq.prototype._pickSizeForIndices = function (indices) {
|
||||
var hitWindow = this._hitWindow;
var desiredSize = this._desiredSize;
clone() {
return new SegmentedReq(this.source, this.defer, this._initFn);
}

if (!isNumber(desiredSize)) return null;
// we don't have any hits yet, get us more info!
if (!hitWindow) return desiredSize;
// the order of documents isn't important, just get us more
if (!this._sortFn) return Math.max(desiredSize - hitWindow.size, 0);
// if all of the documents in every index fall outside of our current doc set, we can ignore them.
var someOverlap = indices.some(function (index) {
return index.min <= hitWindow.max && hitWindow.min <= index.max;
});
complete() {
this._reportStatus();
this._handle.emit('complete');
return super.complete();
}

return someOverlap ? desiredSize : 0;
};

/*********
** SegmentedReq specific methods
*********/


/**
* Set the sort total number of segments to emit
*
* @param {number}
*/
setMaxSegments(maxSegments) {
this._maxSegments = Math.max(_.parseInt(maxSegments), 1);
}

/**
* Set the sort direction for the request.
*
* @param {string} dir - one of 'asc' or 'desc'
*/
setDirection(dir) {
switch (dir) {
case 'asc':
case 'desc':
return (this._direction = dir);
default:
throw new TypeError('unknown sort direction "' + dir + '"');
}
}

/**
* Set the function that will be used to sort the rows
*
* @param {fn}
*/
setSortFn(sortFn) {
this._sortFn = sortFn;
}

/**
* Set the sort total number of documents to
* emit
*
* Setting to false will not limit the documents,
* if a number is set the size of the request to es
* will be updated on each new request
*
* @param {number|false}
*/
setSize(totalSize) {
this._desiredSize = _.parseInt(totalSize);
if (isNaN(this._desiredSize)) this._desiredSize = null;
}

_createQueue() {
const timeBounds = timefilter.getBounds();
const indexPattern = this.source.get('index');
this._queueCreated = false;

return indexPattern.toDetailedIndexList(timeBounds.min, timeBounds.max, this._direction)
.then(queue => {
if (!_.isArray(queue)) queue = [queue];

this._queue = queue;
this._queueCreated = true;

return queue;
});
}

_reportStatus() {
return this._handle.emit('status', {
total: this._queueCreated ? this._all.length : NaN,
complete: this._queueCreated ? this._complete.length : NaN,
remaining: this._queueCreated ? this._queue.length : NaN,
hitCount: this._queueCreated ? this._mergedResp.hits.hits.length : NaN
});
}

_getFlattenedSource() {
return this.source._flatten();
}

_consumeSegment(seg) {
const index = this._active;
this._complete.push(index);
if (!seg) return; // segment was ignored/filtered, don't store it

const hadHits = _.get(this._mergedResp, 'hits.hits.length') > 0;
const gotHits = _.get(seg, 'hits.hits.length') > 0;
const firstHits = !hadHits && gotHits;
const haveHits = hadHits || gotHits;

this._mergeSegment(seg);
this.resp = _.omit(this._mergedResp, '_bucketIndex');

if (firstHits) this._handle.emit('first', seg);
if (gotHits) this._handle.emit('segment', seg);
if (haveHits) this._handle.emit('mergedSegment', this.resp);
}

_mergeHits(hits) {
const mergedHits = this._mergedResp.hits.hits;
const desiredSize = this._desiredSize;
const sortFn = this._sortFn;

_.pushAll(hits, mergedHits);

if (sortFn) {
notify.event('resort rows', function () {
mergedHits.sort(sortFn);
});
}

if (isNumber(desiredSize)) {
this._mergedResp.hits.hits = mergedHits.slice(0, desiredSize);
}
}

_mergeSegment(seg) {
const merged = this._mergedResp;

this._segments.push(seg);

merged.took += seg.took;
merged.hits.total += seg.hits.total;
merged.hits.max_score = Math.max(merged.hits.max_score, seg.hits.max_score);

if (_.size(seg.hits.hits)) {
this._mergeHits(seg.hits.hits);
this._detectHitsWindow(merged.hits.hits);
}

if (!seg.aggregations) return;

Object.keys(seg.aggregations).forEach(function (aggKey) {

if (!merged.aggregations) {
// start merging aggregations
merged.aggregations = {};
merged._bucketIndex = {};
}

if (!merged.aggregations[aggKey]) {
merged.aggregations[aggKey] = {
buckets: []
};
}

seg.aggregations[aggKey].buckets.forEach(function (bucket) {
let mbucket = merged._bucketIndex[bucket.key];
if (mbucket) {
mbucket.doc_count += bucket.doc_count;
return;
}

mbucket = merged._bucketIndex[bucket.key] = bucket;
merged.aggregations[aggKey].buckets.push(mbucket);
});
});
}

_detectHitsWindow(hits) {
hits = hits || [];
const indexPattern = this.source.get('index');
const desiredSize = this._desiredSize;

const size = _.size(hits);
if (!isNumber(desiredSize) || size < desiredSize) {
this._hitWindow = {
size: size,
min: -Infinity,
max: Infinity
};
return;
}

let min;
let max;

hits.forEach(function (deepHit) {
const hit = indexPattern.flattenHit(deepHit);
const time = hit[indexPattern.timeFieldName];
if (min == null || time < min) min = time;
if (max == null || time > max) max = time;
});

this._hitWindow = { size, min, max };
}

_pickSizeForIndices(indices) {
const hitWindow = this._hitWindow;
const desiredSize = this._desiredSize;

if (!isNumber(desiredSize)) return null;
// we don't have any hits yet, get us more info!
if (!hitWindow) return desiredSize;
// the order of documents isn't important, just get us more
if (!this._sortFn) return Math.max(desiredSize - hitWindow.size, 0);
// if all of the documents in every index fall outside of our current doc set, we can ignore them.
const someOverlap = indices.some(function (index) {
return index.min <= hitWindow.max && hitWindow.min <= index.max;
});

return someOverlap ? desiredSize : 0;
}
}

SegmentedReq.prototype.mergedSegment = notify.timed('merge response segment', SegmentedReq.prototype.mergedSegment);

return SegmentedReq;
};
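The size-picking rules above are easier to see in isolation. Here is a minimal standalone sketch of the same decision, with made-up `hitWindow` and `indices` values purely for illustration (the `pickSize` helper is not part of the commit):

function pickSize(desiredSize, hitWindow, sorted, indices) {
  // no size limit configured: leave the choice to the caller
  if (typeof desiredSize !== 'number') return null;
  // nothing merged yet: ask for the full desired size
  if (!hitWindow) return desiredSize;
  // unsorted: any documents will do, just top up to the desired size
  if (!sorted) return Math.max(desiredSize - hitWindow.size, 0);
  // sorted: only worth fetching if some index overlaps the current hit window
  var someOverlap = indices.some(function (index) {
    return index.min <= hitWindow.max && hitWindow.min <= index.max;
  });
  return someOverlap ? desiredSize : 0;
}

// a sorted request for 500 docs whose hit window spans [1000, 1100]
// can skip an index that only covers [800, 900]
pickSize(500, { size: 500, min: 1000, max: 1100 }, true, [{ min: 800, max: 900 }]); // => 0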
40
src/ui/public/courier/fetch/request/segmented_handle.js
Normal file

@ -0,0 +1,40 @@
import EventsProvider from 'ui/events';

export default function CourierSegmentedReqHandle(Private) {
const Events = Private(EventsProvider);

const segmentedRequest = Symbol('Actual Segmented Request');

/**
* Simple class for creating an object to send to the
* requester of a SegmentedRequest. Since the SegmentedRequest
* extends AbstractRequest, it wasn't able to be the event
* emitter it was born to be. This provides a channel for
* setting values on the segmented request, and an event
* emitter for the request to speak outwardly
*
* @param {SegmentedRequest} - req - the request this handle relates to
*/
return class SegmentedHandle extends Events {
constructor(req) {
super();
this[segmentedRequest] = req;
}

setDirection(...args) {
this[segmentedRequest].setDirection(...args);
}

setSize(...args) {
this[segmentedRequest].setSize(...args);
}

setMaxSegments(...args) {
this[segmentedRequest].setMaxSegments(...args);
}

setSortFn(...args) {
this[segmentedRequest].setSortFn(...args);
}
};
};
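As a rough sketch of how the handle is meant to be used: the `handle` variable below stands in for whatever the courier hands back for a segmented request, and the listener wiring assumes the usual `on(...)` from ui/events; the event names match the emits in segmented.js above.

// tune the underlying request through the handle...
handle.setDirection('desc');   // walk indices newest-first
handle.setSize(500);           // keep at most 500 merged hits
handle.setMaxSegments(20);     // cap how many segments are fetched

// ...and listen for its progress
handle.on('status', function (status) { /* total / complete / remaining counts */ });
handle.on('segment', function (resp) { /* a single segment's response */ });
handle.on('complete', function () { /* all segments done */ });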
@ -2,7 +2,9 @@ import _ from 'lodash';
import sinon from 'auto-release-sinon';
import expect from 'expect.js';
import ngMock from 'ng_mock';
import CourierFetchStrategySearchProvider from 'ui/courier/fetch/strategy/search';

import SearchStrategyProvider from '../search';

describe('ui/courier/fetch/strategy/search', () => {

let Promise;

@ -15,7 +17,7 @@ describe('ui/courier/fetch/strategy/search', () => {
beforeEach(ngMock.inject((Private, $injector) => {
Promise = $injector.get('Promise');
$rootScope = $injector.get('$rootScope');
search = Private(CourierFetchStrategySearchProvider);
search = Private(SearchStrategyProvider);
reqsFetchParams = [
{
index: ['logstash-123'],
@ -1,6 +1,8 @@
import { toJson } from 'ui/utils/aggressive_parse';
import _ from 'lodash';
import angular from 'angular';

import { toJson } from 'ui/utils/aggressive_parse';

export default function FetchStrategyForSearch(Private, Promise, timefilter) {

return {

@ -20,7 +22,7 @@ export default function FetchStrategyForSearch(Private, Promise, timefilter) {
return indexList;
}

var timeBounds = timefilter.getBounds();
const timeBounds = timefilter.getBounds();
return indexList.toIndexList(timeBounds.min, timeBounds.max);
})
.then(function (indexList) {
@ -6,14 +6,14 @@
class="form-control url">
</input>
<button
class="shorten-button"
class="shorten-button btn btn-default"
tooltip="Generate Short URL"
ng-click="generateShortUrl()"
ng-disabled="shortGenerated">
<span aria-hidden="true" class="fa fa-compress"></span>
</button>
<button
class="clipboard-button"
class="clipboard-button btn btn-default"
tooltip="Copy to Clipboard"
ng-click="copyToClipboard()">
<span aria-hidden="true" class="fa fa-clipboard"></span>
@ -9,7 +9,7 @@ describe('IP Address Format', function () {
fieldFormats = Private(RegistryFieldFormatsProvider);
}));

it('convers a value from a decimal to a string', function () {
it('converts a value from a decimal to a string', function () {
var ip = fieldFormats.getInstance('ip');
expect(ip.convert(1186489492)).to.be('70.184.100.148');
});
@ -1,9 +1,9 @@
import _ from 'lodash';
import 'ui/field_format_editor/numeral/numeral';
import IndexPatternsFieldFormatFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import IndexPatternsFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import BoundToConfigObjProvider from 'ui/bound_to_config_obj';
export default function AbstractNumeralFormatProvider(Private) {
var FieldFormat = Private(IndexPatternsFieldFormatFieldFormatProvider);
var FieldFormat = Private(IndexPatternsFieldFormatProvider);
var BoundToConfigObj = Private(BoundToConfigObjProvider);
var numeral = require('numeral')();

@ -1,10 +1,10 @@
import 'ui/stringify/editors/color.less';
import _ from 'lodash';
import IndexPatternsFieldFormatFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import IndexPatternsFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import colorTemplate from 'ui/stringify/editors/color.html';
export default function _StringProvider(Private) {
export default function ColorFormatProvider(Private) {

const FieldFormat = Private(IndexPatternsFieldFormatFieldFormatProvider);
const FieldFormat = Private(IndexPatternsFieldFormatProvider);
const DEFAULT_COLOR = {
range: `${Number.NEGATIVE_INFINITY}:${Number.POSITIVE_INFINITY}`,
text: '#000000',
@ -1,11 +1,11 @@
import _ from 'lodash';
import moment from 'moment';
import 'ui/field_format_editor/pattern/pattern';
import IndexPatternsFieldFormatFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import IndexPatternsFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import BoundToConfigObjProvider from 'ui/bound_to_config_obj';
import dateTemplate from 'ui/stringify/editors/date.html';
export default function DateTimeFormatProvider(Private) {
var FieldFormat = Private(IndexPatternsFieldFormatFieldFormatProvider);
var FieldFormat = Private(IndexPatternsFieldFormatProvider);
var BoundToConfigObj = Private(BoundToConfigObjProvider);


@ -1,7 +1,7 @@
import _ from 'lodash';
import IndexPatternsFieldFormatFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import IndexPatternsFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
export default function IpFormatProvider(Private) {
var FieldFormat = Private(IndexPatternsFieldFormatFieldFormatProvider);
var FieldFormat = Private(IndexPatternsFieldFormatProvider);

_.class(Ip).inherits(FieldFormat);
function Ip(params) {
@ -1,7 +1,7 @@
import _ from 'lodash';
import BoundToConfigObjProvider from 'ui/bound_to_config_obj';
import StringifyTypesNumeralProvider from 'ui/stringify/types/_numeral';
export default function NumberFormatProvider(Private) {
export default function PercentFormatProvider(Private) {
var BoundToConfigObj = Private(BoundToConfigObjProvider);
var Numeral = Private(StringifyTypesNumeralProvider);

@ -1,9 +1,9 @@
import _ from 'lodash';
import noWhiteSpace from 'ui/utils/no_white_space';
import angular from 'angular';
import IndexPatternsFieldFormatFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
export default function _SourceProvider(Private, shortDotsFilter) {
var FieldFormat = Private(IndexPatternsFieldFormatFieldFormatProvider);
import IndexPatternsFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
export default function _SourceFormatProvider(Private, shortDotsFilter) {
var FieldFormat = Private(IndexPatternsFieldFormatProvider);
var template = _.template(noWhiteSpace(require('ui/stringify/types/_source.html')));

_.class(Source).inherits(FieldFormat);
@ -1,8 +1,8 @@
import _ from 'lodash';
import 'ui/field_format_editor/samples/samples';
import IndexPatternsFieldFormatFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
export default function _StringProvider(Private) {
var FieldFormat = Private(IndexPatternsFieldFormatFieldFormatProvider);
import IndexPatternsFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
export default function StringFormatProvider(Private) {
var FieldFormat = Private(IndexPatternsFieldFormatProvider);


_.class(_String).inherits(FieldFormat);
@ -1,7 +1,8 @@
import _ from 'lodash';
import IndexPatternsFieldFormatFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';

import IndexPatternsFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
export default function TruncateFormatProvider(Private) {
var FieldFormat = Private(IndexPatternsFieldFormatFieldFormatProvider);
var FieldFormat = Private(IndexPatternsFieldFormatProvider);
var omission = '...';

_.class(Truncate).inherits(FieldFormat);

@ -31,4 +32,4 @@ export default function TruncateFormatProvider(Private) {
Truncate.sampleInput = [ require('ui/stringify/samples/large.html') ];

return Truncate;
};
};
@ -1,11 +1,11 @@
import _ from 'lodash';
import 'ui/field_format_editor/pattern/pattern';
import 'ui/stringify/icons';
import IndexPatternsFieldFormatFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import IndexPatternsFieldFormatProvider from 'ui/index_patterns/_field_format/field_format';
import urlTemplate from 'ui/stringify/editors/url.html';
export default function UrlFormatProvider(Private, highlightFilter) {

var FieldFormat = Private(IndexPatternsFieldFormatFieldFormatProvider);
var FieldFormat = Private(IndexPatternsFieldFormatProvider);


_.class(Url).inherits(FieldFormat);
@ -106,6 +106,9 @@ export default function DispatchClass(Private) {
var isClickable = this.listenerCount('click') > 0;
var addEvent = this.addEvent;
var $el = this.handler.el;
if (!this.handler.highlight) {
this.handler.highlight = self.highlight;
}

function hover(d, i) {
// Add pointer if item is clickable

@ -113,7 +116,7 @@ export default function DispatchClass(Private) {
self.addMousePointer.call(this, arguments);
}

self.highlightLegend.call(this, $el);
self.handler.highlight.call(this, $el);
self.emit('hover', self.eventResponse(d, i));
}


@ -129,9 +132,12 @@ export default function DispatchClass(Private) {
var self = this;
var addEvent = this.addEvent;
var $el = this.handler.el;
if (!this.handler.unHighlight) {
this.handler.unHighlight = self.unHighlight;
}

function mouseout() {
self.unHighlightLegend.call(this, $el);
self.handler.unHighlight.call(this, $el);
}

return addEvent('mouseout', mouseout);

@ -225,21 +231,24 @@ export default function DispatchClass(Private) {
* Mouseover Behavior
*
* @param element {D3.Selection}
* @method highlightLegend
* @method highlight
*/
Dispatch.prototype.highlightLegend = function (element) {
Dispatch.prototype.highlight = function (element) {
var label = this.getAttribute('data-label');
if (!label) return;
$('[data-label]', element.parentNode).not(function (els, el) { return $(el).data('label') !== label;}).css('opacity', 0.5);
//Opacity 1 is needed to avoid the css application
$('[data-label]', element.parentNode).css('opacity', 1).not(
function (els, el) { return `${$(el).data('label')}` === label;}
).css('opacity', 0.5);
};

/**
* Mouseout Behavior
*
* @param element {D3.Selection}
* @method unHighlightLegend
* @method unHighlight
*/
Dispatch.prototype.unHighlightLegend = function (element) {
Dispatch.prototype.unHighlight = function (element) {
$('[data-label]', element.parentNode).css('opacity', 1);
};

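The new highlight is a plain jQuery/CSS-opacity trick: reset every element that carries a data-label, then dim the ones whose label differs from the hovered one. A standalone sketch of the same technique follows; the `highlightLabel`/`unHighlightAll` names and the `chartEl` argument are illustrative only.

function highlightLabel(chartEl, label) {
  // reset first so stale inline opacity from an earlier hover can't stick around
  $('[data-label]', chartEl).css('opacity', 1).not(function (i, el) {
    return `${$(el).data('label')}` === label; // keep the hovered series at full opacity
  }).css('opacity', 0.5);
}

function unHighlightAll(chartEl) {
  $('[data-label]', chartEl).css('opacity', 1);
}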
@ -33,7 +33,26 @@ export default function AreaChartFactory(Private) {
if (this.isOverlapping) {

// Default opacity should return to 0.6 on mouseout
handler._attr.defaultOpacity = 0.6;
var defaultOpacity = 0.6;
handler._attr.defaultOpacity = defaultOpacity;
handler.highlight = function (element) {
var label = this.getAttribute('data-label');
if (!label) return;

var highlightOpacity = 0.8;
var highlightElements = $('[data-label]', element.parentNode).filter(
function (els, el) {
return `${$(el).data('label')}` === label;
});
$('[data-label]', element.parentNode).not(highlightElements).css('opacity', defaultOpacity / 2); // half of the default opacity
highlightElements.css('opacity', highlightOpacity);
};
handler.unHighlight = function (element) {
$('[data-label]', element).css('opacity', defaultOpacity);

//The legend should keep max opacity
$('[data-label]', $(element).siblings()).css('opacity', 1);
};
}

this.checkIfEnoughData();
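For overlapping area charts the override above boils down to a small opacity scheme, summarised here for reference; the `opacity` object itself is only illustrative, the values come from the handler code above.

var opacity = {
  resting: 0.6,      // defaultOpacity, restored on mouseout
  hovered: 0.8,      // highlightOpacity for the hovered series
  dimmed: 0.6 / 2,   // every other series while hovering
  legend: 1          // legend entries always stay fully opaque
};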
@ -6,8 +6,8 @@

<li
ng-repeat="legendData in labels track by legendData.label"
ng-mouseenter="highlightSeries(legendData.label)"
ng-mouseleave="unhighlightSeries()"
ng-mouseenter="highlight($event)"
ng-mouseleave="unhighlight($event)"
data-label="{{legendData.label}}"
class="legend-value color">

@ -38,4 +38,4 @@

</li>
</ul>
</div>
</div>

@ -29,12 +29,18 @@ uiModules.get('kibana')
refresh();
});

$scope.highlightSeries = function (label) {
$('[data-label]', $elem.siblings()).not(function (els, el) { return $(el).data('label') !== label;}).css('opacity', 0.5);
$scope.highlight = function (event) {
var el = event.currentTarget;
var handler = $scope.renderbot.vislibVis.handler;
if (!handler) return;
handler.highlight.call(el, handler.el);
};

$scope.unhighlightSeries = function () {
$('[data-label]', $elem.siblings()).css('opacity', 1);
$scope.unhighlight = function (event) {
var el = event.currentTarget;
var handler = $scope.renderbot.vislibVis.handler;
if (!handler) return;
handler.unHighlight.call(el, handler.el);
};

$scope.setColor = function (label, color) {