Merge remote-tracking branch 'upstream/master' into feature/design-merge-master

Timothy Sullivan 2016-02-09 17:07:09 -07:00
commit 16300d6607
983 changed files with 33632 additions and 33020 deletions

View file

@ -30,6 +30,7 @@ rules:
no-bitwise: 0
no-caller: 2
no-cond-assign: 0
no-const-assign: 2
no-debugger: 2
no-empty: 2
no-eval: 2

View file

@ -147,6 +147,7 @@ Distributable packages can be found in `target/` after the build completes.
Packages are built using fpm, pleaserun, dpkg, and rpm. fpm and pleaserun can be installed using gem. Package building has only been tested on Linux and is not supported on any other platform.
```sh
gem install pleaserun
apt-get install ruby-dev
gem install fpm
npm run build:ospackages
```

View file

@ -4,7 +4,7 @@ Kibana is an open source ([Apache Licensed](https://github.com/elastic/kibana/bl
## Requirements
- Elasticsearch version 2.2.0 or later
- Elasticsearch master
- Kibana binary package
## Installation

View file

@ -3,7 +3,7 @@
You can set up Kibana and start exploring your Elasticsearch indices in minutes.
All you need is:
* Elasticsearch 2.1 or later
* Elasticsearch master
* A modern web browser - http://www.elastic.co/subscriptions/matrix#matrix_browsers[Supported Browsers].
* Information about your Elasticsearch installation:
** URL of the Elasticsearch instance you want to connect to.
@ -82,8 +82,8 @@ simply be the name of a single index.
reads the index mapping to list all of the fields that contain a timestamp. If your index doesn't have time-based data,
disable the *Index contains time-based events* option.
+
WARNING: Using event times to create index names is *deprecated* in this release of Kibana. Support for this functionality
will be removed entirely in the next major Kibana release. Elasticsearch 2.1 includes sophisticated date parsing APIs that
WARNING: Using event times to create index names is *deprecated* in this release of Kibana. Support for this functionality
will be removed entirely in the next major Kibana release. Elasticsearch 2.1 includes sophisticated date parsing APIs that
Kibana uses to determine date information, removing the need to specify dates in the index pattern name.
+
. Click *Create* to add the index pattern. This first pattern is automatically configured as the default.
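To make the deprecation warning above concrete, here is an illustrative pair of index pattern names (examples only, not taken from this commit): a plain wildcard pattern versus the older event-time style whose support is being phased out.

```
logstash-*               # plain wildcard pattern (preferred)
[logstash-]YYYY.MM.DD    # event-time pattern; deprecated in this release of Kibana
```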

View file

@ -92,8 +92,8 @@
"commander": "2.8.1",
"css-loader": "0.17.0",
"d3": "3.5.6",
"elasticsearch": "8.0.1",
"elasticsearch-browser": "8.0.1",
"elasticsearch": "10.1.2",
"elasticsearch-browser": "10.1.2",
"expiry-js": "0.1.7",
"exports-loader": "0.6.2",
"expose-loader": "0.7.0",
@ -154,7 +154,7 @@
"grunt-cli": "0.1.13",
"grunt-contrib-clean": "0.6.0",
"grunt-contrib-copy": "0.8.1",
"grunt-esvm": "2.0.0",
"grunt-esvm": "2.1.1",
"grunt-karma": "0.12.0",
"grunt-run": "0.5.0",
"grunt-s3": "0.2.0-alpha.3",

View file

@ -1,9 +1,9 @@
let _ = require('lodash');
let Command = require('commander').Command;
import _ from 'lodash';
let red = require('./color').red;
let yellow = require('./color').yellow;
let help = require('./help');
import help from './help';
import { Command } from 'commander';
import { red } from './color';
import { yellow } from './color';
Command.prototype.error = function (err) {
if (err && err.message) err = err.message;

View file

@ -1,11 +1,11 @@
let _ = require('lodash');
let ansicolors = require('ansicolors');
import _ from 'lodash';
import ansicolors from 'ansicolors';
let log = _.restParam(function (color, label, rest1) {
console.log.apply(console, [color(` ${_.trim(label)} `)].concat(rest1));
});
let color = require('./color');
import color from './color';
module.exports = class Log {
constructor(quiet, silent) {

View file

@ -1,8 +1,8 @@
let _ = require('lodash');
import _ from 'lodash';
let utils = require('requirefrom')('src/utils');
let pkg = utils('packageJson');
let Command = require('./Command');
import Command from './Command';
let argv = process.env.kbnWorkerArgv ? JSON.parse(process.env.kbnWorkerArgv) : process.argv.slice();
let program = new Command('bin/kibana');

View file

@ -1,12 +1,12 @@
const cluster = require('cluster');
import cluster from 'cluster';
const { join } = require('path');
const { format: formatUrl } = require('url');
const Hapi = require('hapi');
import Hapi from 'hapi';
const { debounce, compact, get, invoke, bindAll, once, sample } = require('lodash');
const Log = require('../Log');
const Worker = require('./worker');
const BasePathProxy = require('./base_path_proxy');
import Log from '../Log';
import Worker from './worker';
import BasePathProxy from './base_path_proxy';
process.env.kbnWorkerType = 'managr';

View file

@ -1,9 +1,9 @@
let _ = require('lodash');
let cluster = require('cluster');
import _ from 'lodash';
import cluster from 'cluster';
let { resolve } = require('path');
let { EventEmitter } = require('events');
let fromRoot = require('../../utils/fromRoot');
import fromRoot from '../../utils/fromRoot';
let cliPath = fromRoot('src/cli');
let baseArgs = _.difference(process.argv.slice(2), ['--no-watch']);

View file

@ -1,6 +1,6 @@
var _ = require('lodash');
var ansicolors = require('ansicolors');
import _ from 'lodash';
import ansicolors from 'ansicolors';
exports.green = _.flow(ansicolors.black, ansicolors.bgGreen);
exports.red = _.flow(ansicolors.white, ansicolors.bgRed);

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = function (command, spaces) {
if (!_.size(command.commands)) {

View file

@ -0,0 +1,31 @@
import expect from 'expect.js';
import fileType, { ZIP, TAR } from '../file_type';
describe('kibana cli', function () {
describe('file_type', function () {
it('returns ZIP for .zip filename', function () {
const type = fileType('wat.zip');
expect(type).to.equal(ZIP);
});
it('returns TAR for .tar.gz filename', function () {
const type = fileType('wat.tar.gz');
expect(type).to.equal(TAR);
});
it('returns TAR for .tgz filename', function () {
const type = fileType('wat.tgz');
expect(type).to.equal(TAR);
});
it('returns undefined for unknown file type', function () {
const type = fileType('wat.unknown');
expect(type).to.equal(undefined);
});
it('accepts paths', function () {
const type = fileType('/some/path/to/wat.zip');
expect(type).to.equal(ZIP);
});
it('accepts urls', function () {
const type = fileType('http://example.com/wat.zip');
expect(type).to.equal(ZIP);
});
});
});

View file

@ -1,6 +1,6 @@
const expect = require('expect.js');
const sinon = require('sinon');
const plugin = require('../plugin');
import expect from 'expect.js';
import sinon from 'sinon';
import plugin from '../plugin';
describe('kibana cli', function () {

View file

@ -1,10 +1,10 @@
const expect = require('expect.js');
const sinon = require('sinon');
const fs = require('fs');
const rimraf = require('rimraf');
import expect from 'expect.js';
import sinon from 'sinon';
import fs from 'fs';
import rimraf from 'rimraf';
const pluginCleaner = require('../plugin_cleaner');
const pluginLogger = require('../plugin_logger');
import pluginCleaner from '../plugin_cleaner';
import pluginLogger from '../plugin_logger';
describe('kibana cli', function () {

View file

@ -1,12 +1,12 @@
const expect = require('expect.js');
const sinon = require('sinon');
const nock = require('nock');
const glob = require('glob');
const rimraf = require('rimraf');
const { join } = require('path');
const mkdirp = require('mkdirp');
const pluginLogger = require('../plugin_logger');
const pluginDownloader = require('../plugin_downloader');
import expect from 'expect.js';
import sinon from 'sinon';
import nock from 'nock';
import glob from 'glob';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
import pluginLogger from '../plugin_logger';
import pluginDownloader from '../plugin_downloader';
import { join } from 'path';
describe('kibana cli', function () {
@ -124,6 +124,25 @@ describe('kibana cli', function () {
});
});
it('should consider .tgz files as archive type .tar.gz', function () {
const filePath = join(__dirname, 'replies/test_plugin_master.tar.gz');
const couchdb = nock('http://www.files.com')
.defaultReplyHeaders({
'content-length': '10'
})
.get('/plugin.tgz')
.replyWithFile(200, filePath);
const sourceUrl = 'http://www.files.com/plugin.tgz';
return downloader._downloadSingle(sourceUrl)
.then(function (data) {
expect(data.archiveType).to.be('.tar.gz');
expectWorkingPathNotEmpty();
});
});
it('should download a zip from a valid http url', function () {
const filePath = join(__dirname, 'replies/test_plugin_master.zip');

View file

@ -1,13 +1,13 @@
const expect = require('expect.js');
const sinon = require('sinon');
const glob = require('glob');
const rimraf = require('rimraf');
const { join } = require('path');
const mkdirp = require('mkdirp');
import expect from 'expect.js';
import sinon from 'sinon';
import glob from 'glob';
import rimraf from 'rimraf';
import mkdirp from 'mkdirp';
const pluginLogger = require('../plugin_logger');
const extract = require('../plugin_extractor');
const pluginDownloader = require('../plugin_downloader');
import pluginLogger from '../plugin_logger';
import extract from '../plugin_extractor';
import pluginDownloader from '../plugin_downloader';
import { join } from 'path';
describe('kibana cli', function () {

View file

@ -1,10 +1,10 @@
const expect = require('expect.js');
const sinon = require('sinon');
const rimraf = require('rimraf');
const { mkdirSync } = require('fs');
const { join } = require('path');
const pluginLogger = require('../plugin_logger');
const pluginInstaller = require('../plugin_installer');
import expect from 'expect.js';
import sinon from 'sinon';
import rimraf from 'rimraf';
import pluginLogger from '../plugin_logger';
import pluginInstaller from '../plugin_installer';
import { mkdirSync } from 'fs';
import { join } from 'path';
describe('kibana cli', function () {

View file

@ -1,6 +1,6 @@
const expect = require('expect.js');
const sinon = require('sinon');
const pluginLogger = require('../plugin_logger');
import expect from 'expect.js';
import sinon from 'sinon';
import pluginLogger from '../plugin_logger';
describe('kibana cli', function () {

View file

@ -1,7 +1,7 @@
const expect = require('expect.js');
const sinon = require('sinon');
const progressReporter = require('../progress_reporter');
const pluginLogger = require('../plugin_logger');
import expect from 'expect.js';
import sinon from 'sinon';
import progressReporter from '../progress_reporter';
import pluginLogger from '../plugin_logger';
describe('kibana cli', function () {

View file

@ -1,9 +1,9 @@
var path = require('path');
var expect = require('expect.js');
import path from 'path';
import expect from 'expect.js';
var utils = require('requirefrom')('src/utils');
var fromRoot = utils('fromRoot');
var settingParser = require('../setting_parser');
import settingParser from '../setting_parser';
describe('kibana cli', function () {

View file

@ -1,5 +1,6 @@
const { createWriteStream, createReadStream, unlinkSync, statSync } = require('fs');
const getProgressReporter = require('../progress_reporter');
import getProgressReporter from '../progress_reporter';
import { createWriteStream, createReadStream, unlinkSync, statSync } from 'fs';
import fileType from '../file_type';
function openSourceFile({ sourcePath }) {
try {
@ -36,15 +37,6 @@ async function copyFile({ readStream, writeStream, progressReporter }) {
});
}
function getArchiveTypeFromFilename(path) {
if (/\.zip$/i.test(path)) {
return '.zip';
}
if (/\.tar\.gz$/i.test(path)) {
return '.tar.gz';
}
}
/*
// Responsible for managing local file transfers
*/
@ -67,7 +59,7 @@ export default async function copyLocalFile(logger, sourcePath, targetPath) {
}
// all is well, return our archive type
const archiveType = getArchiveTypeFromFilename(sourcePath);
const archiveType = fileType(sourcePath);
return { archiveType };
} catch (err) {
logger.error(err);

View file

@ -1,7 +1,8 @@
const { fromNode: fn } = require('bluebird');
const { createWriteStream, unlinkSync } = require('fs');
const Wreck = require('wreck');
const getProgressReporter = require('../progress_reporter');
import Wreck from 'wreck';
import getProgressReporter from '../progress_reporter';
import { fromNode as fn } from 'bluebird';
import { createWriteStream, unlinkSync } from 'fs';
import fileType, { ZIP, TAR } from '../file_type';
function sendRequest({ sourceUrl, timeout }) {
const maxRedirects = 11; //Because this one goes to 11.
@ -49,18 +50,12 @@ function getArchiveTypeFromResponse(resp, sourceUrl) {
const contentType = (resp.headers['content-type'] || '');
switch (contentType.toLowerCase()) {
case 'application/zip': return '.zip';
case 'application/x-gzip': return '.tar.gz';
case 'application/zip': return ZIP;
case 'application/x-gzip': return TAR;
default:
//If we can't infer the archive type from the content-type header,
//fall back to checking the extension in the url
if (/\.zip$/i.test(sourceUrl)) {
return '.zip';
}
if (/\.tar\.gz$/i.test(sourceUrl)) {
return '.tar.gz';
}
break;
return fileType(sourceUrl);
}
}

View file

@ -1,6 +1,6 @@
const zlib = require('zlib');
const fs = require('fs');
const tar = require('tar');
import zlib from 'zlib';
import fs from 'fs';
import tar from 'tar';
async function extractArchive(settings) {
await new Promise((resolve, reject) => {

View file

@ -1,4 +1,4 @@
const DecompressZip = require('@bigfunger/decompress-zip');
import DecompressZip from '@bigfunger/decompress-zip';
async function extractArchive(settings) {
await new Promise((resolve, reject) => {

View file

@ -0,0 +1,14 @@
export const TAR = '.tar.gz';
export const ZIP = '.zip';
export default function fileType(filename) {
if (/\.zip$/i.test(filename)) {
return ZIP;
}
if (/\.tar\.gz$/i.test(filename)) {
return TAR;
}
if (/\.tgz$/i.test(filename)) {
return TAR;
}
}
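A minimal usage sketch of the new helper, based only on the code and tests above (the relative import path is an assumption for the example):

```js
// Illustrative only; mirrors the behaviour exercised by the tests above.
import fileType, { ZIP, TAR } from './file_type';

fileType('wat.zip');                      // ZIP  -> '.zip'
fileType('wat.tar.gz');                   // TAR  -> '.tar.gz'
fileType('wat.tgz');                      // TAR  -> .tgz is treated as .tar.gz
fileType('/some/path/to/wat.zip');        // ZIP  -> plain paths work
fileType('http://example.com/wat.zip');   // ZIP  -> URLs work too
fileType('wat.unknown');                  // undefined for unrecognised extensions
```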

View file

@ -1,10 +1,10 @@
const utils = require('requirefrom')('src/utils');
const fromRoot = utils('fromRoot');
const settingParser = require('./setting_parser');
const installer = require('./plugin_installer');
const remover = require('./plugin_remover');
const lister = require('./plugin_lister');
const pluginLogger = require('./plugin_logger');
import settingParser from './setting_parser';
import installer from './plugin_installer';
import remover from './plugin_remover';
import lister from './plugin_lister';
import pluginLogger from './plugin_logger';
export default function pluginCli(program) {
function processCommand(command, options) {

View file

@ -1,5 +1,5 @@
const rimraf = require('rimraf');
const fs = require('fs');
import rimraf from 'rimraf';
import fs from 'fs';
export default function createPluginCleaner(settings, logger) {
function cleanPrevious() {

View file

@ -1,7 +1,7 @@
const _ = require('lodash');
const urlParse = require('url').parse;
const downloadHttpFile = require('./downloaders/http');
const downloadLocalFile = require('./downloaders/file');
import _ from 'lodash';
import downloadHttpFile from './downloaders/http';
import downloadLocalFile from './downloaders/file';
import { parse as urlParse } from 'url';
export default function createPluginDownloader(settings, logger) {
let archiveType;

View file

@ -1,12 +1,13 @@
const zipExtract = require('./extractors/zip');
const tarGzExtract = require('./extractors/tar_gz');
import zipExtract from './extractors/zip';
import tarGzExtract from './extractors/tar_gz';
import { ZIP, TAR } from './file_type';
export default function extractArchive(settings, logger, archiveType) {
switch (archiveType) {
case '.zip':
case ZIP:
return zipExtract(settings, logger);
break;
case '.tar.gz':
case TAR:
return tarGzExtract(settings, logger);
break;
default:

View file

@ -1,14 +1,14 @@
const _ = require('lodash');
import _ from 'lodash';
const utils = require('requirefrom')('src/utils');
const fromRoot = utils('fromRoot');
const pluginDownloader = require('./plugin_downloader');
const pluginCleaner = require('./plugin_cleaner');
const pluginExtractor = require('./plugin_extractor');
const KbnServer = require('../../server/KbnServer');
const readYamlConfig = require('../serve/read_yaml_config');
const { statSync, renameSync } = require('fs');
const Promise = require('bluebird');
const rimrafSync = require('rimraf').sync;
import pluginDownloader from './plugin_downloader';
import pluginCleaner from './plugin_cleaner';
import pluginExtractor from './plugin_extractor';
import KbnServer from '../../server/KbnServer';
import readYamlConfig from '../serve/read_yaml_config';
import Promise from 'bluebird';
import { sync as rimrafSync } from 'rimraf';
import { statSync, renameSync } from 'fs';
const mkdirp = Promise.promisify(require('mkdirp'));
export default {

View file

@ -1,4 +1,4 @@
const fs = require('fs');
import fs from 'fs';
export function list(settings, logger) {
fs.readdirSync(settings.pluginDir)

View file

@ -1,5 +1,5 @@
const fs = require('fs');
const rimraf = require('rimraf');
import fs from 'fs';
import rimraf from 'rimraf';
module.exports = {
remove: remove

View file

@ -1,6 +1,6 @@
const { resolve } = require('path');
const expiry = require('expiry-js');
import expiry from 'expiry-js';
import { intersection } from 'lodash';
import { resolve } from 'path';
export default function createSettingParser(options) {
function parseMilliseconds(val) {

View file

@ -1,6 +1,6 @@
let _ = require('lodash');
let fs = require('fs');
let yaml = require('js-yaml');
import _ from 'lodash';
import fs from 'fs';
import yaml from 'js-yaml';
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');

View file

@ -1,4 +1,4 @@
const _ = require('lodash');
import _ from 'lodash';
const { isWorker } = require('cluster');
const { resolve } = require('path');

View file

@ -1,83 +1,81 @@
define(function (require) {
return function GeoHashGridAggResponseFixture() {
import _ from 'lodash';
export default function GeoHashGridAggResponseFixture() {
var _ = require('lodash');
// for vis:
//
// vis = new Vis(indexPattern, {
// type: 'tile_map',
// aggs:[
// { schema: 'metric', type: 'avg', params: { field: 'bytes' } },
// { schema: 'split', type: 'terms', params: { field: '@tags', size: 10 } },
// { schema: 'segment', type: 'geohash_grid', params: { field: 'geo.coordinates', precision: 3 } }
// ],
// params: {
// isDesaturated: true,
// mapType: 'Scaled%20Circle%20Markers'
// },
// });
// for vis:
//
// vis = new Vis(indexPattern, {
// type: 'tile_map',
// aggs:[
// { schema: 'metric', type: 'avg', params: { field: 'bytes' } },
// { schema: 'split', type: 'terms', params: { field: '@tags', size: 10 } },
// { schema: 'segment', type: 'geohash_grid', params: { field: 'geo.coordinates', precision: 3 } }
// ],
// params: {
// isDesaturated: true,
// mapType: 'Scaled%20Circle%20Markers'
// },
// });
var geoHashCharts = _.union(
_.range(48, 57), // 0-9
_.range(65, 90), // A-Z
_.range(97, 122) // a-z
);
var geoHashCharts = _.union(
_.range(48, 57), // 0-9
_.range(65, 90), // A-Z
_.range(97, 122) // a-z
);
var totalDocCount = 0;
var totalDocCount = 0;
var tags = _.times(_.random(4, 20), function (i) {
// random number of tags
var docCount = 0;
var buckets = _.times(_.random(40, 200), function () {
return _.sample(geoHashCharts, 3).join('');
})
.sort()
.map(function (geoHash) {
var count = _.random(1, 5000);
var tags = _.times(_.random(4, 20), function (i) {
// random number of tags
var docCount = 0;
var buckets = _.times(_.random(40, 200), function () {
return _.sample(geoHashCharts, 3).join('');
})
.sort()
.map(function (geoHash) {
var count = _.random(1, 5000);
totalDocCount += count;
docCount += count;
return {
key: geoHash,
doc_count: count,
1: {
value: 2048 + i
}
};
});
totalDocCount += count;
docCount += count;
return {
key: 'tag ' + (i + 1),
doc_count: docCount,
3: {
buckets: buckets
},
key: geoHash,
doc_count: count,
1: {
value: 1000 + i
value: 2048 + i
}
};
});
return {
took: 3,
timed_out: false,
_shards: {
total: 4,
successful: 4,
failed: 0
key: 'tag ' + (i + 1),
doc_count: docCount,
3: {
buckets: buckets
},
hits: {
total: 298,
max_score: 0.0,
hits: []
},
aggregations: {
2: {
buckets: tags
}
1: {
value: 1000 + i
}
};
});
return {
took: 3,
timed_out: false,
_shards: {
total: 4,
successful: 4,
failed: 0
},
hits: {
total: 298,
max_score: 0.0,
hits: []
},
aggregations: {
2: {
buckets: tags
}
}
};
});
};

View file

@ -1,22 +1,20 @@
define(function (require) {
var results = {};
var results = {};
results.timeSeries = {
data: {
ordered: {
date: true,
interval: 600000,
max: 1414437217559,
min: 1414394017559
}
},
label: 'apache',
value: 44,
point: {
label: 'apache',
x: 1414400400000,
y: 44,
y0: 0
results.timeSeries = {
data: {
ordered: {
date: true,
interval: 600000,
max: 1414437217559,
min: 1414394017559
}
};
});
},
label: 'apache',
value: 44,
point: {
label: 'apache',
x: 1414400400000,
y: 44,
y0: 0
}
};

View file

@ -1,228 +1,226 @@
define(function (require) {
var data = { };
var data = { };
data.metricOnly = {
hits: { total: 1000, hits: [], max_score: 0 },
aggregations: {
agg_1: { value: 412032 },
}
};
data.metricOnly = {
hits: { total: 1000, hits: [], max_score: 0 },
aggregations: {
agg_1: { value: 412032 },
}
};
data.threeTermBuckets = {
hits: { total: 1000, hits: [], max_score: 0 },
aggregations: {
agg_2: {
buckets: [
{
key: 'png',
doc_count: 50,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'IT',
doc_count: 10,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 4, agg_1: { value: 0 } },
{ key: 'mac', doc_count: 6, agg_1: { value: 9299 } }
]
}
},
{
key: 'US',
doc_count: 20,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 8, agg_1: { value: 3029 } }
]
}
data.threeTermBuckets = {
hits: { total: 1000, hits: [], max_score: 0 },
aggregations: {
agg_2: {
buckets: [
{
key: 'png',
doc_count: 50,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'IT',
doc_count: 10,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 4, agg_1: { value: 0 } },
{ key: 'mac', doc_count: 6, agg_1: { value: 9299 } }
]
}
]
}
},
{
key: 'css',
doc_count: 20,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'MX',
doc_count: 7,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 3, agg_1: { value: 4992 } },
{ key: 'mac', doc_count: 4, agg_1: { value: 5892 } }
]
}
},
{
key: 'US',
doc_count: 13,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 1, agg_1: { value: 3029 } }
]
}
},
{
key: 'US',
doc_count: 20,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 8, agg_1: { value: 3029 } }
]
}
]
}
},
{
key: 'html',
doc_count: 90,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'CN',
doc_count: 85,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 46, agg_1: { value: 4992 } },
{ key: 'mac', doc_count: 39, agg_1: { value: 5892 } }
]
}
},
{
key: 'FR',
doc_count: 15,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 3, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 12, agg_1: { value: 3029 } }
]
}
}
]
}
}
]
}
]
}
}
};
data.oneRangeBucket = {
'took': 35,
'timed_out': false,
'_shards': {
'total': 1,
'successful': 1,
'failed': 0
},
'hits': {
'total': 6039,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': {
'0.0-1000.0': {
'from': 0,
'from_as_string': '0.0',
'to': 1000,
'to_as_string': '1000.0',
'doc_count': 606
},
'1000.0-2000.0': {
'from': 1000,
'from_as_string': '1000.0',
'to': 2000,
'to_as_string': '2000.0',
'doc_count': 298
},
{
key: 'css',
doc_count: 20,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'MX',
doc_count: 7,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 3, agg_1: { value: 4992 } },
{ key: 'mac', doc_count: 4, agg_1: { value: 5892 } }
]
}
},
{
key: 'US',
doc_count: 13,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'linux', doc_count: 12, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 1, agg_1: { value: 3029 } }
]
}
}
]
}
},
{
key: 'html',
doc_count: 90,
agg_1: { value: 412032 },
agg_3: {
buckets: [
{
key: 'CN',
doc_count: 85,
agg_1: { value: 9299 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 46, agg_1: { value: 4992 } },
{ key: 'mac', doc_count: 39, agg_1: { value: 5892 } }
]
}
},
{
key: 'FR',
doc_count: 15,
agg_1: { value: 8293 },
agg_4: {
buckets: [
{ key: 'win', doc_count: 3, agg_1: { value: 3992 } },
{ key: 'mac', doc_count: 12, agg_1: { value: 3029 } }
]
}
}
]
}
}
]
}
}
};
data.oneRangeBucket = {
'took': 35,
'timed_out': false,
'_shards': {
'total': 1,
'successful': 1,
'failed': 0
},
'hits': {
'total': 6039,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': {
'0.0-1000.0': {
'from': 0,
'from_as_string': '0.0',
'to': 1000,
'to_as_string': '1000.0',
'doc_count': 606
},
'1000.0-2000.0': {
'from': 1000,
'from_as_string': '1000.0',
'to': 2000,
'to_as_string': '2000.0',
'doc_count': 298
}
}
}
};
}
};
data.oneFilterBucket = {
'took': 11,
'timed_out': false,
'_shards': {
'total': 1,
'successful': 1,
'failed': 0
},
'hits': {
'total': 6005,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': {
'_type:apache': {
'doc_count': 4844
},
'_type:nginx': {
'doc_count': 1161
}
data.oneFilterBucket = {
'took': 11,
'timed_out': false,
'_shards': {
'total': 1,
'successful': 1,
'failed': 0
},
'hits': {
'total': 6005,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': {
'_type:apache': {
'doc_count': 4844
},
'_type:nginx': {
'doc_count': 1161
}
}
}
};
}
};
data.oneHistogramBucket = {
'took': 37,
'timed_out': false,
'_shards': {
'total': 6,
'successful': 6,
'failed': 0
},
'hits': {
'total': 49208,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': [
{
'key_as_string': '2014-09-28T00:00:00.000Z',
'key': 1411862400000,
'doc_count': 8247
},
{
'key_as_string': '2014-09-29T00:00:00.000Z',
'key': 1411948800000,
'doc_count': 8184
},
{
'key_as_string': '2014-09-30T00:00:00.000Z',
'key': 1412035200000,
'doc_count': 8269
},
{
'key_as_string': '2014-10-01T00:00:00.000Z',
'key': 1412121600000,
'doc_count': 8141
},
{
'key_as_string': '2014-10-02T00:00:00.000Z',
'key': 1412208000000,
'doc_count': 8148
},
{
'key_as_string': '2014-10-03T00:00:00.000Z',
'key': 1412294400000,
'doc_count': 8219
}
]
}
data.oneHistogramBucket = {
'took': 37,
'timed_out': false,
'_shards': {
'total': 6,
'successful': 6,
'failed': 0
},
'hits': {
'total': 49208,
'max_score': 0,
'hits': []
},
'aggregations': {
'agg_2': {
'buckets': [
{
'key_as_string': '2014-09-28T00:00:00.000Z',
'key': 1411862400000,
'doc_count': 8247
},
{
'key_as_string': '2014-09-29T00:00:00.000Z',
'key': 1411948800000,
'doc_count': 8184
},
{
'key_as_string': '2014-09-30T00:00:00.000Z',
'key': 1412035200000,
'doc_count': 8269
},
{
'key_as_string': '2014-10-01T00:00:00.000Z',
'key': 1412121600000,
'doc_count': 8141
},
{
'key_as_string': '2014-10-02T00:00:00.000Z',
'key': 1412208000000,
'doc_count': 8148
},
{
'key_as_string': '2014-10-03T00:00:00.000Z',
'key': 1412294400000,
'doc_count': 8219
}
]
}
};
}
};
return data;
});
export default data;

View file

@ -1,22 +1,20 @@
define(function (require) {
var _ = require('lodash');
var longString = Array(200).join('_');
import _ from 'lodash';
var longString = Array(200).join('_');
return function (id, mapping) {
function fakeVals(type) {
return _.mapValues(mapping, function (f, c) {
return c + '_' + type + '_' + id + longString;
});
}
export default function (id, mapping) {
function fakeVals(type) {
return _.mapValues(mapping, function (f, c) {
return c + '_' + type + '_' + id + longString;
});
}
return {
_id: id,
_index: 'test',
_source: fakeVals('original'),
sort: [id],
$$_formatted: fakeVals('formatted'),
$$_partialFormatted: fakeVals('formatted'),
$$_flattened: fakeVals('_flattened')
};
return {
_id: id,
_index: 'test',
_source: fakeVals('original'),
sort: [id],
$$_formatted: fakeVals('formatted'),
$$_partialFormatted: fakeVals('formatted'),
$$_flattened: fakeVals('_flattened')
};
});
};

View file

@ -1,62 +1,60 @@
define(function (require) {
return {
test: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'long'
}
export default {
test: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'long'
}
},
'foo.bar': {
full_name: 'foo.bar',
mapping: {
bar: {
type: 'string',
}
}
},
'foo.bar': {
full_name: 'foo.bar',
mapping: {
bar: {
type: 'string',
}
},
'not_analyzed_field': {
full_name: 'not_analyzed_field',
mapping: {
bar: {
type: 'string',
index: 'not_analyzed'
}
}
},
'not_analyzed_field': {
full_name: 'not_analyzed_field',
mapping: {
bar: {
type: 'string',
index: 'not_analyzed'
}
},
'index_no_field': {
full_name: 'index_no_field',
mapping: {
bar: {
type: 'string',
index: 'no'
}
}
},
'index_no_field': {
full_name: 'index_no_field',
mapping: {
bar: {
type: 'string',
index: 'no'
}
},
_id: {
full_name: '_id',
mapping: {
_id: {
store: false,
index: 'no',
}
}
},
_id: {
full_name: '_id',
mapping: {
_id: {
store: false,
index: 'no',
}
},
_timestamp: {
full_name: '_timestamp',
mapping: {
_timestamp: {
store: true,
index: 'no',
}
}
},
_timestamp: {
full_name: '_timestamp',
mapping: {
_timestamp: {
store: true,
index: 'no',
}
}
}
}
}
};
});
}
};

View file

@ -1,7 +1,5 @@
define(function (require) {
return {
meta: {
index: 'logstash-*'
}
};
});
export default {
meta: {
index: 'logstash-*'
}
};

View file

@ -1,24 +1,22 @@
define(function (require) {
var _ = require('lodash');
return function fitsFixture() {
return _.map([
{_source: {'@timestamp': 0, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 10, request: 'foo'}},
{_source: {'@timestamp': 1, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 20, request: 'bar'}},
{_source: {'@timestamp': 2, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'bar'}},
{_source: {'@timestamp': 3, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 4, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 5, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 6, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 7, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 8, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 9, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
], function (p, i) {
return _.merge({}, p, {
_score: 1,
_id: 1000 + i,
_type: 'test',
_index: 'test-index'
});
import _ from 'lodash';
export default function fitsFixture() {
return _.map([
{_source: {'@timestamp': 0, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 10, request: 'foo'}},
{_source: {'@timestamp': 1, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 20, request: 'bar'}},
{_source: {'@timestamp': 2, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'bar'}},
{_source: {'@timestamp': 3, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 4, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 5, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 30, request: 'baz'}},
{_source: {'@timestamp': 6, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 7, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 8, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
{_source: {'@timestamp': 9, ssl: true, ip: '192.168.0.1', extension: 'php', 'machine.os': 'Linux', bytes: 40.141592, request: 'bat'}},
], function (p, i) {
return _.merge({}, p, {
_score: 1,
_id: 1000 + i,
_type: 'test',
_index: 'test-index'
});
};
});
});
};

View file

@ -1,37 +1,35 @@
define(function (require) {
function stubbedLogstashFields() {
var sourceData = [
{ name: 'bytes', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true, count: 10 },
{ name: 'ssl', type: 'boolean', indexed: true, analyzed: true, sortable: true, filterable: true, count: 20 },
{ name: '@timestamp', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
{ name: 'time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
{ name: '@tags', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'utc_time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'phpmemory', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'ip', type: 'ip', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'request_body', type: 'attachment', indexed: true, analyzed: true, sortable: false, filterable: true },
{ name: 'point', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: false },
{ name: 'area', type: 'geo_shape', indexed: true, analyzed: true, sortable: true, filterable: false },
{ name: 'hashed', type: 'murmur3', indexed: true, analyzed: true, sortable: false, filterable: false },
{ name: 'geo.coordinates', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: true },
{ name: 'extension', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'machine.os', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'geo.src', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: '_type', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: '_id', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: true},
{ name: '_source', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: false},
{ name: 'custom_user_field', type: 'conflict', indexed: false, analyzed: false, sortable: false, filterable: true },
{ name: 'script string', type: 'string', scripted: true, script: '\'i am a string\'', lang: 'expression' },
{ name: 'script number', type: 'number', scripted: true, script: '1234', lang: 'expression' },
{ name: 'script murmur3', type: 'murmur3', scripted: true, script: '1234', lang: 'expression'},
].map(function (field) {
field.count = field.count || 0;
field.scripted = field.scripted || false;
return field;
});
function stubbedLogstashFields() {
var sourceData = [
{ name: 'bytes', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true, count: 10 },
{ name: 'ssl', type: 'boolean', indexed: true, analyzed: true, sortable: true, filterable: true, count: 20 },
{ name: '@timestamp', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
{ name: 'time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true, count: 30 },
{ name: '@tags', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'utc_time', type: 'date', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'phpmemory', type: 'number', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'ip', type: 'ip', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'request_body', type: 'attachment', indexed: true, analyzed: true, sortable: false, filterable: true },
{ name: 'point', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: false },
{ name: 'area', type: 'geo_shape', indexed: true, analyzed: true, sortable: true, filterable: false },
{ name: 'hashed', type: 'murmur3', indexed: true, analyzed: true, sortable: false, filterable: false },
{ name: 'geo.coordinates', type: 'geo_point', indexed: true, analyzed: true, sortable: false, filterable: true },
{ name: 'extension', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'machine.os', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: 'geo.src', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: '_type', type: 'string', indexed: true, analyzed: true, sortable: true, filterable: true },
{ name: '_id', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: true},
{ name: '_source', type: 'string', indexed: false, analyzed: false, sortable: false, filterable: false},
{ name: 'custom_user_field', type: 'conflict', indexed: false, analyzed: false, sortable: false, filterable: true },
{ name: 'script string', type: 'string', scripted: true, script: '\'i am a string\'', lang: 'expression' },
{ name: 'script number', type: 'number', scripted: true, script: '1234', lang: 'expression' },
{ name: 'script murmur3', type: 'murmur3', scripted: true, script: '1234', lang: 'expression'},
].map(function (field) {
field.count = field.count || 0;
field.scripted = field.scripted || false;
return field;
});
return sourceData;
}
return sourceData;
}
return stubbedLogstashFields;
});
export default stubbedLogstashFields;

View file

@ -1,40 +1,38 @@
define(function (require) {
return {
test: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'long'
}
}
},
'foo.bar': {
full_name: 'foo.bar',
mapping: {
bar: {
type: 'string'
}
export default {
test: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'long'
}
}
}
}
},
duplicates: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'date'
}
},
'foo.bar': {
full_name: 'foo.bar',
mapping: {
bar: {
type: 'string'
}
}
}
}
}
};
});
},
duplicates: {
mappings: {
testType: {
'baz': {
full_name: 'baz',
mapping: {
bar: {
type: 'date'
}
}
}
}
}
}
};

View file

@ -1,17 +1,16 @@
define(function (require) {
var _ = require('lodash');
var sinon = require('auto-release-sinon');
import _ from 'lodash';
import sinon from 'auto-release-sinon';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
return function (Private, Promise) {
var indexPatterns = Private(require('fixtures/stubbed_logstash_index_pattern'));
var getIndexPatternStub = sinon.stub();
getIndexPatternStub.returns(Promise.resolve(indexPatterns));
export default function (Private, Promise) {
var indexPatterns = Private(FixturesStubbedLogstashIndexPatternProvider);
var getIndexPatternStub = sinon.stub();
getIndexPatternStub.returns(Promise.resolve(indexPatterns));
var courier = {
indexPatterns: { get: getIndexPatternStub },
getStub: getIndexPatternStub
};
return courier;
var courier = {
indexPatterns: { get: getIndexPatternStub },
getStub: getIndexPatternStub
};
});
return courier;
};

View file

@ -1,19 +1,17 @@
define(function (require) {
var _ = require('lodash');
var sinon = require('auto-release-sinon');
import _ from 'lodash';
import sinon from 'auto-release-sinon';
function MockState(defaults) {
this.on = _.noop;
this.off = _.noop;
this.save = sinon.stub();
this.replace = sinon.stub();
_.assign(this, defaults);
}
function MockState(defaults) {
this.on = _.noop;
this.off = _.noop;
this.save = sinon.stub();
this.replace = sinon.stub();
_.assign(this, defaults);
}
MockState.prototype.resetStub = function () {
this.save = sinon.stub();
return this;
};
MockState.prototype.resetStub = function () {
this.save = sinon.stub();
return this;
};
return MockState;
});
export default MockState;

View file

@ -1,15 +1,13 @@
define(function (require) {
var _ = require('lodash');
var keys = {};
return {
get: function (path, def) {
return keys[path] == null ? def : keys[path];
},
set: function (path, val) {
keys[path] = val;
return val;
},
on: _.noop,
off: _.noop
}
})
import _ from 'lodash';
var keys = {};
export default {
get: function (path, def) {
return keys[path] == null ? def : keys[path];
},
set: function (path, val) {
keys[path] = val;
return val;
},
on: _.noop,
off: _.noop
}

View file

@ -1,227 +1,225 @@
define(function (require) {
/*
Extensions:
gif: 5
html: 8
php: 5 (thus 5 with phpmemory fields)
png: 2
/*
Extensions:
gif: 5
html: 8
php: 5 (thus 5 with phpmemory fields)
png: 2
_type:
apache: 18
nginx: 2
_type:
apache: 18
nginx: 2
Bytes (all unique except):
374: 2
Bytes (all unique except):
374: 2
All have the same index, ids are unique
*/
All have the same index, ids are unique
*/
return [
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '61',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 360.20000000000005
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '388',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 5848.700000000001
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '403',
'_score': 1,
'_source': {
'extension': 'png',
'bytes': 841.6
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '415',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1626.4
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '460',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 2070.6,
'phpmemory': 276080
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '496',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 8421.6
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '511',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 994.8000000000001
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '701',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 374
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '838',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 506.09999999999997,
'phpmemory': 67480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '890',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 506.09999999999997,
'phpmemory': 67480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'nginx',
'_id': '927',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 2591.1,
'phpmemory': 345480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1034',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1450
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1142',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 1803.8999999999999,
'phpmemory': 240520
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1180',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1626.4
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'nginx',
'_id': '1224',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10617.2
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1243',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10961.5
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1510',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 382.8
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1628',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 374
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1729',
'_score': 1,
'_source': {
'extension': 'png',
'bytes': 3059.2000000000003
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1945',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10617.2
}
export default [
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '61',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 360.20000000000005
}
];
});
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '388',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 5848.700000000001
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '403',
'_score': 1,
'_source': {
'extension': 'png',
'bytes': 841.6
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '415',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1626.4
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '460',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 2070.6,
'phpmemory': 276080
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '496',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 8421.6
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '511',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 994.8000000000001
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '701',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 374
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '838',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 506.09999999999997,
'phpmemory': 67480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '890',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 506.09999999999997,
'phpmemory': 67480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'nginx',
'_id': '927',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 2591.1,
'phpmemory': 345480
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1034',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1450
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1142',
'_score': 1,
'_source': {
'extension': 'php',
'bytes': 1803.8999999999999,
'phpmemory': 240520
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1180',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 1626.4
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'nginx',
'_id': '1224',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10617.2
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1243',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10961.5
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1510',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 382.8
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1628',
'_score': 1,
'_source': {
'extension': 'html',
'bytes': 374
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1729',
'_score': 1,
'_source': {
'extension': 'png',
'bytes': 3059.2000000000003
}
},
{
'_index': 'logstash-2014.09.09',
'_type': 'apache',
'_id': '1945',
'_score': 1,
'_source': {
'extension': 'gif',
'bytes': 10617.2
}
}
];

View file

@ -1,18 +1,16 @@
define(function (require) {
var hits = require('fixtures/real_hits');
import hits from 'fixtures/real_hits';
return {
took: 73,
timed_out: false,
_shards: {
total: 144,
successful: 144,
failed: 0
},
hits: {
total : 49487,
max_score : 1.0,
hits: hits
}
};
});
export default {
took: 73,
timed_out: false,
_shards: {
total: 144,
successful: 144,
failed: 0
},
hits: {
total : 49487,
max_score : 1.0,
hits: hits
}
};

View file

@ -1,22 +1,22 @@
define(function (require) {
function stubbedDocSourceResponse(Private) {
var mockLogstashFields = Private(require('fixtures/logstash_fields'));
import FixturesLogstashFieldsProvider from 'fixtures/logstash_fields';
return function (id, index) {
index = index || '.kibana';
return {
_id: id,
_index: index,
_type: 'index-pattern',
_version: 2,
found: true,
_source: {
customFormats: '{}',
fields: JSON.stringify(mockLogstashFields)
}
};
function stubbedDocSourceResponse(Private) {
var mockLogstashFields = Private(FixturesLogstashFieldsProvider);
return function (id, index) {
index = index || '.kibana';
return {
_id: id,
_index: index,
_type: 'index-pattern',
_version: 2,
found: true,
_source: {
customFormats: '{}',
fields: JSON.stringify(mockLogstashFields)
}
};
}
};
}
return stubbedDocSourceResponse;
});
export default stubbedDocSourceResponse;

View file

@ -1,24 +1,25 @@
define(function (require) {
return function stubbedLogstashIndexPatternService(Private) {
var StubIndexPattern = Private(require('testUtils/stub_index_pattern'));
var fieldTypes = Private(require('ui/index_patterns/_field_types'));
var mockLogstashFields = Private(require('fixtures/logstash_fields'));
import _ from 'lodash';
import TestUtilsStubIndexPatternProvider from 'testUtils/stub_index_pattern';
import IndexPatternsFieldTypesProvider from 'ui/index_patterns/_field_types';
import FixturesLogstashFieldsProvider from 'fixtures/logstash_fields';
export default function stubbedLogstashIndexPatternService(Private) {
var StubIndexPattern = Private(TestUtilsStubIndexPatternProvider);
var fieldTypes = Private(IndexPatternsFieldTypesProvider);
var mockLogstashFields = Private(FixturesLogstashFieldsProvider);
var _ = require('lodash');
var fields = mockLogstashFields.map(function (field) {
field.displayName = field.name;
var type = fieldTypes.byName[field.type];
if (!type) throw new TypeError('unknown type ' + field.type);
if (!_.has(field, 'sortable')) field.sortable = type.sortable;
if (!_.has(field, 'filterable')) field.filterable = type.filterable;
return field;
});
var fields = mockLogstashFields.map(function (field) {
field.displayName = field.name;
var type = fieldTypes.byName[field.type];
if (!type) throw new TypeError('unknown type ' + field.type);
if (!_.has(field, 'sortable')) field.sortable = type.sortable;
if (!_.has(field, 'filterable')) field.filterable = type.filterable;
return field;
});
var indexPattern = new StubIndexPattern('logstash-*', 'time', fields);
indexPattern.id = 'logstash-*';
var indexPattern = new StubIndexPattern('logstash-*', 'time', fields);
indexPattern.id = 'logstash-*';
return indexPattern;
return indexPattern;
};
});
};

View file

@ -1,39 +1,38 @@
define(function (require) {
var sinon = require('auto-release-sinon');
var searchResponse = require('fixtures/search_response');
import sinon from 'auto-release-sinon';
import searchResponse from 'fixtures/search_response';
import FixturesStubbedLogstashIndexPatternProvider from 'fixtures/stubbed_logstash_index_pattern';
return function stubSearchSource(Private, $q, Promise) {
var deferedResult = $q.defer();
var indexPattern = Private(require('fixtures/stubbed_logstash_index_pattern'));
export default function stubSearchSource(Private, $q, Promise) {
var deferedResult = $q.defer();
var indexPattern = Private(FixturesStubbedLogstashIndexPatternProvider);
return {
sort: sinon.spy(),
size: sinon.spy(),
fetch: sinon.spy(),
destroy: sinon.spy(),
get: function (param) {
switch (param) {
case 'index':
return indexPattern;
default:
throw new Error('Param "' + param + '" is not implemented in the stubbed search source');
}
},
crankResults: function () {
deferedResult.resolve(searchResponse);
deferedResult = $q.defer();
},
onResults: function () {
// Up to the test to resolve this manually
// For example:
// someHandler.resolve(require('fixtures/search_response'))
return deferedResult.promise;
},
onError: function () { return $q.defer().promise; },
_flatten: function () {
return Promise.resolve({ index: indexPattern, body: {} });
return {
sort: sinon.spy(),
size: sinon.spy(),
fetch: sinon.spy(),
destroy: sinon.spy(),
get: function (param) {
switch (param) {
case 'index':
return indexPattern;
default:
throw new Error('Param "' + param + '" is not implemented in the stubbed search source');
}
};
},
crankResults: function () {
deferedResult.resolve(searchResponse);
deferedResult = $q.defer();
},
onResults: function () {
// Up to the test to resolve this manually
// For example:
// someHandler.resolve(require('fixtures/search_response'))
return deferedResult.promise;
},
onError: function () { return $q.defer().promise; },
_flatten: function () {
return Promise.resolve({ index: indexPattern, body: {} });
}
};
});
};

View file

@ -1,21 +1,19 @@
define(function (require) {
var sinon = require('auto-release-sinon');
import sinon from 'auto-release-sinon';
function MockMap(container, chartData, params) {
this.container = container;
this.chartData = chartData;
this.params = params;
function MockMap(container, chartData, params) {
this.container = container;
this.chartData = chartData;
this.params = params;
// stub required methods
this.addStubs();
}
// stub required methods
this.addStubs();
}
MockMap.prototype.addStubs = function () {
this.addTitle = sinon.stub();
this.addFitControl = sinon.stub();
this.addBoundingControl = sinon.stub();
this.destroy = sinon.stub();
};
MockMap.prototype.addStubs = function () {
this.addTitle = sinon.stub();
this.addFitControl = sinon.stub();
this.addBoundingControl = sinon.stub();
this.destroy = sinon.stub();
};
return MockMap;
});
export default MockMap;

View file

@ -1,7 +1,20 @@
var $ = require('jquery');
var _ = require('lodash');
import _ from 'lodash';
import $ from 'jquery';
import VislibVisProvider from 'ui/vislib/vis';
var $visCanvas = $('<div>')
.attr('id', 'vislib-vis-fixtures')
.css({
height: '500px',
width: '1024px',
display: 'flex',
position: 'fixed',
top: '0px',
left: '0px',
overflow: 'hidden'
})
.appendTo('body');
var $visCanvas = $('<div>').attr('id', 'vislib-vis-fixtures').appendTo('body');
var count = 0;
var visHeight = $visCanvas.height();
@ -19,7 +32,7 @@ afterEach(function () {
module.exports = function VislibFixtures(Private) {
return function (visLibParams) {
var Vis = Private(require('ui/vislib/vis'));
var Vis = Private(VislibVisProvider);
return new Vis($visCanvas.new(), _.defaults({}, visLibParams || {}, {
shareYAxis: true,
addTooltip: true,

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [
{

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'valueFormatter': _.identity,

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var moment = require('moment');
import moment from 'moment';
module.exports = {
'label': '',

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'columns': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'rows': [

View file

@ -1,4 +1,4 @@
var _ = require('lodash');
import _ from 'lodash';
module.exports = {
'label': '',

View file

@ -1,16 +1,16 @@
let { inherits } = require('util');
let { defaults } = require('lodash');
let { resolve } = require('path');
let { writeFile } = require('fs');
let webpack = require('webpack');
var Boom = require('boom');
let DirectoryNameAsMain = require('webpack-directory-name-as-main');
let ExtractTextPlugin = require('extract-text-webpack-plugin');
var CommonsChunkPlugin = require('webpack/lib/optimize/CommonsChunkPlugin');
import webpack from 'webpack';
import Boom from 'boom';
import DirectoryNameAsMain from 'webpack-directory-name-as-main';
import ExtractTextPlugin from 'extract-text-webpack-plugin';
import CommonsChunkPlugin from 'webpack/lib/optimize/CommonsChunkPlugin';
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
let babelOptions = require('./babelOptions');
import babelOptions from './babelOptions';
import { inherits } from 'util';
import { defaults } from 'lodash';
import { resolve } from 'path';
import { writeFile } from 'fs';
let babelExclude = [/[\/\\](webpackShims|node_modules|bower_components)[\/\\]/];
class BaseOptimizer {
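This hunk converts a mix of destructured and whole-module requires into the equivalent ES module forms: default imports for whole modules (`webpack`, `Boom`, the webpack plugins) and named imports for the destructured bindings (`inherits`, `defaults`, `resolve`, `writeFile`). The `requirefrom`-based `fromRoot` lookup keeps its `require()` form, presumably because it is a computed lookup rather than a static path. A reduced sketch of the two forms, using module names taken from the hunk:

```js
// whole-module (default) imports replace `let x = require('x')`
import webpack from 'webpack';
import Boom from 'boom';

// named imports replace `let { a, b } = require('x')`
import { inherits } from 'util';
import { defaults } from 'lodash';
import { resolve } from 'path';
import { writeFile } from 'fs';

// dynamic lookups stay as require(), since import specifiers must be static
let utils = require('requirefrom')('src/utils');
let fromRoot = utils('fromRoot');
```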

View file

@ -1,8 +1,8 @@
let { fromNode } = require('bluebird');
let { writeFile } = require('fs');
let BaseOptimizer = require('./BaseOptimizer');
let fromRoot = require('../utils/fromRoot');
import BaseOptimizer from './BaseOptimizer';
import fromRoot from '../utils/fromRoot';
import { fromNode } from 'bluebird';
import { writeFile } from 'fs';
module.exports = class FsOptimizer extends BaseOptimizer {
async init() {

View file

@ -1,4 +1,4 @@
var cloneDeep = require('lodash').cloneDeep;
import { cloneDeep } from 'lodash';
var fromRoot = require('path').resolve.bind(null, __dirname, '../../');
if (!process.env.BABEL_CACHE_PATH) {
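The babel options hunk keeps the bound-`resolve` trick for building repo-root-relative paths. A short sketch of what the bound function does; the example argument and output are illustrative only, assuming this file sits two directories below the repository root as the `'../../'` implies:

```js
// path.resolve with its first two arguments pre-bound: every call resolves
// relative to the repository root rather than the current file
var fromRoot = require('path').resolve.bind(null, __dirname, '../../');

// illustrative call and result:
fromRoot('optimize', '.babelcache.json');
// -> '/path/to/repo/optimize/.babelcache.json'
```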

View file

@ -1,3 +1,4 @@
import FsOptimizer from './FsOptimizer';
module.exports = async (kbnServer, server, config) => {
if (!config.get('optimize.enabled')) return;
@ -33,7 +34,6 @@ module.exports = async (kbnServer, server, config) => {
}
// only require the FsOptimizer when we need to
let FsOptimizer = require('./FsOptimizer');
let optimizer = new FsOptimizer({
env: bundles.env,
bundles: bundles,

View file

@ -1,9 +1,9 @@
let { once, pick, size } = require('lodash');
let { join } = require('path');
let Boom = require('boom');
import Boom from 'boom';
let BaseOptimizer = require('../BaseOptimizer');
let WeirdControlFlow = require('./WeirdControlFlow');
import BaseOptimizer from '../BaseOptimizer';
import WeirdControlFlow from './WeirdControlFlow';
import { once, pick, size } from 'lodash';
import { join } from 'path';
module.exports = class LazyOptimizer extends BaseOptimizer {
constructor(opts) {

View file

@ -1,7 +1,7 @@
let { Server } = require('hapi');
let { fromNode } = require('bluebird');
let Boom = require('boom');
import Boom from 'boom';
import { Server } from 'hapi';
import { fromNode } from 'bluebird';
module.exports = class LazyServer {
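The LazyServer hunk imports hapi's `Server` and bluebird's `fromNode`, the usual pair for standing up a pre-v17 hapi server and awaiting its callback-style `start`. A minimal sketch of that combination; the class body here is an assumption for illustration, not the file's actual implementation:

```js
import { Server } from 'hapi';
import { fromNode } from 'bluebird';

// hypothetical sketch: a tiny hapi server whose callback-style start() is awaited via fromNode
class ExampleLazyServer {
  constructor(host, port) {
    this.server = new Server();
    this.server.connection({ host, port });
  }

  async init() {
    await fromNode(cb => this.server.start(cb));
  }
}
```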

View file

@ -1,5 +1,5 @@
import { fromNode } from 'bluebird';
let { fromNode } = require('bluebird');
module.exports = class WeirdControlFlow {
constructor(work) {

View file

@ -1,6 +1,6 @@
import { isWorker } from 'cluster';
module.exports = async (kbnServer, server, config) => {
let { isWorker } = require('cluster');
if (!isWorker) {
throw new Error(`lazy optimization is only available in "watch" mode`);

View file

@ -1,9 +1,9 @@
import LazyServer from './LazyServer';
import LazyOptimizer from './LazyOptimizer';
module.exports = async (kbnServer, kibanaHapiServer, config) => {
let src = require('requirefrom')('src');
let fromRoot = src('utils/fromRoot');
let LazyServer = require('./LazyServer');
let LazyOptimizer = require('./LazyOptimizer');
let server = new LazyServer(
config.get('optimize.lazyHost'),

View file

@ -1,5 +1,5 @@
let { fromNode } = require('bluebird');
let { get, once } = require('lodash');
import { fromNode } from 'bluebird';
import { get, once } from 'lodash';
module.exports = (kbnServer, server, config) => {

View file

@ -1,4 +1,7 @@
var angular = require('angular');
import angular from 'angular';
import 'angular-mocks';
import 'mocha';
if (angular.mocks) {
throw new Error(
'Don\'t require angular-mocks directly or the tests ' +
@ -6,5 +9,4 @@ if (angular.mocks) {
);
}
require('angular-mocks');
module.exports = angular.mock;

View file

@ -1,20 +1,19 @@
define(function (require) {
// register the spy mode or it won't show up in the spys
require('ui/registry/spy_modes').register(VisDetailsSpyProvider);
import visDebugSpyPanelTemplate from 'plugins/devMode/visDebugSpyPanel.html';
// register the spy mode or it won't show up in the spys
require('ui/registry/spy_modes').register(VisDetailsSpyProvider);
function VisDetailsSpyProvider(Notifier, $filter, $rootScope, config) {
return {
name: 'debug',
display: 'Debug',
template: require('plugins/devMode/visDebugSpyPanel.html'),
order: 5,
link: function ($scope, $el) {
$scope.$watch('vis.getState() | json', function (json) {
$scope.visStateJson = json;
});
}
};
}
function VisDetailsSpyProvider(Notifier, $filter, $rootScope, config) {
return {
name: 'debug',
display: 'Debug',
template: visDebugSpyPanelTemplate,
order: 5,
link: function ($scope, $el) {
$scope.$watch('vis.getState() | json', function (json) {
$scope.visStateJson = json;
});
}
};
}
return VisDetailsSpyProvider;
});
export default VisDetailsSpyProvider;
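The devMode hunk replaces the AMD `define(function (require) { ... })` wrapper with top-level ESM syntax: the HTML template is imported as a module and assigned to `template`, and the provider is exported with `export default`, while the spy-mode registration keeps its `require()` form. Since the interleaved old and new lines are hard to read without diff markers, here is a consolidated reading of the new module, trimmed to the structural pieces shown in the hunk:

```js
import visDebugSpyPanelTemplate from 'plugins/devMode/visDebugSpyPanel.html';

// register the spy mode so it shows up in the spy panel
require('ui/registry/spy_modes').register(VisDetailsSpyProvider);

function VisDetailsSpyProvider(Notifier, $filter, $rootScope, config) {
  return {
    name: 'debug',
    display: 'Debug',
    template: visDebugSpyPanelTemplate, // the imported HTML string
    order: 5
    // link function with the $scope.$watch trimmed for brevity
  };
}

export default VisDetailsSpyProvider;
```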

View file

@ -28,8 +28,8 @@ module.exports = function ({ Plugin }) {
cert: string(),
key: string()
}).default(),
apiVersion: string().default('2.0'),
engineVersion: string().valid('^2.1.0').default('^2.1.0')
apiVersion: Joi.string().default('master'),
engineVersion: Joi.string().valid('^3.0.0').default('^3.0.0')
}).default();
},
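This hunk switches the elasticsearch plugin's defaults to track unreleased Elasticsearch (`apiVersion: 'master'`, `engineVersion: '^3.0.0'`) and spells out the `Joi.string()` calls explicitly. A small, self-contained sketch of how a schema with just those two keys validates, using the older `Joi.validate` API available in the Joi releases of this era; the rest of the plugin config is omitted:

```js
import Joi from 'joi';

// reduced schema containing only the two keys touched by this hunk
const schema = Joi.object({
  apiVersion: Joi.string().default('master'),
  engineVersion: Joi.string().valid('^3.0.0').default('^3.0.0')
}).default();

const { error, value } = Joi.validate({}, schema);
// error === null
// value -> { apiVersion: 'master', engineVersion: '^3.0.0' }
```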

View file

@ -1,17 +1,17 @@
var _ = require('lodash');
var Promise = require('bluebird');
var sinon = require('sinon');
import _ from 'lodash';
import Promise from 'bluebird';
import sinon from 'sinon';
var checkEsVersion = require('../check_es_version');
import checkEsVersion from '../check_es_version';
describe('plugins/elasticsearch', function () {
describe('lib/check_es_version', function () {
var server;
var plugin;
let server;
let plugin;
beforeEach(function () {
var get = sinon.stub().withArgs('elasticsearch.engineVersion').returns('^1.4.3');
var config = function () { return { get: get }; };
const get = sinon.stub().withArgs('elasticsearch.engineVersion').returns('^1.4.3');
const config = function () { return { get: get }; };
server = {
log: _.noop,
config: config,
@ -30,15 +30,15 @@ describe('plugins/elasticsearch', function () {
});
function setNodes(/* ...versions */) {
var versions = _.shuffle(arguments);
var nodes = {};
var i = 0;
const versions = _.shuffle(arguments);
const nodes = {};
let i = 0;
while (versions.length) {
var name = 'node-' + (++i);
var version = versions.shift();
const name = 'node-' + (++i);
const version = versions.shift();
var node = {
const node = {
version: version,
http_address: 'http_address',
ip: 'ip'
@ -48,7 +48,7 @@ describe('plugins/elasticsearch', function () {
nodes[name] = node;
}
var client = server.plugins.elasticsearch.client;
const client = server.plugins.elasticsearch.client;
client.nodes.info = sinon.stub().returns(Promise.resolve({ nodes: nodes }));
}
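For readability, a hypothetical usage of the `setNodes` helper above: each argument becomes a fake node (assigned in shuffled order), and `client.nodes.info` is stubbed to resolve with that node map, which is what the version check inspects.

```js
// hypothetical usage inside a test case
setNodes('1.4.3', '1.4.4');
// client.nodes.info() now resolves with something like (order not deterministic):
// {
//   nodes: {
//     'node-1': { version: '1.4.4', http_address: 'http_address', ip: 'ip' },
//     'node-2': { version: '1.4.3', http_address: 'http_address', ip: 'ip' }
//   }
// }
```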

View file

@ -1,21 +1,21 @@
var _ = require('lodash');
var sinon = require('sinon');
var expect = require('expect.js');
var Promise = require('bluebird');
import _ from 'lodash';
import sinon from 'sinon';
import expect from 'expect.js';
import Promise from 'bluebird';
var createKibanaIndex = require('../create_kibana_index');
var SetupError = require('../setup_error');
import createKibanaIndex from '../create_kibana_index';
import SetupError from '../setup_error';
describe('plugins/elasticsearch', function () {
describe('lib/create_kibana_index', function () {
var server;
var client;
let server;
let client;
beforeEach(function () {
server = {};
client = {};
var config = { kibana: { index: '.my-kibana' } };
var get = sinon.stub();
let config = { kibana: { index: '.my-kibana' } };
const get = sinon.stub();
get.returns(config);
get.withArgs('kibana.index').returns(config.kibana.index);
config = function () { return { get: get }; };
@ -33,16 +33,16 @@ describe('plugins/elasticsearch', function () {
});
it('should check cluster.health upon successful index creation', function () {
var fn = createKibanaIndex(server);
const fn = createKibanaIndex(server);
return fn.then(function () {
sinon.assert.calledOnce(client.cluster.health);
});
});
it('should be created with mappings for config.buildNum', function () {
var fn = createKibanaIndex(server);
const fn = createKibanaIndex(server);
return fn.then(function () {
var params = client.indices.create.args[0][0];
const params = client.indices.create.args[0][0];
expect(params)
.to.have.property('body');
expect(params.body)
@ -61,9 +61,9 @@ describe('plugins/elasticsearch', function () {
});
it('should be created with 1 shard and default replica', function () {
var fn = createKibanaIndex(server);
const fn = createKibanaIndex(server);
return fn.then(function () {
var params = client.indices.create.args[0][0];
const params = client.indices.create.args[0][0];
expect(params)
.to.have.property('body');
expect(params.body)
@ -76,9 +76,9 @@ describe('plugins/elasticsearch', function () {
});
it('should be created with index name set in the config', function () {
var fn = createKibanaIndex(server);
const fn = createKibanaIndex(server);
return fn.then(function () {
var params = client.indices.create.args[0][0];
const params = client.indices.create.args[0][0];
expect(params)
.to.have.property('index', '.my-kibana');
});
@ -89,18 +89,18 @@ describe('plugins/elasticsearch', function () {
describe('failure requests', function () {
it('should reject with a SetupError', function () {
var error = new Error('Oops!');
const error = new Error('Oops!');
client.indices.create.returns(Promise.reject(error));
var fn = createKibanaIndex(server);
const fn = createKibanaIndex(server);
return fn.catch(function (err) {
expect(err).to.be.a(SetupError);
});
});
it('should reject with an error if index creation fails', function () {
var error = new Error('Oops!');
const error = new Error('Oops!');
client.indices.create.returns(Promise.reject(error));
var fn = createKibanaIndex(server);
const fn = createKibanaIndex(server);
return fn.catch(function (err) {
expect(err.message).to.be('Unable to create Kibana index ".my-kibana"');
expect(err).to.have.property('origError', error);
@ -109,10 +109,10 @@ describe('plugins/elasticsearch', function () {
it('should reject with an error if health check fails', function () {
var error = new Error('Oops!');
const error = new Error('Oops!');
client.indices.create.returns(Promise.resolve());
client.cluster.health.returns(Promise.reject(error));
var fn = createKibanaIndex(server);
const fn = createKibanaIndex(server);
return fn.catch(function (err) {
expect(err.message).to.be('Waiting for Kibana index ".my-kibana" to come online failed.');
expect(err).to.have.property('origError', error);

View file

@ -1,5 +1,5 @@
const expect = require('expect.js');
const createProxy = require('../create_proxy');
import expect from 'expect.js';
import createProxy from '../create_proxy';
describe('plugins/elasticsearch', function () {
describe('lib/create_proxy', function () {

View file

@ -1,5 +1,5 @@
var Promise = require('bluebird');
var portscanner = require('portscanner');
import Promise from 'bluebird';
import portscanner from 'portscanner';
module.exports = function findPort(start, end, host) {
host = host || 'localhost';

View file

@ -1,5 +1,5 @@
const getBasicAuthRealm = require('../get_basic_auth_realm');
const expect = require('expect.js');
import getBasicAuthRealm from '../get_basic_auth_realm';
import expect from 'expect.js';
const exception = '[security_exception] missing authentication token for REST request [/logstash-*/_search],' +
' with: {"header":{"WWW-Authenticate":"Basic realm=\\"shield\\""}}';
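The fixture string above embeds the realm inside a `WWW-Authenticate` header (`Basic realm=\"shield\"`). A hypothetical sketch of what a `getBasicAuthRealm`-style helper has to do with such a message, purely for illustration; the real implementation may differ:

```js
// hypothetical: pull the realm name out of an ES security exception message
function getBasicAuthRealmExample(message) {
  if (typeof message !== 'string') return null;
  const match = message.match(/Basic realm=\\"(.+?)\\"/);
  return match ? match[1] : null;
}

getBasicAuthRealmExample(exception); // 'shield' for the message above
```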

Some files were not shown because too many files have changed in this diff.