Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)

Merge pull request #3714 from simianhacker/feature/hapi-server
Server Rewrite with Hapi (hey-pea-eye)

Commit fb9748cfd0: 114 changed files with 3714 additions and 1270 deletions
|
@ -3,6 +3,7 @@
|
|||
"node": true,
|
||||
|
||||
"globals": {
|
||||
"Promise": true
|
||||
"Promise": true,
|
||||
"status": true
|
||||
}
|
||||
}
|
||||
|
|
package.json (16 changed lines)
|
@ -12,7 +12,7 @@
|
|||
],
|
||||
"private": false,
|
||||
"version": "4.2.0-snapshot",
|
||||
"main": "src/server/app.js",
|
||||
"main": "src/server/index.js",
|
||||
"homepage": "https://www.elastic.co/products/kibana",
|
||||
"bugs": {
|
||||
"url": "http://github.com/elastic/kibana/issues"
|
||||
|
@ -50,9 +50,20 @@
|
|||
"elasticsearch": "^5.0.0",
|
||||
"express": "^4.10.6",
|
||||
"glob": "^4.3.2",
|
||||
"good": "^5.1.2",
|
||||
"good-console": "^4.1.0",
|
||||
"good-file": "^4.0.2",
|
||||
"good-reporter": "^3.1.0",
|
||||
"hapi": "^8.6.1",
|
||||
"http-auth": "^2.2.5",
|
||||
"joi": "^6.4.3",
|
||||
"js-yaml": "^3.2.5",
|
||||
"json-stringify-safe": "^5.0.1",
|
||||
"lodash": "^2.4.1",
|
||||
"lodash-deep": "^1.6.0",
|
||||
"moment": "^2.10.3",
|
||||
"morgan": "~1.5.1",
|
||||
"numeral": "^1.5.3",
|
||||
"request": "^2.40.0",
|
||||
"requirefrom": "^0.2.0",
|
||||
"semver": "^4.2.0",
|
||||
|
@ -87,14 +98,17 @@
|
|||
"istanbul": "^0.3.15",
|
||||
"jade": "^1.8.2",
|
||||
"license-checker": "3.0.3",
|
||||
"libesvm": "^1.0.1",
|
||||
"load-grunt-config": "^0.7.0",
|
||||
"marked": "^0.3.3",
|
||||
"marked-text-renderer": "^0.1.0",
|
||||
"mkdirp": "^0.5.0",
|
||||
"mocha": "^2.2.5",
|
||||
"nock": "^1.6.0",
|
||||
"npm": "^2.11.0",
|
||||
"opn": "^1.0.0",
|
||||
"path-browserify": "0.0.0",
|
||||
"portscanner": "^1.0.0",
|
||||
"progress": "^1.1.8",
|
||||
"requirejs": "^2.1.14",
|
||||
"rjs-build-analysis": "0.0.3",
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
{
|
||||
"extends": "../../.jshintrc.node"
|
||||
}
|
|
@ -1,78 +0,0 @@
|
|||
var express = require('express');
|
||||
var path = require('path');
|
||||
var favicon = require('serve-favicon');
|
||||
var requestLogger = require('./lib/requestLogger');
|
||||
var auth = require('./lib/auth');
|
||||
var appHeaders = require('./lib/appHeaders');
|
||||
var cookieParser = require('cookie-parser');
|
||||
var bodyParser = require('body-parser');
|
||||
var compression = require('compression');
|
||||
var config = require('./config');
|
||||
|
||||
var routes = require('./routes/index');
|
||||
var proxy = require('./routes/proxy');
|
||||
|
||||
var app = express();
|
||||
|
||||
// view engine setup
|
||||
app.set('views', path.join(__dirname, 'views'));
|
||||
app.set('view engine', 'jade');
|
||||
app.set('x-powered-by', false);
|
||||
|
||||
app.use(requestLogger());
|
||||
app.use(auth());
|
||||
app.use(appHeaders());
|
||||
app.use(favicon(path.join(config.public_folder, 'styles', 'theme', 'elk.ico')));
|
||||
|
||||
if (app.get('env') === 'development') {
|
||||
require('./dev')(app);
|
||||
}
|
||||
|
||||
// The proxy must be set up before all the other middleware.
|
||||
// TODO: WE might want to move the middleware to each of the individual routes
|
||||
// so we don't have weird conflicts in the future.
|
||||
app.use('/elasticsearch', proxy);
|
||||
|
||||
app.use(bodyParser.json());
|
||||
app.use(bodyParser.urlencoded({ extended: false }));
|
||||
app.use(cookieParser());
|
||||
app.use(compression());
|
||||
app.use(express.static(config.public_folder));
|
||||
if (config.external_plugins_folder) app.use('/plugins', express.static(config.external_plugins_folder));
|
||||
|
||||
app.use('/', routes);
|
||||
|
||||
|
||||
// catch 404 and forward to error handler
|
||||
app.use(function (req, res, next) {
|
||||
var err = new Error('Not Found');
|
||||
err.status = 404;
|
||||
next(err);
|
||||
});
|
||||
|
||||
// error handlers
|
||||
|
||||
// development error handler
|
||||
// will print stacktrace
|
||||
if (app.get('env') === 'development') {
|
||||
app.use(function (err, req, res, next) {
|
||||
res.status(err.status || 500);
|
||||
res.render('error', {
|
||||
message: err.message,
|
||||
error: err
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// production error handler
|
||||
// no stacktraces leaked to user
|
||||
app.use(function (err, req, res, next) {
|
||||
res.status(err.status || 500);
|
||||
res.render('error', {
|
||||
message: err.message,
|
||||
error: {}
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
module.exports = app;
|
|
@ -1,16 +1,20 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
var _ = require('lodash');
|
||||
var Kibana = require('../');
|
||||
var program = require('commander');
|
||||
var env = (process.env.NODE_ENV) ? process.env.NODE_ENV : 'development';
|
||||
var path = require('path');
|
||||
var writePidFile = require('../lib/write_pid_file');
|
||||
var loadSettingsFromYAML = require('../lib/load_settings_from_yaml');
|
||||
var settings = { 'logging.console.json': true };
|
||||
|
||||
var env = (process.env.NODE_ENV) ? process.env.NODE_ENV : 'development';
|
||||
var packagePath = path.resolve(__dirname, '..', '..', '..', 'package.json');
|
||||
var fs = require('fs');
|
||||
if (env !== 'development') {
|
||||
packagePath = path.resolve(__dirname, '..', 'package.json');
|
||||
}
|
||||
var package = require(packagePath);
|
||||
|
||||
|
||||
program.description('Kibana is an open source (Apache Licensed), browser based analytics and search dashboard for Elasticsearch.');
|
||||
program.version(package.version);
|
||||
program.option('-e, --elasticsearch <uri>', 'Elasticsearch instance');
|
||||
|
@ -22,61 +26,42 @@ program.option('-l, --log-file <path>', 'The file to log to');
|
|||
program.option('--plugins <path>', 'Path to scan for plugins');
|
||||
program.parse(process.argv);
|
||||
|
||||
// This needs to be set before the config is loaded. CONFIG_PATH is used to
|
||||
// override the kibana.yml config path which gets read when the config/index.js
|
||||
// is parsed for the first time.
|
||||
if (program.config) {
|
||||
process.env.CONFIG_PATH = program.config;
|
||||
}
|
||||
|
||||
// This needs to be set before the config is loaded. PLUGINS_PATH is used to
|
||||
// set the external plugins folder.
|
||||
if (program.plugins) {
|
||||
process.env.PLUGINS_FOLDER = program.plugins;
|
||||
settings['kibana.externalPluginsFolder'] = program.plugins;
|
||||
}
|
||||
|
||||
// Load the config
|
||||
var config = require('../config');
|
||||
|
||||
if (program.elasticsearch) {
|
||||
config.elasticsearch = program.elasticsearch;
|
||||
settings['elasticsearch.url'] = program.elasticsearch;
|
||||
}
|
||||
|
||||
if (program.port) {
|
||||
config.port = program.port;
|
||||
}
|
||||
|
||||
if (program.quiet) {
|
||||
config.quiet = program.quiet;
|
||||
}
|
||||
|
||||
if (program.logFile) {
|
||||
config.log_file = program.logFile;
|
||||
settings['kibana.server.port'] = program.port;
|
||||
}
|
||||
|
||||
if (program.host) {
|
||||
config.host = program.host;
|
||||
settings['kibana.server.host'] = program.host;
|
||||
}
|
||||
|
||||
if (program.quiet) {
|
||||
settings['logging.quiet'] = program.quiet;
|
||||
}
|
||||
|
||||
if (program.logFile) {
|
||||
settings['logging.file'] = program.logFile;
|
||||
}
|
||||
|
||||
var configPath = program.config || process.env.CONFIG_PATH;
|
||||
if (configPath) {
|
||||
settings = _.defaults(settings, loadSettingsFromYAML(configPath));
|
||||
}
|
||||
|
||||
|
||||
// Load and start the server. This must happen after all the config changes
|
||||
// have been made since the server also requires the config.
|
||||
var server = require('../');
|
||||
var logger = require('../lib/logger');
|
||||
server.start(function (err) {
|
||||
// If we get here then things have gone sideways and we need to give up.
|
||||
if (err) {
|
||||
logger.fatal({ err: err });
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (config.kibana.pid_file) {
|
||||
return fs.writeFile(config.kibana.pid_file, process.pid, function (err) {
|
||||
if (err) {
|
||||
logger.fatal({ err: err }, 'Failed to write PID file to %s', config.kibana.pid_file);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Start the Kibana server with the settings from the CLI and YAML file
|
||||
var kibana = new Kibana(settings);
|
||||
kibana.listen()
|
||||
.then(writePidFile)
|
||||
.catch(function (err) {
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
|
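For reference, a minimal sketch (not part of the commit) of the settings precedence the new CLI implements: flag values are written into dotted keys first, and _.defaults then lets the YAML file fill in only the keys the flags did not set. The flag and YAML values below are invented for illustration.

var _ = require('lodash');

// Dotted keys built from CLI flags, e.g. --port 5602 --host example.local
var settings = {
  'logging.console.json': true,
  'kibana.server.port': 5602,
  'kibana.server.host': 'example.local'
};

// Hypothetical result of loadSettingsFromYAML(configPath)
var fromYaml = {
  'kibana.server.port': 5601,
  'elasticsearch.url': 'http://localhost:9200'
};

// CLI flags win; YAML only supplies the keys the CLI left unset
settings = _.defaults(settings, fromYaml);
// => { 'logging.console.json': true,
//      'kibana.server.port': 5602,
//      'kibana.server.host': 'example.local',
//      'elasticsearch.url': 'http://localhost:9200' }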
@ -1,66 +0,0 @@
|
|||
var _ = require('lodash');
|
||||
var fs = require('fs');
|
||||
var yaml = require('js-yaml');
|
||||
var path = require('path');
|
||||
var listPlugins = require('../lib/listPlugins');
|
||||
var configPath = process.env.CONFIG_PATH || path.join(__dirname, 'kibana.yml');
|
||||
var kibana = yaml.safeLoad(fs.readFileSync(configPath, 'utf8'));
|
||||
var env = process.env.NODE_ENV || 'development';
|
||||
|
||||
function checkPath(path) {
|
||||
try {
|
||||
fs.statSync(path);
|
||||
return true;
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Set defaults for config file stuff
|
||||
kibana.port = kibana.port || 5601;
|
||||
kibana.host = kibana.host || '0.0.0.0';
|
||||
kibana.elasticsearch_url = kibana.elasticsearch_url || 'http://localhost:9200';
|
||||
kibana.maxSockets = kibana.maxSockets || Infinity;
|
||||
kibana.log_file = kibana.log_file || null;
|
||||
|
||||
kibana.request_timeout = kibana.startup_timeout == null ? 0 : kibana.request_timeout;
|
||||
kibana.ping_timeout = kibana.ping_timeout == null ? kibana.request_timeout : kibana.ping_timeout;
|
||||
kibana.startup_timeout = kibana.startup_timeout == null ? 5000 : kibana.startup_timeout;
|
||||
|
||||
// Check if the local public folder is present. This means we are running in
|
||||
// the NPM module. If it's not there then we are running in the git root.
|
||||
var public_folder = path.resolve(__dirname, '..', 'public');
|
||||
if (!checkPath(public_folder)) public_folder = path.resolve(__dirname, '..', '..', 'kibana');
|
||||
|
||||
// Check to see if htpasswd file exists in the root directory otherwise set it to false
|
||||
var htpasswdPath = path.resolve(__dirname, '..', '.htpasswd');
|
||||
if (!checkPath(htpasswdPath)) htpasswdPath = path.resolve(__dirname, '..', '..', '..', '.htpasswd');
|
||||
if (!checkPath(htpasswdPath)) htpasswdPath = false;
|
||||
|
||||
var packagePath = path.resolve(__dirname, '..', 'package.json');
|
||||
try {
|
||||
fs.statSync(packagePath);
|
||||
} catch (err) {
|
||||
packagePath = path.resolve(__dirname, '..', '..', '..', 'package.json');
|
||||
}
|
||||
|
||||
var config = module.exports = {
|
||||
port : kibana.port,
|
||||
host : kibana.host,
|
||||
elasticsearch : kibana.elasticsearch_url,
|
||||
root : path.normalize(path.join(__dirname, '..')),
|
||||
quiet : false,
|
||||
public_folder : public_folder,
|
||||
external_plugins_folder : process.env.PLUGINS_FOLDER || null,
|
||||
bundled_plugins_folder : path.resolve(public_folder, 'plugins'),
|
||||
kibana : kibana,
|
||||
package : require(packagePath),
|
||||
htpasswd : htpasswdPath,
|
||||
buildNum : '@@buildNum',
|
||||
maxSockets : kibana.maxSockets,
|
||||
log_file : kibana.log_file,
|
||||
request_timeout : kibana.request_timeout,
|
||||
ping_timeout : kibana.ping_timeout
|
||||
};
|
||||
|
||||
config.plugins = listPlugins(config);
|
|
@ -1,19 +1,19 @@
|
|||
# Kibana is served by a back end server. This controls which port to use.
|
||||
port: 5601
|
||||
# port: 5601
|
||||
|
||||
# The host to bind the server to.
|
||||
host: "0.0.0.0"
|
||||
# host: "0.0.0.0"
|
||||
|
||||
# The Elasticsearch instance to use for all your queries.
|
||||
elasticsearch_url: "http://localhost:9200"
|
||||
# elasticsearch_url: "http://localhost:9200"
|
||||
|
||||
# preserve_elasticsearch_host true will send the hostname specified in `elasticsearch`. If you set it to false,
|
||||
# then the host you use to connect to *this* Kibana instance will be sent.
|
||||
elasticsearch_preserve_host: true
|
||||
# elasticsearch_preserve_host: true
|
||||
|
||||
# Kibana uses an index in Elasticsearch to store saved searches, visualizations
|
||||
# and dashboards. It will create a new index if it doesn't already exist.
|
||||
kibana_index: ".kibana"
|
||||
# kibana_index: ".kibana"
|
||||
|
||||
# If your Elasticsearch is protected with basic auth, this is the user credentials
|
||||
# used by the Kibana server to perform maintenance on the kibana_index at startup. Your Kibana
|
||||
|
@ -31,7 +31,7 @@ kibana_index: ".kibana"
|
|||
# ca: /path/to/your/CA.pem
|
||||
|
||||
# The default application to load.
|
||||
default_app_id: "discover"
|
||||
# default_app_id: "discover"
|
||||
|
||||
# Time in milliseconds to wait for elasticsearch to respond to pings, defaults to
|
||||
# request_timeout setting
|
||||
|
@ -39,18 +39,18 @@ default_app_id: "discover"
|
|||
|
||||
# Time in milliseconds to wait for responses from the back end or elasticsearch.
|
||||
# This must be > 0
|
||||
request_timeout: 300000
|
||||
# request_timeout: 300000
|
||||
|
||||
# Time in milliseconds for Elasticsearch to wait for responses from shards.
|
||||
# Set to 0 to disable.
|
||||
shard_timeout: 0
|
||||
# shard_timeout: 0
|
||||
|
||||
# Time in milliseconds to wait for Elasticsearch at Kibana startup before retrying
|
||||
# startup_timeout: 5000
|
||||
|
||||
# Set to false to have a complete disregard for the validity of the SSL
|
||||
# certificate.
|
||||
verify_ssl: true
|
||||
# verify_ssl: true
|
||||
|
||||
# SSL for outgoing requests from the Kibana Server (PEM formatted)
|
||||
# ssl_key_file: /path/to/your/server.key
|
||||
|
|
|
@ -1,57 +0,0 @@
|
|||
var express = require('express');
|
||||
var instrumentationMiddleware = require('./_instrumentation');
|
||||
var amdRapperMiddleware = require('./_amd_rapper');
|
||||
|
||||
var glob = require('glob');
|
||||
var path = require('path');
|
||||
var join = path.join;
|
||||
var rel = join.bind(null, __dirname);
|
||||
var ROOT = rel('../../../');
|
||||
var SRC = join(ROOT, 'src');
|
||||
var NODE_MODULES = join(ROOT, 'node_modules');
|
||||
var APP = join(SRC, 'kibana');
|
||||
var TEST = join(ROOT, 'test');
|
||||
|
||||
module.exports = function (app) {
|
||||
app.use(instrumentationMiddleware({
|
||||
root: SRC,
|
||||
displayRoot: SRC,
|
||||
filter: function (filename) {
|
||||
return filename.match(/.*\/src\/.*\.js$/)
|
||||
&& !filename.match(/.*\/src\/kibana\/bower_components\/.*\.js$/)
|
||||
&& !filename.match(/.*\/src\/kibana\/utils\/(event_emitter|rison)\.js$/);
|
||||
}
|
||||
}));
|
||||
|
||||
app.use(instrumentationMiddleware({
|
||||
root: APP,
|
||||
displayRoot: SRC,
|
||||
filter: function (filename) {
|
||||
return filename.match(/.*\/src\/.*\.js$/)
|
||||
&& !filename.match(/.*\/src\/kibana\/bower_components\/.*\.js$/)
|
||||
&& !filename.match(/.*\/src\/kibana\/utils\/(event_emitter|rison)\.js$/);
|
||||
}
|
||||
}));
|
||||
|
||||
app.use(amdRapperMiddleware({
|
||||
root: ROOT
|
||||
}));
|
||||
|
||||
app.use('/test', express.static(TEST));
|
||||
app.use('/src', express.static(SRC));
|
||||
app.use('/node_modules', express.static(NODE_MODULES));
|
||||
app.use('/specs', function (req, res) {
|
||||
var unit = join(ROOT, '/test/unit/');
|
||||
glob(join(unit, 'specs/**/*.js'), function (er, files) {
|
||||
var moduleIds = files
|
||||
.filter(function (filename) {
|
||||
return path.basename(filename).charAt(0) !== '_';
|
||||
})
|
||||
.map(function (filename) {
|
||||
return path.relative(unit, filename).replace(/\\/g, '/').replace(/\.js$/, '');
|
||||
});
|
||||
|
||||
res.end(JSON.stringify(moduleIds));
|
||||
});
|
||||
});
|
||||
};
|
|
@ -1,103 +1,68 @@
|
|||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var app = require('./app');
|
||||
var fs = require('fs');
|
||||
var config = require('./config');
|
||||
var http = require('http');
|
||||
var https = require('https');
|
||||
http.globalAgent.maxSockets = config.maxSockets;
|
||||
https.globalAgent.maxSockets = config.maxSockets;
|
||||
var logger = require('./lib/logger');
|
||||
var _ = require('lodash');
|
||||
var Promise = require('bluebird');
|
||||
var initialization = require('./lib/serverInitialization');
|
||||
var key, cert;
|
||||
try {
|
||||
key = fs.readFileSync(config.kibana.ssl_key_file, 'utf8');
|
||||
cert = fs.readFileSync(config.kibana.ssl_cert_file, 'utf8');
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
logger.fatal('Failed to read %s', err.path);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
var Hapi = require('hapi');
|
||||
var requirePlugins = require('./lib/plugins/require_plugins');
|
||||
var extendHapi = require('./lib/extend_hapi');
|
||||
var join = require('path').join;
|
||||
|
||||
function Kibana(settings, plugins) {
|
||||
plugins = plugins || [];
|
||||
this.server = new Hapi.Server();
|
||||
|
||||
/**
|
||||
* Create HTTPS/HTTP server.
|
||||
*/
|
||||
var server;
|
||||
if (key && cert) {
|
||||
server = https.createServer({
|
||||
key: key,
|
||||
cert: cert
|
||||
}, app);
|
||||
} else {
|
||||
server = http.createServer(app);
|
||||
}
|
||||
server.on('error', onError);
|
||||
server.on('listening', onListening);
|
||||
// Extend Hapi with Kibana
|
||||
extendHapi(this.server);
|
||||
|
||||
/**
|
||||
* Event listener for HTTP server "error" event.
|
||||
*/
|
||||
var config = this.server.config();
|
||||
if (settings) config.set(settings);
|
||||
|
||||
function onError(error) {
|
||||
if (error.syscall !== 'listen') {
|
||||
throw error;
|
||||
// Load external plugins
|
||||
this.plugins = [];
|
||||
var externalPluginsFolder = config.get('kibana.externalPluginsFolder');
|
||||
if (externalPluginsFolder) {
|
||||
this.plugins = _([externalPluginsFolder])
|
||||
.flatten()
|
||||
.map(requirePlugins)
|
||||
.flatten()
|
||||
.value();
|
||||
}
|
||||
|
||||
// handle specific listen errors with friendly messages
|
||||
switch (error.code) {
|
||||
case 'EACCES':
|
||||
logger.error({ err: error }, 'Port %s requires elevated privileges', app.get('port'));
|
||||
process.exit(1);
|
||||
break;
|
||||
case 'EADDRINUSE':
|
||||
logger.error({ err: error }, 'Port %s is already in use', app.get('port'));
|
||||
process.exit(1);
|
||||
break;
|
||||
default:
|
||||
throw error;
|
||||
}
|
||||
this.plugins = this.plugins.concat(plugins);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Event listener for HTTP server "listening" event.
|
||||
*/
|
||||
Kibana.prototype.listen = function () {
|
||||
var config = this.server.config();
|
||||
var self = this;
|
||||
// Create a new connection
|
||||
this.server.connection({
|
||||
host: config.get('kibana.server.host'),
|
||||
port: config.get('kibana.server.port')
|
||||
});
|
||||
|
||||
function onListening() {
|
||||
var address = server.address();
|
||||
logger.info('Listening on %s:%d', address.address, address.port);
|
||||
}
|
||||
|
||||
function start() {
|
||||
var port = config.port || 3000;
|
||||
var host = config.host || '127.0.0.1';
|
||||
var listen = Promise.promisify(server.listen.bind(server));
|
||||
app.set('port', port);
|
||||
return listen(port, host);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
server: server,
|
||||
start: function (cb) {
|
||||
return initialization()
|
||||
.then(start)
|
||||
.then(function () {
|
||||
cb && cb();
|
||||
}, function (err) {
|
||||
logger.error({ err: err });
|
||||
if (cb) {
|
||||
cb(err);
|
||||
} else {
|
||||
process.exit();
|
||||
}
|
||||
return this.server.loadKibanaPlugins(this.plugins)
|
||||
.then(function () {
|
||||
// Start the server
|
||||
return new Promise(function (resolve, reject) {
|
||||
self.server.start(function (err) {
|
||||
if (err) return reject(err);
|
||||
self.server.log('server', 'Server running at ' + self.server.info.uri);
|
||||
resolve(self.server);
|
||||
});
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch(function (err) {
|
||||
self.server.log('fatal', err);
|
||||
console.log(err.stack);
|
||||
return Promise.reject(err);
|
||||
});
|
||||
};
|
||||
|
||||
Kibana.Plugin = require('./lib/plugins/plugin');
|
||||
module.exports = Kibana;
|
||||
|
||||
if (require.main === module) {
|
||||
module.exports.start();
|
||||
var kibana = new Kibana();
|
||||
kibana.listen().catch(function (err) {
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
|
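Since index.js now exports a constructor instead of a started server, a hedged sketch of embedding it as a module follows; the require path and the example plugin are assumptions, but the constructor signature, Kibana.Plugin, and listen() come straight from the file above.

var Promise = require('bluebird');
var Kibana = require('./src/server'); // assumed require path for the new index.js

var examplePlugin = new Kibana.Plugin({
  name: 'example', // hypothetical plugin
  init: function (server, options) {
    server.log('info', 'example plugin initialized');
    return Promise.resolve();
  }
});

// Flat, dotted settings: the same shape bin/kibana builds from flags and kibana.yml
var kibana = new Kibana({ 'kibana.server.port': 5602 }, [examplePlugin]);

kibana.listen()
  .then(function (server) {
    server.log('info', 'Kibana is up at ' + server.info.uri);
  })
  .catch(function (err) {
    console.error(err.stack);
    process.exit(1);
  });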
|
@ -1,72 +0,0 @@
|
|||
var bunyan = require('bunyan');
|
||||
var ansicolors = require('ansicolors');
|
||||
var Writable = require('stream').Writable;
|
||||
var util = require('util');
|
||||
|
||||
var levels = {
|
||||
10: 'trace',
|
||||
20: 'debug',
|
||||
30: 'info',
|
||||
40: 'warn',
|
||||
50: 'error',
|
||||
60: 'fatal'
|
||||
};
|
||||
|
||||
var colors = {
|
||||
10: 'blue',
|
||||
20: 'green',
|
||||
30: 'cyan',
|
||||
40: 'yellow',
|
||||
50: 'red',
|
||||
60: 'magenta'
|
||||
};
|
||||
|
||||
var levelColor = function (code) {
|
||||
if (code < 299) {
|
||||
return ansicolors.green(code);
|
||||
}
|
||||
|
||||
if (code < 399) {
|
||||
return ansicolors.yellow(code);
|
||||
}
|
||||
|
||||
if (code < 499) {
|
||||
return ansicolors.magenta(code);
|
||||
}
|
||||
|
||||
return ansicolors.red(code);
|
||||
};
|
||||
|
||||
function StdOutStream(options) {
|
||||
Writable.call(this, options);
|
||||
}
|
||||
|
||||
util.inherits(StdOutStream, Writable);
|
||||
|
||||
StdOutStream.prototype._write = function (entry, encoding, callback) {
|
||||
entry = JSON.parse(entry.toString('utf8'));
|
||||
|
||||
var crayon = ansicolors[colors[entry.level]];
|
||||
var output = crayon(levels[entry.level].toUpperCase());
|
||||
output += ' ';
|
||||
output += ansicolors.brightBlack(entry.time);
|
||||
output += ' ';
|
||||
|
||||
if (entry.req && entry.res) {
|
||||
output += util.format('%s %s ', entry.req.method, entry.req.url);
|
||||
output += levelColor(entry.res.statusCode);
|
||||
output += ansicolors.brightBlack(util.format(' %dms - %d', entry.res.responseTime, entry.res.contentLength));
|
||||
} else if (entry.msg) {
|
||||
output += entry.msg;
|
||||
}
|
||||
|
||||
process.stdout.write(output + '\n');
|
||||
|
||||
if (entry.err) {
|
||||
process.stdout.write(ansicolors.brightRed(entry.err.stack) + '\n');
|
||||
}
|
||||
|
||||
callback();
|
||||
};
|
||||
|
||||
module.exports = StdOutStream;
|
|
@ -1,6 +0,0 @@
|
|||
module.exports = function () {
|
||||
return function (req, res, next) {
|
||||
res.header('X-App-Name', 'kibana');
|
||||
next();
|
||||
};
|
||||
};
|
|
@ -1,10 +0,0 @@
|
|||
var config = require('../config');
|
||||
var httpAuth = require('http-auth');
|
||||
module.exports = function () {
|
||||
var basic;
|
||||
if (config.htpasswd) {
|
||||
basic = httpAuth.basic({ file: config.htpasswd });
|
||||
return httpAuth.connect(basic);
|
||||
}
|
||||
return function (req, res, next) { return next(); };
|
||||
};
|
src/server/lib/config/check_path.js (new file, 10 lines)
|
@ -0,0 +1,10 @@
|
|||
var fs = require('fs');
|
||||
module.exports = function checkPath(path) {
|
||||
try {
|
||||
fs.statSync(path);
|
||||
return true;
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
src/server/lib/config/config.js (new file, 81 lines)
|
@ -0,0 +1,81 @@
|
|||
var Promise = require('bluebird');
|
||||
var Joi = require('joi');
|
||||
var _ = require('lodash');
|
||||
var override = require('./override');
|
||||
_.mixin(require('lodash-deep'));
|
||||
|
||||
function Config(schema, config) {
|
||||
config = config || {};
|
||||
this.schema = schema || Joi.object({}).default();
|
||||
this.reset(config);
|
||||
}
|
||||
|
||||
Config.prototype.extendSchema = function (key, schema) {
|
||||
var additionalSchema = {};
|
||||
if (!this.has(key)) {
|
||||
additionalSchema[key] = schema;
|
||||
this.schema = this.schema.keys(additionalSchema);
|
||||
this.reset(this.config);
|
||||
}
|
||||
};
|
||||
|
||||
Config.prototype.reset = function (obj) {
|
||||
var results = Joi.validate(obj, this.schema);
|
||||
if (results.error) {
|
||||
throw results.error;
|
||||
}
|
||||
this.config = results.value;
|
||||
};
|
||||
|
||||
Config.prototype.set = function (key, value) {
|
||||
var config = _.cloneDeep(this.config);
|
||||
if (_.isPlainObject(key)) {
|
||||
config = override(config, key);
|
||||
} else {
|
||||
_.deepSet(config, key, value);
|
||||
}
|
||||
var results = Joi.validate(config, this.schema);
|
||||
if (results.error) {
|
||||
throw results.error;
|
||||
}
|
||||
this.config = results.value;
|
||||
};
|
||||
|
||||
Config.prototype.get = function (key) {
|
||||
if (!key) {
|
||||
return _.cloneDeep(this.config);
|
||||
}
|
||||
|
||||
var value = _.deepGet(this.config, key);
|
||||
if (value === undefined) {
|
||||
if (!this.has(key)) {
|
||||
throw new Error('Unknown config key: ' + key);
|
||||
}
|
||||
}
|
||||
return _.cloneDeep(value);
|
||||
};
|
||||
|
||||
Config.prototype.has = function (key) {
|
||||
function has(key, schema, path) {
|
||||
path = path || [];
|
||||
// Catch the partial paths
|
||||
if (path.join('.') === key) return true;
|
||||
// Only go deep on inner objects with children
|
||||
if (schema._inner.children.length) {
|
||||
for (var i = 0; i < schema._inner.children.length; i++) {
|
||||
var child = schema._inner.children[i];
|
||||
// If the child is an object, recurse through its children and return
|
||||
// true if there's a match
|
||||
if (child.schema._type === 'object') {
|
||||
if (has(key, child.schema, path.concat([child.key]))) return true;
|
||||
// if the child matches, return true
|
||||
} else if (path.concat([child.key]).join('.') === key) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return !!has(key, this.schema);
|
||||
};
|
||||
|
||||
module.exports = Config;
|
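A short usage sketch for the Config class above, using a deliberately tiny Joi schema instead of the full Kibana one; it exercises get, set, has, and extendSchema exactly as defined in this file.

var Joi = require('joi');
var Config = require('./config');

var schema = Joi.object({
  kibana: Joi.object({
    server: Joi.object({
      port: Joi.number().default(5601)
    }).default()
  }).default()
}).default();

var config = new Config(schema);

config.get('kibana.server.port');        // 5601, the Joi default
config.set('kibana.server.port', 5602);
config.get('kibana.server.port');        // 5602
config.has('kibana.server.host');        // false, the key is not in the schema
// config.get('kibana.server.host')      // would throw: Unknown config key

// Plugins get their own namespace added after construction:
config.extendSchema('example', Joi.object({ enabled: Joi.boolean().default(true) }).default());
config.get('example.enabled');           // true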
src/server/lib/config/explode_by.js (new file, 19 lines)
|
@ -0,0 +1,19 @@
|
|||
var _ = require('lodash');
|
||||
module.exports = function (dot, flatObject) {
|
||||
var fullObject = {};
|
||||
_.each(flatObject, function (value, key) {
|
||||
var keys = key.split(dot);
|
||||
(function walk(memo, keys, value) {
|
||||
var _key = keys.shift();
|
||||
if (keys.length === 0) {
|
||||
memo[_key] = value;
|
||||
} else {
|
||||
if (!memo[_key]) memo[_key] = {};
|
||||
walk(memo[_key], keys, value);
|
||||
}
|
||||
})(fullObject, keys, value);
|
||||
});
|
||||
return fullObject;
|
||||
};
|
||||
|
||||
|
src/server/lib/config/flatten_with.js (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
|||
var _ = require('lodash');
|
||||
module.exports = function (dot, nestedObj, flattenArrays) {
|
||||
var key; // original key
|
||||
var stack = []; // track key stack
|
||||
var flatObj = {};
|
||||
(function flattenObj(obj) {
|
||||
_.keys(obj).forEach(function (key) {
|
||||
stack.push(key);
|
||||
if (!flattenArrays && _.isArray(obj[key])) flatObj[stack.join(dot)] = obj[key];
|
||||
else if (_.isObject(obj[key])) flattenObj(obj[key]);
|
||||
else flatObj[stack.join(dot)] = obj[key];
|
||||
stack.pop();
|
||||
});
|
||||
}(nestedObj));
|
||||
return flatObj;
|
||||
};
|
||||
|
||||
|
src/server/lib/config/index.js (new file, 6 lines)
|
@ -0,0 +1,6 @@
|
|||
var Config = require('./config');
|
||||
var schema = require('./schema');
|
||||
var config = new Config(schema);
|
||||
module.exports = function () {
|
||||
return config;
|
||||
};
|
src/server/lib/config/override.js (new file, 11 lines)
|
@ -0,0 +1,11 @@
|
|||
var _ = require('lodash');
|
||||
var flattenWith = require('./flatten_with');
|
||||
var explodeBy = require('./explode_by');
|
||||
|
||||
module.exports = function (target, source) {
|
||||
var _target = flattenWith('.', target);
|
||||
var _source = flattenWith('.', source);
|
||||
return explodeBy('.', _.defaults(_source, _target));
|
||||
};
|
||||
|
||||
|
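To make the relationship between the three helpers concrete, a small sketch: flattenWith turns a nested object into dotted keys, explodeBy reverses that, and override combines them so keys in source replace the matching keys in target without discarding unrelated ones. The objects are invented.

var flattenWith = require('./flatten_with');
var explodeBy = require('./explode_by');
var override = require('./override');

var target = { kibana: { server: { host: '0.0.0.0', port: 5601 } } };
var source = { kibana: { server: { port: 5602 } } };

flattenWith('.', target);
// => { 'kibana.server.host': '0.0.0.0', 'kibana.server.port': 5601 }

explodeBy('.', { 'kibana.server.port': 5602 });
// => { kibana: { server: { port: 5602 } } }

override(target, source);
// => { kibana: { server: { host: '0.0.0.0', port: 5602 } } }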
src/server/lib/config/schema.js (new file, 72 lines)
|
@ -0,0 +1,72 @@
|
|||
var Joi = require('joi');
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var checkPath = require('./check_path');
|
||||
var packagePath = path.resolve(__dirname, '..', '..', 'package.json');
|
||||
|
||||
// Check if the local public folder is present. This means we are running in
|
||||
// the NPM module. If it's not there then we are running in the git root.
|
||||
var publicFolder = path.resolve(__dirname, '..', '..', 'public');
|
||||
if (!checkPath(publicFolder)) publicFolder = path.resolve(__dirname, '..', '..', '..', 'kibana');
|
||||
|
||||
try {
|
||||
fs.statSync(packagePath);
|
||||
} catch (err) {
|
||||
packagePath = path.resolve(__dirname, '..', '..', '..', '..', 'package.json');
|
||||
}
|
||||
|
||||
var bundledPluginsFolder = path.resolve(publicFolder, 'plugins');
|
||||
|
||||
|
||||
module.exports = Joi.object({
|
||||
kibana: Joi.object({
|
||||
server: Joi.object({
|
||||
host: Joi.string().hostname().default('0.0.0.0'),
|
||||
port: Joi.number().default(5601),
|
||||
maxSockets: Joi.any().default(Infinity),
|
||||
pidFile: Joi.string(),
|
||||
root: Joi.string().default(path.normalize(path.join(__dirname, '..'))),
|
||||
ssl: Joi.object({
|
||||
cert: Joi.string(),
|
||||
key: Joi.string()
|
||||
}).default()
|
||||
}).default(),
|
||||
index: Joi.string().default('.kibana'),
|
||||
publicFolder: Joi.string().default(publicFolder),
|
||||
externalPluginsFolder: Joi.alternatives().try(Joi.array().items(Joi.string()), Joi.string()),
|
||||
bundledPluginsFolder: Joi.string().default(bundledPluginsFolder),
|
||||
defaultAppId: Joi.string().default('discover'),
|
||||
package: Joi.any().default(require(packagePath)),
|
||||
buildNum: Joi.string().default('@@buildNum'),
|
||||
bundledPluginIds: Joi.array().items(Joi.string())
|
||||
}).default(),
|
||||
elasticsearch: Joi.object({
|
||||
url: Joi.string().uri({ scheme: ['http', 'https'] }).default('http://localhost:9200'),
|
||||
preserveHost: Joi.boolean().default(true),
|
||||
username: Joi.string(),
|
||||
password: Joi.string(),
|
||||
shardTimeout: Joi.number().default(0),
|
||||
requestTimeout: Joi.number().default(30000),
|
||||
pingTimeout: Joi.number().default(30000),
|
||||
startupTimeout: Joi.number().default(5000),
|
||||
ssl: Joi.object({
|
||||
verify: Joi.boolean().default(true),
|
||||
ca: Joi.string(),
|
||||
cert: Joi.string(),
|
||||
key: Joi.string()
|
||||
}).default(),
|
||||
minimumVerison: Joi.string().default('1.4.4')
|
||||
}).default(),
|
||||
logging: Joi.object({
|
||||
quiet: Joi.boolean().default(false),
|
||||
file: Joi.string(),
|
||||
console: Joi.object({
|
||||
ops: Joi.any(),
|
||||
log: Joi.any().default('*'),
|
||||
response: Joi.any().default('*'),
|
||||
error: Joi.any().default('*'),
|
||||
json: Joi.boolean().default(false),
|
||||
}).default()
|
||||
}).default(),
|
||||
}).default();
|
||||
|
|
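A quick sketch of how this schema behaves when an empty settings object is validated, assuming the deep-default behaviour of the Joi version pinned in package.json (^6.4.3): every namespace materializes with its defaults, which is what lets the rest of the server call config.get() before any settings are supplied.

var Joi = require('joi');
var schema = require('./schema');

var result = Joi.validate({}, schema);

result.value.kibana.server.port;     // 5601
result.value.kibana.server.host;     // '0.0.0.0'
result.value.kibana.index;           // '.kibana'
result.value.elasticsearch.url;      // 'http://localhost:9200'
result.value.logging.console.json;   // false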
@ -1,40 +0,0 @@
|
|||
var _ = require('lodash');
|
||||
var through = require('through');
|
||||
|
||||
var levels = {
|
||||
10: 'trace',
|
||||
20: 'debug',
|
||||
30: 'info',
|
||||
40: 'warn',
|
||||
50: 'error',
|
||||
60: 'fatal'
|
||||
};
|
||||
|
||||
function write(entry) {
|
||||
entry = JSON.parse(entry.toString('utf8'));
|
||||
var env = process.env.NODE_ENV || 'development';
|
||||
|
||||
var output = {
|
||||
'@timestamp': entry.time,
|
||||
'level': levels[entry.level],
|
||||
'message': entry.msg,
|
||||
'node_env': env,
|
||||
'request': entry.req,
|
||||
'response': entry.res
|
||||
};
|
||||
|
||||
if (entry.err) {
|
||||
output.error = entry.err;
|
||||
if (!output.message) output.message = output.error.message;
|
||||
}
|
||||
|
||||
this.queue(JSON.stringify(output) + '\n');
|
||||
}
|
||||
|
||||
function end() {
|
||||
this.queue(null);
|
||||
}
|
||||
|
||||
module.exports = function () {
|
||||
return through(write, end);
|
||||
};
|
|
@ -1,40 +0,0 @@
|
|||
var config = require('../config');
|
||||
var elasticsearch = require('elasticsearch');
|
||||
var logger = require('./logger');
|
||||
var _ = require('lodash');
|
||||
var fs = require('fs');
|
||||
var util = require('util');
|
||||
var url = require('url');
|
||||
var uri = url.parse(config.elasticsearch);
|
||||
if (config.kibana.kibana_elasticsearch_username && config.kibana.kibana_elasticsearch_password) {
|
||||
uri.auth = util.format('%s:%s', config.kibana.kibana_elasticsearch_username, config.kibana.kibana_elasticsearch_password);
|
||||
}
|
||||
|
||||
var ssl = { rejectUnauthorized: config.kibana.verify_ssl };
|
||||
|
||||
if (config.kibana.kibana_elasticsearch_client_crt && config.kibana.kibana_elasticsearch_client_key) {
|
||||
ssl.cert = fs.readFileSync(config.kibana.kibana_elasticsearch_client_crt, 'utf8');
|
||||
ssl.key = fs.readFileSync(config.kibana.kibana_elasticsearch_client_key, 'utf8');
|
||||
}
|
||||
|
||||
if (config.kibana.ca) {
|
||||
ssl.ca = fs.readFileSync(config.kibana.ca, 'utf8');
|
||||
}
|
||||
|
||||
module.exports = new elasticsearch.Client({
|
||||
host: url.format(uri),
|
||||
ssl: ssl,
|
||||
apiVersion: '1.4',
|
||||
pingTimeout: config.ping_timeout,
|
||||
log: function (config) {
|
||||
this.error = function (err) {
|
||||
logger.error({ err: err });
|
||||
};
|
||||
this.warning = _.bindKey(logger, 'warn');
|
||||
this.info = _.noop;
|
||||
this.debug = _.noop;
|
||||
this.trace = _.noop;
|
||||
this.close = _.noop;
|
||||
}
|
||||
});
|
||||
|
src/server/lib/extend_hapi.js (new file, 4 lines)
|
@ -0,0 +1,4 @@
|
|||
module.exports = function (server) {
|
||||
server.decorate('server', 'config', require('./config'));
|
||||
server.decorate('server', 'loadKibanaPlugins', require('./plugins/load_kibana_plugins'));
|
||||
};
|
src/server/lib/get_status.js (new file, 4 lines)
|
@ -0,0 +1,4 @@
|
|||
var status = require('./status');
|
||||
module.exports = function (name) {
|
||||
return status[name] || { state: 'red' };
|
||||
};
|
|
@ -1,20 +0,0 @@
|
|||
var _ = require('lodash');
|
||||
var glob = require('glob');
|
||||
var path = require('path');
|
||||
|
||||
var plugins = function (dir) {
|
||||
if (!dir) return [];
|
||||
var files = glob.sync(path.join(dir, '*', 'index.js')) || [];
|
||||
return files.map(function (file) {
|
||||
var relative = path.relative(dir, file);
|
||||
return path.join('plugins', relative).replace(/\\/g, '/').replace(/\.js$/, '');
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = function (config) {
|
||||
var bundled_plugin_ids = config.kibana.bundled_plugin_ids || [];
|
||||
var bundled_plugins = plugins(config.bundled_plugins_folder);
|
||||
var external_plugins = plugins(config.external_plugins_folder);
|
||||
return bundled_plugin_ids.concat(bundled_plugins, external_plugins);
|
||||
};
|
||||
|
src/server/lib/load_settings_from_yaml.js (new file, 29 lines)
|
@ -0,0 +1,29 @@
|
|||
var fs = require('fs');
|
||||
var yaml = require('js-yaml');
|
||||
module.exports = function (path) {
|
||||
var config = yaml.safeLoad(fs.readFileSync(path, 'utf8'));
|
||||
var settings = {};
|
||||
if (config.port) settings['kibana.server.port'] = config.port;
|
||||
if (config.host) settings['kibana.server.host'] = config.host;
|
||||
if (config.elasticsearch_url) settings['elasticsearch.url'] = config.elasticsearch_url;
|
||||
if (config.elasticsearch_preserve_host) settings['elasticsearch.preserveHost'] = config.elasticsearch_preserve_host;
|
||||
if (config.config_index) settings['config.index'] = config.config_index;
|
||||
if (config.config_elasticsearch_username) settings['elasticsearch.username'] = config.config_elasticsearch_username;
|
||||
if (config.config_elasticsearch_password) settings['elasticsearch.password'] = config.config_elasticsearch_password;
|
||||
if (config.config_elasticsearch_client_crt) settings['elasticsearch.ssl.cert'] = config.config_elasticsearch_client_crt;
|
||||
if (config.config_elasticsearch_client_key) settings['elasticsearch.ssl.key'] = config.config_elasticsearch_client_key;
|
||||
if (config.ca) settings['elasticsearch.ssl.ca'] = config.ca;
|
||||
if (config.verify_ssl) settings['elasticsearch.ssl.verify'] = config.verify_ssl;
|
||||
if (config.default_app_id) settings['kibana.defaultAppId'] = config.default_app_id;
|
||||
if (config.ping_timeout) settings['elasticsearch.pingTimeout'] = config.ping_timeout;
|
||||
if (config.request_timeout) settings['elasticsearch.requestTimeout'] = config.request_timeout;
|
||||
if (config.shard_timeout) settings['elasticsearch.shardTimeout'] = config.shard_timeout;
|
||||
if (config.startup_timeout) settings['elasticsearch.startupTimeout'] = config.startup_timeout;
|
||||
if (config.ssl_cert_file) settings['kibana.server.ssl.cert'] = config.ssl_cert_file;
|
||||
if (config.ssl_key_file) settings['kibana.server.ssl.key'] = config.ssl_key_file;
|
||||
if (config.pid_file) settings['config.server.pidFile'] = config.pid_file;
|
||||
if (config.log_file) settings['logging.file'] = config.log_file;
|
||||
if (config.bundled_plugin_ids) settings['kibana.bundledPluginIds'] = config.bundled_plugin_ids;
|
||||
return settings;
|
||||
};
|
||||
|
|
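For orientation, a sketch of the translation this module performs; the kibana.yml path and contents are invented, but the key mapping follows the lines above: each legacy snake_case setting becomes one dotted key that the new Config schema understands.

var loadSettingsFromYAML = require('./load_settings_from_yaml');

// Suppose /etc/kibana/kibana.yml (hypothetical path) contains:
//   port: 5601
//   host: "0.0.0.0"
//   elasticsearch_url: "http://localhost:9200"
var settings = loadSettingsFromYAML('/etc/kibana/kibana.yml');

// settings is then:
// {
//   'kibana.server.port': 5601,
//   'kibana.server.host': '0.0.0.0',
//   'elasticsearch.url': 'http://localhost:9200'
// }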
@ -1,47 +0,0 @@
|
|||
var _ = require('lodash');
|
||||
var env = process.env.NODE_ENV || 'development';
|
||||
var bunyan = require('bunyan');
|
||||
var fs = require('fs');
|
||||
var StdOutStream = require('./StdOutStream');
|
||||
var createJSONStream = require('./createJSONStream');
|
||||
var config = require('../config');
|
||||
var streams = [];
|
||||
|
||||
// Set the default stream based on the enviroment. If we are on development then
|
||||
// then we are going to create a pretty stream. Everytyhing else will get the
|
||||
// JSON stream to stdout.
|
||||
var defaultStream;
|
||||
if (env === 'development') {
|
||||
defaultStream = new StdOutStream();
|
||||
} else {
|
||||
defaultStream = createJSONStream()
|
||||
.pipe(process.stdout);
|
||||
}
|
||||
|
||||
// If we are not being oppressed and we are not sending the output to a log file
|
||||
// push the default stream to the list of streams
|
||||
if (!config.quiet && !config.log_file) {
|
||||
streams.push({ stream: defaultStream });
|
||||
}
|
||||
|
||||
// Send the stream to a file using the json format.
|
||||
if (config.log_file) {
|
||||
var fileStream = fs.createWriteStream(config.log_file);
|
||||
streams.push({ stream: createJSONStream().pipe(fileStream) });
|
||||
}
|
||||
|
||||
var logger = module.exports = bunyan.createLogger({
|
||||
name: 'Kibana',
|
||||
streams: streams,
|
||||
serializers: _.assign(bunyan.stdSerializers, {
|
||||
res: function (res) {
|
||||
if (!res) return res;
|
||||
return {
|
||||
statusCode: res.statusCode,
|
||||
responseTime: res.responseTime,
|
||||
contentLength: res.contentLength
|
||||
};
|
||||
}
|
||||
})
|
||||
});
|
||||
|
src/server/lib/logging/good_reporters/_event_to_json.js (new file, 128 lines)
|
@ -0,0 +1,128 @@
|
|||
var moment = require('moment');
|
||||
var _ = require('lodash');
|
||||
var env = process.env.NODE_ENV || 'development';
|
||||
var numeral = require('numeral');
|
||||
var ansicolors = require('ansicolors');
|
||||
var stringify = require('json-stringify-safe');
|
||||
var querystring = require('querystring');
|
||||
|
||||
function serializeError(err) {
|
||||
return {
|
||||
message: err.message,
|
||||
name: err.name,
|
||||
stack: err.stack,
|
||||
code: err.code,
|
||||
signal: err.signal
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
var levelColor = function (code) {
|
||||
if (code < 299) {
|
||||
return ansicolors.green(code);
|
||||
}
|
||||
if (code < 399) {
|
||||
return ansicolors.yellow(code);
|
||||
}
|
||||
if (code < 499) {
|
||||
return ansicolors.magenta(code);
|
||||
}
|
||||
return ansicolors.red(code);
|
||||
};
|
||||
|
||||
function lookup(name) {
|
||||
switch (name) {
|
||||
case 'error':
|
||||
return 'error';
|
||||
default:
|
||||
return 'info';
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function (name, event) {
|
||||
var data = {
|
||||
'@timestamp': moment.utc(event.timestamp).format(),
|
||||
level: lookup(event),
|
||||
node_env: env,
|
||||
tags: event.tags,
|
||||
pid: event.pid
|
||||
};
|
||||
if (name === 'response') {
|
||||
_.defaults(data, _.pick(event, [
|
||||
'method',
|
||||
'statusCode'
|
||||
]));
|
||||
|
||||
data.req = {
|
||||
url: event.path,
|
||||
method: event.method,
|
||||
headers: event.headers,
|
||||
remoteAddress: event.source.remoteAddress,
|
||||
userAgent: event.source.userAgent,
|
||||
referer: event.source.referer
|
||||
};
|
||||
|
||||
var contentLength = 0;
|
||||
if (typeof event.responsePayload === 'object') {
|
||||
contentLength = stringify(event.responsePayload).length;
|
||||
} else {
|
||||
contentLength = event.responsePayload.toString().length;
|
||||
}
|
||||
|
||||
data.res = {
|
||||
statusCode: event.statusCode,
|
||||
responseTime: event.responseTime,
|
||||
contentLength: contentLength
|
||||
};
|
||||
|
||||
var query = querystring.stringify(event.query);
|
||||
if (query) data.req.url += '?' + query;
|
||||
|
||||
|
||||
data.message = data.req.method.toUpperCase() + ' ';
|
||||
data.message += data.req.url;
|
||||
data.message += ' ';
|
||||
data.message += levelColor(data.res.statusCode);
|
||||
data.message += ' ';
|
||||
data.message += ansicolors.brightBlack(data.res.responseTime + 'ms');
|
||||
data.message += ansicolors.brightBlack(' - ' + numeral(contentLength).format('0.0b'));
|
||||
}
|
||||
else if (name === 'ops') {
|
||||
_.defaults(data, _.pick(event, [
|
||||
'pid',
|
||||
'os',
|
||||
'proc',
|
||||
'load'
|
||||
]));
|
||||
data.message = ansicolors.brightBlack('memory: ');
|
||||
data.message += numeral(data.proc.mem.heapUsed).format('0.0b');
|
||||
data.message += ' ';
|
||||
data.message += ansicolors.brightBlack('uptime: ');
|
||||
data.message += numeral(data.proc.uptime).format('00:00:00');
|
||||
data.message += ' ';
|
||||
data.message += ansicolors.brightBlack('load: [');
|
||||
data.message += data.os.load.map(function (val) {
|
||||
return numeral(val).format('0.00');
|
||||
}).join(' ');
|
||||
data.message += ansicolors.brightBlack(']');
|
||||
data.message += ' ';
|
||||
data.message += ansicolors.brightBlack('delay: ');
|
||||
data.message += numeral(data.proc.delay).format('0.000');
|
||||
}
|
||||
else if (name === 'error') {
|
||||
data.level = 'error';
|
||||
data.message = event.error.message;
|
||||
data.error = serializeError(event.error);
|
||||
data.url = event.url;
|
||||
}
|
||||
else {
|
||||
if (event.data instanceof Error) {
|
||||
data.level = _.contains(event.tags, 'fatal') ? 'fatal' : 'error';
|
||||
data.message = event.data.message;
|
||||
data.error = serializeError(event.data);
|
||||
} else {
|
||||
data.message = event.data;
|
||||
}
|
||||
}
|
||||
return data;
|
||||
};
|
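A hedged sketch of what this translation yields for a plain log event; the event shape mirrors what the good plugin hands to reporters, and the values are invented.

var eventToJson = require('./_event_to_json');

var json = eventToJson('log', {
  timestamp: Date.now(),
  tags: ['plugin', 'info'],
  pid: process.pid,
  data: 'plugin initialized'
});

// json comes back roughly as:
// {
//   '@timestamp': '2015-06-05T12:00:00+00:00',
//   level: 'info',
//   node_env: 'development',
//   tags: ['plugin', 'info'],
//   pid: 1234,
//   message: 'plugin initialized'
// }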
src/server/lib/logging/good_reporters/console.js (new file, 58 lines)
|
@ -0,0 +1,58 @@
|
|||
var ansicolors = require('ansicolors');
|
||||
var eventToJson = require('./_event_to_json');
|
||||
var GoodReporter = require('good-reporter');
|
||||
var util = require('util');
|
||||
var moment = require('moment');
|
||||
var stringify = require('json-stringify-safe');
|
||||
var querystring = require('querystring');
|
||||
var numeral = require('numeral');
|
||||
|
||||
var colors = {
|
||||
log: 'blue',
|
||||
req: 'green',
|
||||
res: 'green',
|
||||
ops: 'cyan',
|
||||
err: 'red',
|
||||
info: 'blue',
|
||||
error: 'red',
|
||||
fatal: 'magenta'
|
||||
};
|
||||
|
||||
function stripColors(string) {
|
||||
return string.replace(/\u001b[^m]+m/g, '');
|
||||
}
|
||||
|
||||
var Console = module.exports = function (events, options) {
|
||||
this._json = options.json;
|
||||
GoodReporter.call(this, events);
|
||||
};
|
||||
util.inherits(Console, GoodReporter);
|
||||
|
||||
Console.prototype.stop = function () { };
|
||||
|
||||
Console.prototype._report = function (name, data) {
|
||||
data = eventToJson(name, data);
|
||||
var nameCrayon = ansicolors[colors[name.substr(0, 3)]];
|
||||
var typeCrayon = ansicolors[colors[data.level]];
|
||||
var output;
|
||||
if (this._json) {
|
||||
data.message = stripColors(data.message);
|
||||
output = stringify(data);
|
||||
} else {
|
||||
output = nameCrayon(name.substr(0, 3));
|
||||
output += ': ';
|
||||
output += typeCrayon(data.level.toUpperCase());
|
||||
output += ' ';
|
||||
output += '[ ';
|
||||
output += ansicolors.brightBlack(moment(data.timestamp).format());
|
||||
output += ' ] ';
|
||||
|
||||
if (data.error) {
|
||||
output += ansicolors.red(data.error.stack);
|
||||
} else {
|
||||
output += data.message;
|
||||
}
|
||||
|
||||
}
|
||||
console.log(output);
|
||||
};
|
src/server/lib/logging/good_reporters/file.js (new file, empty)
src/server/lib/logging/index.js (new file, 35 lines)
|
@ -0,0 +1,35 @@
|
|||
var Promise = require('bluebird');
|
||||
var good = require('good');
|
||||
var path = require('path');
|
||||
var join = path.join;
|
||||
var Console = require('./good_reporters/console');
|
||||
|
||||
|
||||
module.exports = function (server) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
var reporters = [];
|
||||
var config = server.config();
|
||||
|
||||
// If we are not quiet then add the console logger
|
||||
var filters = {};
|
||||
if (!config.get('logging.quiet')) {
|
||||
if (config.get('logging.console.ops') != null) filters.ops = config.get('logging.console.ops');
|
||||
if (config.get('logging.console.log') != null) filters.log = config.get('logging.console.log');
|
||||
if (config.get('logging.console.response') != null) filters.response = config.get('logging.console.response');
|
||||
if (config.get('logging.console.error') != null) filters.error = config.get('logging.console.error');
|
||||
}
|
||||
reporters.push({ reporter: Console, args: [filters, { json: config.get('logging.console.json') } ] });
|
||||
server.register({
|
||||
register: good,
|
||||
options: {
|
||||
opsInterval: 5000,
|
||||
logRequestHeaders: true,
|
||||
logResponsePayload: true,
|
||||
reporters: reporters
|
||||
}
|
||||
}, function (err) {
|
||||
if (err) return reject(err);
|
||||
resolve(server);
|
||||
});
|
||||
});
|
||||
};
|
|
@ -1,29 +0,0 @@
|
|||
var config = require('../config');
|
||||
var upgrade = require('./upgradeConfig');
|
||||
|
||||
module.exports = function (client) {
|
||||
var options = {
|
||||
index: config.kibana.kibana_index,
|
||||
type: 'config',
|
||||
body: {
|
||||
size: 1000,
|
||||
sort: [ { buildNum: { order: 'desc' } } ],
|
||||
query: {
|
||||
filtered: {
|
||||
filter: {
|
||||
bool: {
|
||||
must_not: [ { query: { match: { _id: '@@version' } } } ]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return client.search(options)
|
||||
.then(upgrade(client))
|
||||
.catch(function (err) {
|
||||
if (!/SearchParseException.+mapping.+\[buildNum\]|^IndexMissingException/.test(err.message)) throw err;
|
||||
});
|
||||
};
|
||||
|
src/server/lib/plugins/add_statics_for_public.js (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
|||
var Promise = require('bluebird');
|
||||
module.exports = function (plugin) {
|
||||
if (plugin.publicPath) {
|
||||
plugin.server.route({
|
||||
config: {
|
||||
id: plugin.name + ':public'
|
||||
},
|
||||
method: 'GET',
|
||||
path: '/' + plugin.name + '/{paths*}',
|
||||
handler: {
|
||||
directory: {
|
||||
path: plugin.publicPath
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return Promise.resolve(plugin);
|
||||
};
|
src/server/lib/plugins/check_dependencies.js (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
|||
var _ = require('lodash');
|
||||
var checkDependencies = module.exports = function (name, deps, callStack) {
|
||||
if (!deps[name]) throw new Error('Missing dependency: ' + name);
|
||||
callStack = callStack || [];
|
||||
if (_.contains(callStack, name)) {
|
||||
callStack.push(name);
|
||||
throw new Error('Circular dependency: ' + callStack.join(' -> '));
|
||||
}
|
||||
for (var i = 0; i < deps[name].length; i++) {
|
||||
var task = deps[name][i];
|
||||
if (!deps[task]) throw new Error('Missing dependency: ' + task);
|
||||
if (deps[task].length) {
|
||||
checkDependencies(task, deps, callStack.concat(name));
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
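A small sketch of the dependency map this helper expects and the two failure modes it guards against; the plugin names are invented.

var checkDependencies = require('./check_dependencies');

var deps = {
  elasticsearch: [],
  kibana: ['elasticsearch'],
  statusPage: ['kibana']
};

checkDependencies('statusPage', deps);   // true: statusPage -> kibana -> elasticsearch resolves

// checkDependencies('kibana', { kibana: ['missing'] })
//   throws: Missing dependency: missing

// checkDependencies('a', { a: ['b'], b: ['a'] })
//   throws: Circular dependency: a -> b -> a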
src/server/lib/plugins/list_plugins.js (new file, 27 lines)
|
@ -0,0 +1,27 @@
|
|||
var _ = require('lodash');
|
||||
var glob = require('glob');
|
||||
var path = require('path');
|
||||
|
||||
var plugins = function (dir) {
|
||||
if (!dir) return [];
|
||||
var files = glob.sync(path.join(dir, '*', 'index.js')) || [];
|
||||
return files.map(function (file) {
|
||||
return file.replace(dir, 'plugins').replace(/\.js$/, '');
|
||||
});
|
||||
};
|
||||
|
||||
var cache;
|
||||
|
||||
module.exports = function (server) {
|
||||
var config = server.config();
|
||||
if (!cache) {
|
||||
var bundled_plugin_ids = config.get('kibana.bundledPluginIds') || [];
|
||||
var bundled_plugins = plugins(config.get('kibana.bundledPluginsFolder'));
|
||||
var external_plugins = _(server.plugins).map(function (plugin, name) {
|
||||
return plugin.self && plugin.self.publicPlugins || [];
|
||||
}).flatten().value();
|
||||
cache = bundled_plugin_ids.concat(bundled_plugins, external_plugins);
|
||||
}
|
||||
return cache;
|
||||
};
|
||||
|
src/server/lib/plugins/load_kibana_plugins.js (new file, 21 lines)
|
@ -0,0 +1,21 @@
|
|||
var Promise = require('bluebird');
|
||||
var registerPlugins = require('./register_plugins');
|
||||
var requirePlugins = require('./require_plugins');
|
||||
var logging = require('../logging/');
|
||||
var registerPluginConfigs = require('./register_plugin_configs');
|
||||
|
||||
module.exports = function (externalPlugins) {
|
||||
// require all the internal plugins then concat with the external
|
||||
// plugins passed in from the start method.
|
||||
var plugins = requirePlugins().concat(externalPlugins);
|
||||
// setup logging then register the plugins
|
||||
return logging(this)
|
||||
// Setup the config schema for the plugins
|
||||
.then(function (server) {
|
||||
return registerPluginConfigs(server, plugins);
|
||||
})
|
||||
// Register the plugins
|
||||
.then(function (server) {
|
||||
return registerPlugins(server, plugins);
|
||||
});
|
||||
};
|
src/server/lib/plugins/plugin.js (new file, 18 lines)
|
@ -0,0 +1,18 @@
|
|||
var _ = require('lodash');
|
||||
var Promise = require('bluebird');
|
||||
|
||||
function Plugin(options) {
|
||||
this.server = null;
|
||||
this.status = null;
|
||||
this.publicPath = null;
|
||||
this.require = [];
|
||||
this.init = function (server, options) {
|
||||
return Promise.reject(new Error('You must override the init function for plugins'));
|
||||
};
|
||||
this.config = function (Joi) {
|
||||
return Joi.object({}).default();
|
||||
};
|
||||
_.assign(this, options);
|
||||
}
|
||||
|
||||
module.exports = Plugin;
|
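A sketch of what a concrete plugin built on this base might look like; the name, the config key, and the route are all invented. init receives the plugin-scoped Hapi server plus the options resolved from this plugin's slice of the config schema declared in config().

var Promise = require('bluebird');
var Plugin = require('./plugin');

module.exports = new Plugin({
  name: 'example',                // hypothetical plugin name
  require: ['elasticsearch'],     // hypothetical dependency on another plugin
  config: function (Joi) {
    // Becomes the schema behind config.get('example')
    return Joi.object({
      enabled: Joi.boolean().default(true)
    }).default();
  },
  init: function (server, options) {
    if (!options.enabled) return Promise.resolve();
    server.route({
      method: 'GET',
      path: '/example/ping',
      handler: function (request, reply) {
        reply({ pong: true });
      }
    });
    return Promise.resolve();
  }
});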
src/server/lib/plugins/register_plugin_configs.js (new file, 22 lines)
|
@ -0,0 +1,22 @@
|
|||
var Promise = require('bluebird');
|
||||
var Joi = require('joi');
|
||||
/**
|
||||
* Execute the #config() call on each of the plugins and attach their schemas
|
||||
* to the main config object under their namespace.
|
||||
* @param {object} server Kibana server
|
||||
* @param {array} plugins Plugins for Kibana
|
||||
* @returns {Promise}
|
||||
*/
|
||||
module.exports = function (server, plugins) {
|
||||
var config = server.config();
|
||||
return Promise.each(plugins, function (plugin) {
|
||||
return Promise.resolve(plugin.config(Joi)).then(function (schema) {
|
||||
var pluginSchema = {};
|
||||
if (schema) {
|
||||
config.extendSchema(plugin.name, schema);
|
||||
}
|
||||
});
|
||||
}).then(function () {
|
||||
return server;
|
||||
});
|
||||
};
|
src/server/lib/plugins/register_plugins.js (new file, 101 lines)
|
@ -0,0 +1,101 @@
|
|||
var _ = require('lodash');
|
||||
var Promise = require('bluebird');
|
||||
var checkDependencies = require('./check_dependencies');
|
||||
var status = require('../status');
|
||||
var addStaticsForPublic = require('./add_statics_for_public');
|
||||
|
||||
/**
|
||||
* Check to see if there are any circular dependencies for the task tree
|
||||
* @param {array} plugins an array of plugins
|
||||
* @returns {boolean} true when every plugin's dependency chain resolves without a cycle
|
||||
*/
|
||||
function checkForCircularDependency(plugins) {
|
||||
var deps = {};
|
||||
plugins.forEach(function (task) {
|
||||
deps[task.name] = [];
|
||||
if (task.require) deps[task.name] = task.require;
|
||||
});
|
||||
return _(deps).keys().map(function (task) {
|
||||
return checkDependencies(task, deps);
|
||||
}).every(function (result) {
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = function (server, plugins) {
|
||||
var total = plugins.length;
|
||||
var results = {};
|
||||
var running = {};
|
||||
var finished = false;
|
||||
var todo = plugins.concat();
|
||||
|
||||
/**
|
||||
* Checks to see if all the tasks are completed for an array of dependencies
|
||||
* @param {array} tasks An array of plugin names
|
||||
* @returns {boolean} if all the tasks are done this it will return true
|
||||
*/
|
||||
function allDone(tasks) {
|
||||
var done = _.keys(results);
|
||||
return tasks.every(function (dep) {
|
||||
return _.contains(done, dep);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a plugin with the Kibana server
|
||||
*
|
||||
* This includes setting up the status object and setting the reference to
|
||||
* the plugin's server
|
||||
*
|
||||
* @param {object} plugin The plugin to register
|
||||
* @returns {Promise}
|
||||
*/
|
||||
function registerPlugin(plugin) {
|
||||
var config = server.config();
|
||||
return new Promise(function (resolve, reject) {
|
||||
var register = function (server, options, next) {
|
||||
plugin.server = server;
|
||||
plugin.server.expose('self', plugin);
|
||||
status.createStatus(plugin);
|
||||
Promise.try(plugin.init, [server, options], plugin).nodeify(next);
|
||||
};
|
||||
register.attributes = { name: plugin.name };
|
||||
var options = config.get(plugin.name) || {};
|
||||
server.register({ register: register, options: options }, function (err) {
|
||||
if (err) return reject(err);
|
||||
// Only change the plugin status to green if the initial status has not
|
||||
// been updated from yellow - Initializing
|
||||
if (plugin.status.state === undefined) {
|
||||
plugin.status.green('Ready');
|
||||
}
|
||||
resolve(plugin);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return new Promise(function (resolve, reject) {
|
||||
// Check to see if we have a circular dependency
|
||||
if (checkForCircularDependency(plugins)) {
|
||||
(function runPending() {
|
||||
plugins.forEach(function (plugin) {
|
||||
// If the number of completed results equals the total number of plugins then we are
|
||||
// done with all the plugin initialization tasks
|
||||
if (_.keys(results).length === total) return resolve(results);
|
||||
// If the current plugin is done or running the continue to the next one
|
||||
if (results[plugin.name] || running[plugin.name]) return;
|
||||
// If the current plugin doesn't have dependencies or all the dependencies
|
||||
// are fulfilled then try running the plugin.
|
||||
if (!plugin.require || (plugin.require && allDone(plugin.require))) {
|
||||
running[plugin.name] = true;
|
||||
registerPlugin(plugin)
|
||||
.then(addStaticsForPublic)
|
||||
.then(function () {
|
||||
results[plugin.name] = true;
|
||||
runPending();
|
||||
}).catch(reject);
|
||||
}
|
||||
});
|
||||
})();
|
||||
}
|
||||
});
|
||||
};
|
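To illustrate the scheduling above: plugins with no require list register immediately, and a plugin that lists dependencies is only registered once every name it requires has finished. A rough sketch, assuming server has already been decorated by extend_hapi and that each plugin's config namespace was registered first (load_kibana_plugins does both); the plugin objects here are stand-ins.

var Promise = require('bluebird');
var registerPlugins = require('./register_plugins');

function noopInit(server, options) { return Promise.resolve(); }

var plugins = [
  { name: 'elasticsearch', init: noopInit },                       // no deps: registers first
  { name: 'kibana', require: ['elasticsearch'], init: noopInit },  // waits for elasticsearch
  { name: 'reporting', require: ['kibana'], init: noopInit }       // hypothetical, waits for kibana
];

registerPlugins(server, plugins).then(function (results) {
  // results ends up as { elasticsearch: true, kibana: true, reporting: true },
  // with the init functions run in dependency order.
});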
src/server/lib/plugins/require_plugins.js (new file, 31 lines)
|
@ -0,0 +1,31 @@
|
|||
var path = require('path');
|
||||
var join = path.join;
|
||||
var glob = require('glob');
|
||||
var Promise = require('bluebird');
|
||||
var checkPath = require('../config/check_path');
|
||||
|
||||
module.exports = function (globPath) {
|
||||
globPath = globPath || join( __dirname, '..', '..', 'plugins', '*', 'index.js');
|
||||
return glob.sync(globPath).map(function (file) {
|
||||
var module = require(file);
|
||||
var regex = new RegExp('([^' + path.sep + ']+)' + path.sep + 'index.js');
|
||||
var matches = file.match(regex);
|
||||
if (!module.name && matches) {
|
||||
module.name = matches[1];
|
||||
}
|
||||
|
||||
// has a public folder?
|
||||
var publicPath = module.publicPath || join(path.dirname(file), 'public');
|
||||
if (checkPath(publicPath)) {
|
||||
module.publicPath = publicPath;
|
||||
if (!module.publicPlugins) {
|
||||
module.publicPlugins = glob.sync(join(publicPath, 'plugins', '*', 'index.js'));
|
||||
module.publicPlugins = module.publicPlugins.map(function (file) {
|
||||
return file.replace(publicPath, module.name).replace(/\.js$/, '');
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return module;
|
||||
});
|
||||
};
|
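A sketch of what this glob loader hands back, with the folder layout made up for illustration: each module picks up a name from its folder, plus publicPath and publicPlugins when a public/ directory sits next to index.js.

// Illustrative only; assumes a plugin at src/server/plugins/example/index.js
// with a public/plugins/viz/index.js beside it.
var requirePlugins = require('./plugins/require_plugins'); // path assumed
var modules = requirePlugins(); // defaults to src/server/plugins/*/index.js

modules.forEach(function (m) {
  // e.g. 'example' [ 'example/plugins/viz/index' ]
  console.log(m.name, m.publicPlugins || []);
});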
3
src/server/lib/plugins/validate_plugin.js
Normal file
|
@ -0,0 +1,3 @@
|
|||
module.exports = function (plugin) {
|
||||
return !!plugin.name;
|
||||
};
|
|
@ -1,17 +0,0 @@
|
|||
var logger = require('./logger');
|
||||
var _ = require('lodash');
|
||||
module.exports = function (options) {
|
||||
return function (req, res, next) {
|
||||
var startTime = new Date();
|
||||
var end = res.end;
|
||||
var _req = _.pick(req, ['url', 'headers', 'method']);
|
||||
_req.connection = _.pick(req.connection, ['remoteAddress', 'remotePort']);
|
||||
res.end = function (chunk, encoding) {
|
||||
res.contentLength = parseInt(res.getHeader('content-length') || 0, 10);
|
||||
res.responseTime = (new Date()).getTime() - startTime.getTime();
|
||||
end.call(res, chunk, encoding);
|
||||
logger.info({ req: _req, res: res }, '%s %s %d - %dms', req.method, req.url, res.statusCode, res.responseTime);
|
||||
};
|
||||
next();
|
||||
};
|
||||
};
|
6
src/server/lib/run_setup_tasks.js
Normal file
|
@ -0,0 +1,6 @@
|
|||
var Promise = require('bluebird');
|
||||
module.exports = function (server, tasks) {
|
||||
return Promise.each(tasks, function (task) {
|
||||
return task(server);
|
||||
});
|
||||
};
|
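Because Promise.each resolves the tasks one at a time and in array order, callers can line up setup work like this (the second task is a made-up placeholder):

// Hypothetical usage; each task receives the server and may return a promise.
var runSetupTasks = require('./lib/run_setup_tasks'); // path assumed
var writePidFile = require('./lib/write_pid_file');

runSetupTasks(server, [
  writePidFile,
  function logReady(server) { server.log('server', 'setup tasks complete'); }
]).then(function () {
  // both tasks have resolved, strictly in order
});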
|
@ -1,9 +0,0 @@
|
|||
var waitForEs = require('./waitForEs');
|
||||
var migrateConfig = require('./migrateConfig');
|
||||
var client = require('./elasticsearch_client');
|
||||
|
||||
module.exports = function () {
|
||||
return waitForEs().then(function () {
|
||||
return migrateConfig(client);
|
||||
});
|
||||
};
|
4
src/server/lib/set_status.js
Normal file
|
@ -0,0 +1,4 @@
|
|||
var status = require('./status');
|
||||
module.exports = function (name, color, message ) {
|
||||
status[name] = { state: color, message: message };
|
||||
};
|
61
src/server/lib/start.js
Normal file
|
@ -0,0 +1,61 @@
|
|||
var _ = require('lodash');
|
||||
var Promise = require('bluebird');
|
||||
var Hapi = require('hapi');
|
||||
var requirePlugins = require('./plugins/require_plugins');
|
||||
var validatePlugin = require('./plugins/validate_plugin');
|
||||
var extendHapi = require('./extend_hapi');
|
||||
var join = require('path').join;
|
||||
|
||||
|
||||
module.exports = function (settings, plugins) {
|
||||
// Plugin authors can use this to add plugins during development
|
||||
plugins = plugins || [];
|
||||
|
||||
if (plugins.length && !plugins.every(validatePlugin)) {
|
||||
return Promise.reject(new Error('Plugins must have a name attribute.'));
|
||||
}
|
||||
|
||||
// Initialize the Hapi server
|
||||
var server = new Hapi.Server();
|
||||
|
||||
// Extend Hapi with Kibana
|
||||
extendHapi(server);
|
||||
|
||||
var config = server.config();
|
||||
if (settings) config.set(settings);
|
||||
|
||||
// Create a new connection
|
||||
server.connection({
|
||||
host: config.get('kibana.server.host'),
|
||||
port: config.get('kibana.server.port')
|
||||
});
|
||||
|
||||
// Load external plugins
|
||||
var externalPlugins = [];
|
||||
var externalPluginsFolder = config.get('kibana.externalPluginsFolder');
|
||||
if (externalPluginsFolder) {
|
||||
externalPlugins = _([externalPluginsFolder])
|
||||
.flatten()
|
||||
.map(requirePlugins)
|
||||
.flatten()
|
||||
.value();
|
||||
}
|
||||
|
||||
// Load the plugins
|
||||
return server.loadKibanaPlugins(externalPlugins.concat(plugins))
|
||||
.then(function () {
|
||||
// Start the server
|
||||
return new Promise(function (resolve, reject) {
|
||||
server.start(function (err) {
|
||||
if (err) return reject(err);
|
||||
server.log('server', 'Server running at ' + server.info.uri);
|
||||
resolve(server);
|
||||
});
|
||||
});
|
||||
})
|
||||
.catch(function (err) {
|
||||
server.log('fatal', err);
|
||||
console.log(err.stack);
|
||||
return Promise.reject(err);
|
||||
});
|
||||
};
|
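A sketch of driving this start module directly; the settings keys mirror the ones read above, while the inline plugin is illustrative and not part of the commit.

// Hypothetical direct use of lib/start.js
var start = require('./lib/start'); // path assumed

start({ 'kibana.server.host': '0.0.0.0', 'kibana.server.port': 5601 }, [
  { name: 'hello', init: function (server, options) { server.log('plugin', 'hello loaded'); } }
]).then(function (server) {
  console.log('listening at', server.info.uri); // resolves with the started Hapi server
});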
19
src/server/lib/status/index.js
Normal file
|
@ -0,0 +1,19 @@
|
|||
var logStatusChange = require('./log_status_change');
|
||||
var Status = require('./status');
|
||||
|
||||
function SystemStatus() {
|
||||
this.data = {};
|
||||
}
|
||||
|
||||
SystemStatus.prototype.createStatus = function (plugin) {
|
||||
plugin.status = new Status(plugin.name);
|
||||
plugin.server.expose('status', plugin.status);
|
||||
plugin.status.on('change', logStatusChange(plugin));
|
||||
this.data[plugin.name] = plugin.status;
|
||||
};
|
||||
|
||||
SystemStatus.prototype.toJSON = function () {
|
||||
return this.data;
|
||||
};
|
||||
|
||||
module.exports = new SystemStatus();
|
7
src/server/lib/status/log_status_change.js
Normal file
|
@ -0,0 +1,7 @@
|
|||
var util = require('util');
|
||||
module.exports = function (plugin) {
|
||||
return function (current, previous) {
|
||||
var logMsg = util.format('[ %s ] Change status from %s to %s - %s', plugin.name, previous.state, current.state, current.message);
|
||||
plugin.server.log('plugin', logMsg);
|
||||
};
|
||||
};
|
35
src/server/lib/status/status.js
Normal file
|
@ -0,0 +1,35 @@
|
|||
var util = require('util');
|
||||
var EventEmitter = require('events').EventEmitter;
|
||||
|
||||
util.inherits(Status, EventEmitter);
|
||||
function Status(name) {
|
||||
this.name = name;
|
||||
this.state = undefined;
|
||||
this.message = 'uninitialized';
|
||||
EventEmitter.call(this);
|
||||
var self = this;
|
||||
}
|
||||
|
||||
function createStatusFn(color) {
|
||||
return function (message) {
|
||||
var previous = {
|
||||
state: this.state,
|
||||
message: this.message
|
||||
};
|
||||
this.state = color;
|
||||
this.message = message;
|
||||
if (previous.state === this.state && previous.message === this.message) return;
|
||||
this.emit(color, message, previous);
|
||||
this.emit('change', this.toJSON(), previous);
|
||||
};
|
||||
}
|
||||
|
||||
Status.prototype.green = createStatusFn('green');
|
||||
Status.prototype.yellow = createStatusFn('yellow');
|
||||
Status.prototype.red = createStatusFn('red');
|
||||
|
||||
Status.prototype.toJSON = function () {
|
||||
return { state: this.state, message: this.message };
|
||||
};
|
||||
|
||||
module.exports = Status;
|
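A small sketch of the Status event flow: the color helpers record the previous state, emit a color event, then emit a generic change event, and repeated identical updates are swallowed. The plugin name here is made up.

// Illustrative only.
var Status = require('./lib/status/status'); // path assumed
var status = new Status('example');

status.on('change', function (current, previous) {
  console.log(previous.state, '->', current.state, '-', current.message);
});

status.yellow('Initializing'); // undefined -> yellow
status.green('Ready');         // yellow -> green
status.green('Ready');         // no event: state and message are unchanged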
|
@ -1,33 +0,0 @@
|
|||
var Promise = require('bluebird');
|
||||
var isUpgradeable = require('./isUpgradeable');
|
||||
var config = require('../config');
|
||||
var _ = require('lodash');
|
||||
var client = require('./elasticsearch_client');
|
||||
module.exports = function (client) {
|
||||
return function (response) {
|
||||
var newConfig = {};
|
||||
// Check to see if there are any doc. If not then we can assume
|
||||
// nothing needs to be done
|
||||
if (response.hits.hits.length === 0) return Promise.resolve();
|
||||
|
||||
// if we already have a the current version in the index then we need to stop
|
||||
if (_.find(response.hits.hits, { _id: config.package.version })) return Promise.resolve();
|
||||
|
||||
// Look for upgradeable configs. If none of them are upgradeable
|
||||
// then resolve with null.
|
||||
var body = _.find(response.hits.hits, isUpgradeable);
|
||||
if (!body) return Promise.resolve();
|
||||
|
||||
|
||||
// if the build number is still the template string (which it wil be in development)
|
||||
// then we need to set it to the max interger. Otherwise we will set it to the build num
|
||||
body._source.buildNum = (/^@@/.test(config.buildNum)) ? Math.pow(2, 53) - 1 : parseInt(config.buildNum, 10);
|
||||
|
||||
return client.create({
|
||||
index: config.kibana.kibana_index,
|
||||
type: 'config',
|
||||
body: body._source,
|
||||
id: config.package.version
|
||||
});
|
||||
};
|
||||
};
|
|
@ -1,45 +0,0 @@
|
|||
var Promise = require('bluebird');
|
||||
var NoConnections = require('elasticsearch').errors.NoConnections;
|
||||
|
||||
var client = require('./elasticsearch_client');
|
||||
var logger = require('./logger');
|
||||
var config = require('../config');
|
||||
|
||||
function waitForPong() {
|
||||
return client.ping({requestTimeout: config.kibana.startup_timeout})
|
||||
.catch(function (err) {
|
||||
if (!(err instanceof NoConnections)) throw err;
|
||||
|
||||
logger.info('Unable to connect to elasticsearch at %s. Retrying in 2.5 seconds.', config.elasticsearch);
|
||||
return Promise.delay(2500).then(waitForPong);
|
||||
});
|
||||
}
|
||||
|
||||
function waitForShards() {
|
||||
return client.cluster.health({
|
||||
timeout: '5s', // tells es to not sit around and wait forever
|
||||
index: config.kibana.kibana_index
|
||||
})
|
||||
.then(function (resp) {
|
||||
// if "timed_out" === true then elasticsearch could not
|
||||
// find any idices matching our filter within 5 seconds
|
||||
if (resp.timed_out) {
|
||||
logger.info('No existing kibana index found');
|
||||
return;
|
||||
}
|
||||
|
||||
// If status === "red" that means that index(es) were found
|
||||
// but the shards are not ready for queries
|
||||
if (resp.status === 'red') {
|
||||
logger.info('Elasticsearch is still initializing the kibana index... Trying again in 2.5 second.');
|
||||
return Promise.delay(2500).then(waitForShards);
|
||||
}
|
||||
|
||||
// otherwise we are g2g
|
||||
logger.info('Found kibana index');
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = function () {
|
||||
return waitForPong().then(waitForShards);
|
||||
};
|
16
src/server/lib/write_pid_file.js
Normal file
|
@ -0,0 +1,16 @@
|
|||
var fs = require('fs');
|
||||
var Promise = require('bluebird');
|
||||
module.exports = function (server) {
|
||||
return new Promise(function (resolve, reject) {
|
||||
var config = server.config();
|
||||
var pidFile = config.get('kibana.server.pidFile');
|
||||
if (!pidFile) return resolve(server);
|
||||
fs.writeFile(pidFile, process.pid, function (err) {
|
||||
if (err) {
|
||||
server.log('error', { err: err });
|
||||
return reject(err);
|
||||
}
|
||||
resolve(server);
|
||||
});
|
||||
});
|
||||
};
|
24
src/server/plugins/config/index.js
Normal file
|
@ -0,0 +1,24 @@
|
|||
var _ = require('lodash');
|
||||
var Promise = require('bluebird');
|
||||
var kibana = require('../../');
|
||||
var listPlugins = require('../../lib/plugins/list_plugins');
|
||||
|
||||
module.exports = new kibana.Plugin({
|
||||
init: function (server, options) {
|
||||
|
||||
server.route({
|
||||
method: 'GET',
|
||||
path: '/config',
|
||||
handler: function (request, reply) {
|
||||
var config = server.config();
|
||||
reply({
|
||||
kibana_index: config.get('kibana.index'),
|
||||
default_app_id: config.get('kibana.defaultAppId'),
|
||||
shard_timeout: config.get('elasticsearch.shardTimeout'),
|
||||
plugins: listPlugins(server)
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
});
|
47
src/server/plugins/elasticsearch/index.js
Normal file
|
@ -0,0 +1,47 @@
|
|||
var url = require('url');
|
||||
var http = require('http');
|
||||
var fs = require('fs');
|
||||
var resolve = require('url').resolve;
|
||||
var querystring = require('querystring');
|
||||
var kibana = require('../../');
|
||||
var healthCheck = require('./lib/health_check');
|
||||
var exposeClient = require('./lib/expose_client');
|
||||
var createProxy = require('./lib/create_proxy');
|
||||
|
||||
module.exports = new kibana.Plugin({
|
||||
|
||||
init: function (server, options) {
|
||||
var config = server.config();
|
||||
|
||||
// Expose the client to the server
|
||||
exposeClient(server);
|
||||
|
||||
|
||||
createProxy(server, 'GET', '/elasticsearch/{paths*}');
|
||||
createProxy(server, 'POST', '/elasticsearch/_mget');
|
||||
createProxy(server, 'POST', '/elasticsearch/_msearch');
|
||||
|
||||
function noBulkCheck(request, reply) {
|
||||
if (/\/_bulk/.test(request.path)) {
|
||||
return reply({
|
||||
error: 'You can not send _bulk requests to this interface.'
|
||||
}).code(400).takeover();
|
||||
}
|
||||
return reply.continue();
|
||||
}
|
||||
|
||||
createProxy(
|
||||
server,
|
||||
['PUT', 'POST', 'DELETE'],
|
||||
'/elasticsearch/' + config.get('kibana.index') + '/{paths*}',
|
||||
{
|
||||
prefix: '/' + config.get('kibana.index'),
|
||||
config: { pre: [ noBulkCheck ] }
|
||||
}
|
||||
);
|
||||
|
||||
// Set up the health check service and start it.
|
||||
healthCheck(this, server).start();
|
||||
|
||||
}
|
||||
});
|
38
src/server/plugins/elasticsearch/lib/check_es_version.js
Normal file
|
@ -0,0 +1,38 @@
|
|||
var _ = require('lodash');
|
||||
_.mixin(require('lodash-deep'));
|
||||
var esBool = require('./es_bool');
|
||||
var versionMath = require('./version_math');
|
||||
var SetupError = require('./setup_error');
|
||||
module.exports = function (server) {
|
||||
return function () {
|
||||
var client = server.plugins.elasticsearch.client;
|
||||
var minimumElasticsearchVersion = server.config().get('elasticsearch.minimumVerison');
|
||||
return client.nodes.info()
|
||||
.then(function (info) {
|
||||
var badNodes = _.filter(info.nodes, function (node) {
|
||||
// remove client nodes (Logstash)
|
||||
var isClient = _.deepGet(node, 'attributes.client');
|
||||
if (isClient != null && esBool(isClient) === true) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// remove nodes that are gte the min version
|
||||
var v = node.version.split('-')[0];
|
||||
return !versionMath.gte(minimumElasticsearchVersion, v);
|
||||
});
|
||||
|
||||
if (!badNodes.length) return true;
|
||||
|
||||
var badNodeNames = badNodes.map(function (node) {
|
||||
return 'Elasticsearch v' + node.version + ' @ ' + node.http_address + ' (' + node.ip + ')';
|
||||
});
|
||||
|
||||
var message = 'This version of Kibana requires Elasticsearch ' +
|
||||
minimumElasticsearchVersion + ' or higher on all nodes. ' +
|
||||
'I found the following incompatible nodes in your cluster: ' +
|
||||
badNodeNames.join(',');
|
||||
server.plugins.elasticsearch.status.red(message);
|
||||
throw new SetupError(server, message);
|
||||
});
|
||||
};
|
||||
};
|
30
src/server/plugins/elasticsearch/lib/create_agent.js
Normal file
|
@ -0,0 +1,30 @@
|
|||
var url = require('url');
|
||||
var fs = require('fs');
|
||||
var http = require('http');
|
||||
var agentOptions;
|
||||
module.exports = function (server) {
|
||||
var config = server.config();
|
||||
var target = url.parse(config.get('elasticsearch.url'));
|
||||
|
||||
if (!agentOptions) {
|
||||
agentOptions = {
|
||||
rejectUnauthorized: config.get('elasticsearch.ssl.verify')
|
||||
};
|
||||
|
||||
var customCA;
|
||||
if (/^https/.test(target.protocol) && config.get('elasticsearch.ssl.ca')) {
|
||||
customCA = fs.readFileSync(config.get('elasticsearch.ssl.ca'), 'utf8');
|
||||
agentOptions.ca = [customCA];
|
||||
}
|
||||
|
||||
// Add client certificate and key if required by elasticsearch
|
||||
if (/^https/.test(target.protocol) &&
|
||||
config.get('elasticsearch.ssl.cert') &&
|
||||
config.get('elasticsearch.ssl.key')) {
|
||||
agentOptions.cert = fs.readFileSync(config.get('elasticsearch.ssl.cert'), 'utf8');
|
||||
agentOptions.key = fs.readFileSync(config.get('elasticsearch.ssl.key'), 'utf8');
|
||||
}
|
||||
}
|
||||
|
||||
return new http.Agent(agentOptions);
|
||||
};
|
40
src/server/plugins/elasticsearch/lib/create_kibana_index.js
Normal file
|
@ -0,0 +1,40 @@
|
|||
var SetupError = require('./setup_error');
|
||||
var format = require('util').format;
|
||||
module.exports = function (server) {
|
||||
var client = server.plugins.elasticsearch.client;
|
||||
var index = server.config().get('kibana.index');
|
||||
|
||||
function handleError(message) {
|
||||
return function (err) {
|
||||
throw new SetupError(server, message, err);
|
||||
};
|
||||
}
|
||||
|
||||
return client.indices.create({
|
||||
index: index,
|
||||
body: {
|
||||
settings: {
|
||||
number_of_shards: 1,
|
||||
number_of_replicas: 1
|
||||
},
|
||||
mappings: {
|
||||
config: {
|
||||
properties: {
|
||||
buildNum: {
|
||||
type: 'string',
|
||||
index: 'not_analyzed'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.catch(handleError('Unable to create Kibana index "<%= kibana.index %>"'))
|
||||
.then(function () {
|
||||
return client.cluster.health({
|
||||
waitForStatus: 'yellow',
|
||||
index: index
|
||||
})
|
||||
.catch(handleError('Waiting for Kibana index "<%= kibana.index %>" to come online failed.'));
|
||||
});
|
||||
};
|
19
src/server/plugins/elasticsearch/lib/create_proxy.js
Normal file
|
@ -0,0 +1,19 @@
|
|||
var createAgent = require('./create_agent');
|
||||
var mapUri = require('./map_uri');
|
||||
module.exports = function createProxy(server, method, route, opts) {
|
||||
opts = opts || {};
|
||||
var options = {
|
||||
method: method,
|
||||
path: route,
|
||||
handler: {
|
||||
proxy: {
|
||||
mapUri: mapUri(server, opts.prefix),
|
||||
passThrough: true,
|
||||
agent: createAgent(server)
|
||||
}
|
||||
}
|
||||
};
|
||||
if (opts && opts.config) options.config = opts.config;
|
||||
server.route(options);
|
||||
};
|
||||
|
17
src/server/plugins/elasticsearch/lib/es_bool.js
Normal file
|
@ -0,0 +1,17 @@
|
|||
var map = {
|
||||
'false': false,
|
||||
'off': false,
|
||||
'no': false,
|
||||
'0': false,
|
||||
'true': true,
|
||||
'on': true,
|
||||
'yes': true,
|
||||
'1': true
|
||||
};
|
||||
module.exports = function (str) {
|
||||
var bool = map[String(str)];
|
||||
if (typeof bool !== 'boolean') {
|
||||
throw new TypeError('"' + str + '" does not map to an esBool');
|
||||
}
|
||||
return bool;
|
||||
};
|
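A few illustrative calls showing the mapping above; anything outside the table throws instead of defaulting.

var esBool = require('./lib/es_bool'); // path assumed

esBool('yes');  // true
esBool('0');    // false
esBool(true);   // true, since String(true) === 'true'
try {
  esBool('maybe');
} catch (e) {
  // TypeError: "maybe" does not map to an esBool
}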
51
src/server/plugins/elasticsearch/lib/expose_client.js
Normal file
|
@ -0,0 +1,51 @@
|
|||
var elasticsearch = require('elasticsearch');
|
||||
var _ = require('lodash');
|
||||
var fs = require('fs');
|
||||
var util = require('util');
|
||||
var url = require('url');
|
||||
|
||||
module.exports = function (server) {
|
||||
var config = server.config();
|
||||
var uri = url.parse(config.get('elasticsearch.url'));
|
||||
var username = config.get('elasticsearch.username');
|
||||
var password = config.get('elasticsearch.password');
|
||||
var verify_ssl = config.get('elasticsearch.ssl.verify');
|
||||
var client_crt = config.get('elasticsearch.ssl.cert');
|
||||
var client_key = config.get('elasticsearch.ssl.key');
|
||||
var ca = config.get('elasticsearch.ssl.ca');
|
||||
|
||||
if (username && password) {
|
||||
uri.auth = util.format('%s:%s', username, password);
|
||||
}
|
||||
|
||||
var ssl = { rejectUnauthorized: verify_ssl };
|
||||
if (client_crt && client_key) {
|
||||
ssl.cert = fs.readFileSync(client_crt, 'utf8');
|
||||
ssl.key = fs.readFileSync(client_key, 'utf8');
|
||||
}
|
||||
if (ca) {
|
||||
ssl.ca = fs.readFileSync(ca, 'utf8');
|
||||
}
|
||||
|
||||
var client = new elasticsearch.Client({
|
||||
host: url.format(uri),
|
||||
ssl: ssl,
|
||||
apiVersion: '1.4',
|
||||
log: function (config) {
|
||||
this.error = function (err) {
|
||||
server.log(['error', 'elasticsearch'], err);
|
||||
};
|
||||
this.warning = function (message) {
|
||||
server.log(['warn', 'elasticsearch'], '[ elasticsearch ] ' + message);
|
||||
};
|
||||
this.info = _.noop;
|
||||
this.debug = _.noop;
|
||||
this.trace = _.noop;
|
||||
this.close = _.noop;
|
||||
}
|
||||
});
|
||||
|
||||
server.expose('client', client);
|
||||
return client;
|
||||
|
||||
};
|
90
src/server/plugins/elasticsearch/lib/health_check.js
Normal file
|
@ -0,0 +1,90 @@
|
|||
var Promise = require('bluebird');
|
||||
var elasticsearch = require('elasticsearch');
|
||||
var exposeClient = require('./expose_client');
|
||||
var migrateConfig = require('./migrate_config');
|
||||
var createKibanaIndex = require('./create_kibana_index');
|
||||
var checkEsVersion = require('./check_es_version');
|
||||
var NoConnections = elasticsearch.errors.NoConnections;
|
||||
var util = require('util');
|
||||
var format = util.format;
|
||||
module.exports = function (plugin, server) {
|
||||
var config = server.config();
|
||||
var client = server.plugins.elasticsearch.client;
|
||||
|
||||
plugin.status.yellow('Waiting for Elasticsearch');
|
||||
|
||||
|
||||
function waitForPong() {
|
||||
return client.ping({ requestTimeout: 1500 }).catch(function (err) {
|
||||
if (!(err instanceof NoConnections)) throw err;
|
||||
|
||||
plugin.status.red(format('Unable to connect to Elasticsearch at %s. Retrying in 2.5 seconds.', config.get('elasticsearch.url')));
|
||||
|
||||
return Promise.delay(2500).then(waitForPong);
|
||||
});
|
||||
}
|
||||
|
||||
function waitForShards() {
|
||||
return client.cluster.health({
|
||||
timeout: '5s', // tells es to not sit around and wait forever
|
||||
index: config.get('kibana.index')
|
||||
})
|
||||
.then(function (resp) {
|
||||
// if "timed_out" === true then elasticsearch could not
|
||||
// find any indices matching our filter within 5 seconds
|
||||
if (resp.timed_out) {
|
||||
plugin.status.yellow('No existing Kibana index found');
|
||||
return createKibanaIndex(server);
|
||||
}
|
||||
|
||||
// If status === "red" that means that index(es) were found
|
||||
// but the shards are not ready for queries
|
||||
if (resp.status === 'red') {
|
||||
plugin.status.red('Elasticsearch is still initializing the kibana index... Trying again in 2.5 seconds.');
|
||||
return Promise.delay(2500).then(waitForShards);
|
||||
}
|
||||
|
||||
// otherwise we are good to go
|
||||
plugin.status.green('Kibana index ready');
|
||||
});
|
||||
}
|
||||
|
||||
var running = false;
|
||||
|
||||
function runHealthCheck() {
|
||||
if (running) {
|
||||
setTimeout(function () {
|
||||
healthCheck()
|
||||
.then(runHealthCheck)
|
||||
.catch(function (err) {
|
||||
server.log('error', err);
|
||||
runHealthCheck();
|
||||
});
|
||||
}, 2500);
|
||||
}
|
||||
}
|
||||
|
||||
function healthCheck() {
|
||||
return waitForPong()
|
||||
.then(checkEsVersion(server))
|
||||
.then(waitForShards)
|
||||
.then(migrateConfig(server));
|
||||
}
|
||||
|
||||
return {
|
||||
isRunning: function () {
|
||||
return running;
|
||||
},
|
||||
run: function () {
|
||||
return healthCheck();
|
||||
},
|
||||
start: function () {
|
||||
running = true;
|
||||
return healthCheck().then(runHealthCheck, runHealthCheck);
|
||||
},
|
||||
stop: function () {
|
||||
running = false;
|
||||
}
|
||||
};
|
||||
|
||||
};
|
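A sketch of how the returned object is driven, mirroring the elasticsearch plugin's init above (plugin and server are assumed to already be in scope): start() runs one check and then keeps polling every 2.5 seconds, while stop() clears the flag so polling winds down after the in-flight check.

// Hypothetical wiring.
var healthCheck = require('./lib/health_check'); // path assumed

var checker = healthCheck(plugin, server);
checker.start();                  // check now, then poll on a 2.5s timer
console.log(checker.isRunning()); // true

// later, e.g. during shutdown:
checker.stop();                   // no further checks are scheduled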
|
@ -1,18 +1,18 @@
|
|||
var config = require('../config');
|
||||
var semver = require('semver');
|
||||
var rcVersionRegex = /(\d+\.\d+\.\d+)\-rc(\d+)/i;
|
||||
|
||||
module.exports = function (doc) {
|
||||
module.exports = function (server, doc) {
|
||||
var config = server.config();
|
||||
if (/beta|snapshot/i.test(doc._id)) return false;
|
||||
if (!doc._id) return false;
|
||||
if (doc._id === config.package.version) return false;
|
||||
if (doc._id === config.get('kibana.package.version')) return false;
|
||||
|
||||
var packageRcRelease = Infinity;
|
||||
var rcRelease = Infinity;
|
||||
var packageVersion = config.package.version;
|
||||
var packageVersion = config.get('kibana.package.version');
|
||||
var version = doc._id;
|
||||
var matches = doc._id.match(rcVersionRegex);
|
||||
var packageMatches = config.package.version.match(rcVersionRegex);
|
||||
var packageMatches = config.get('kibana.package.version').match(rcVersionRegex);
|
||||
|
||||
if (matches) {
|
||||
version = matches[1];
|
20
src/server/plugins/elasticsearch/lib/map_uri.js
Normal file
|
@ -0,0 +1,20 @@
|
|||
var querystring = require('querystring');
|
||||
var resolve = require('url').resolve;
|
||||
module.exports = function mapUri(server, prefix) {
|
||||
var config = server.config();
|
||||
return function (request, done) {
|
||||
var paths = request.params.paths;
|
||||
if (!paths) {
|
||||
paths = request.path.replace('/elasticsearch', '');
|
||||
}
|
||||
if (prefix) {
|
||||
paths = prefix + '/' + paths;
|
||||
}
|
||||
var url = config.get('elasticsearch.url');
|
||||
if (!/\/$/.test(url)) url += '/';
|
||||
if (paths) url = resolve(url, paths);
|
||||
var query = querystring.stringify(request.query);
|
||||
if (query) url += '?' + query;
|
||||
done(null, url);
|
||||
};
|
||||
};
|
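A worked example of the mapping, with the URL and index name chosen purely for illustration:

// elasticsearch.url = 'http://localhost:9200'
// GET /elasticsearch/logstash-*/_search?size=10
//   request.params.paths = 'logstash-*/_search'
//   -> done(null, 'http://localhost:9200/logstash-*/_search?size=10')
//
// With prefix '/.kibana' (as the kibana-index proxy above passes in)
// and request.params.paths = 'config/4.2.0':
//   -> done(null, 'http://localhost:9200/.kibana/config/4.2.0')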
29
src/server/plugins/elasticsearch/lib/migrate_config.js
Normal file
|
@ -0,0 +1,29 @@
|
|||
var upgrade = require('./upgrade_config');
|
||||
|
||||
module.exports = function (server) {
|
||||
return function () {
|
||||
var config = server.config();
|
||||
var client = server.plugins.elasticsearch.client;
|
||||
var options = {
|
||||
index: config.get('kibana.index'),
|
||||
type: 'config',
|
||||
body: {
|
||||
size: 1000,
|
||||
sort: [ { buildNum: { order: 'desc' } } ],
|
||||
query: {
|
||||
filtered: {
|
||||
filter: {
|
||||
bool: {
|
||||
must_not: [ { query: { match: { _id: '@@version' } } } ]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return client.search(options).then(upgrade(server));
|
||||
};
|
||||
};
|
||||
|
||||
|
18
src/server/plugins/elasticsearch/lib/setup_error.js
Normal file
|
@ -0,0 +1,18 @@
|
|||
var _ = require('lodash');
|
||||
var util = require('util');
|
||||
|
||||
function SetupError(server, template, err) {
|
||||
var config = server.config().get();
|
||||
// don't override other setup errors
|
||||
if (err && err instanceof SetupError) return err;
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.name = this.constructor.name;
|
||||
this.message = _.template(template, config);
|
||||
if (err) {
|
||||
this.origError = err;
|
||||
if (err.stack) this.stack = err.stack;
|
||||
}
|
||||
}
|
||||
util.inherits(SetupError, Error);
|
||||
module.exports = SetupError;
|
||||
|
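A brief sketch of how SetupError is meant to be thrown, matching the create_kibana_index usage above; the promise and message here are placeholders, and "<%= kibana.index %>" is interpolated from server.config().get() via lodash templates.

// Hypothetical usage inside a setup step.
var SetupError = require('./lib/setup_error'); // path assumed

somePromise.catch(function (err) {
  // wraps the original error and fills in the index name from config
  throw new SetupError(server, 'Unable to reach index "<%= kibana.index %>"', err);
});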
42
src/server/plugins/elasticsearch/lib/upgrade_config.js
Normal file
|
@ -0,0 +1,42 @@
|
|||
var Promise = require('bluebird');
|
||||
var isUpgradeable = require('./is_upgradeable');
|
||||
var _ = require('lodash');
|
||||
var format = require('util').format;
|
||||
module.exports = function (server) {
|
||||
var client = server.plugins.elasticsearch.client;
|
||||
var config = server.config();
|
||||
return function (response) {
|
||||
var newConfig = {};
|
||||
// Check to see if there are any docs. If not then we can assume
|
||||
// nothing needs to be done
|
||||
if (response.hits.hits.length === 0) return Promise.resolve();
|
||||
|
||||
// if we already have the current version in the index then we need to stop
|
||||
if (_.find(response.hits.hits, { _id: config.get('kibana.package.version') })) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
// Look for upgradeable configs. If none of them are upgradeable
|
||||
// then resolve with null.
|
||||
var body = _.find(response.hits.hits, isUpgradeable.bind(null, server));
|
||||
if (!body) return Promise.resolve();
|
||||
|
||||
|
||||
// if the build number is still the template string (which it will be in development)
|
||||
// then we need to set it to the max integer. Otherwise we will set it to the build num
|
||||
body._source.buildNum = Math.pow(2, 53) - 1;
|
||||
if (!/^@@/.test(config.get('kibana.buildNum'))) {
|
||||
body._source.buildNum = parseInt(config.get('kibana.buildNum'), 10);
|
||||
}
|
||||
|
||||
var logMsg = format('[ elasticsearch ] Upgrade config from %s to %s', body._id, config.get('kibana.package.version'));
|
||||
server.log('plugin', logMsg);
|
||||
return client.create({
|
||||
index: config.get('kibana.index'),
|
||||
type: 'config',
|
||||
body: body._source,
|
||||
id: config.get('kibana.package.version')
|
||||
});
|
||||
};
|
||||
};
|
||||
|
|
@ -1,22 +1,22 @@
|
|||
var _ = require('lodash');
|
||||
var config = require('../config');
|
||||
var parse = require('url').parse;
|
||||
|
||||
validate.Fail = function () {
|
||||
this.message = 'Kibana only support modifying the "' + config.kibana.kibana_index +
|
||||
validate.Fail = function (index) {
|
||||
this.message = 'Kibana only supports modifying the "' + index +
|
||||
'" index. Requests that might modify other indicies are not sent to elasticsearch.';
|
||||
};
|
||||
|
||||
validate.BadIndex = function (index) {
|
||||
validate.Fail.call(this);
|
||||
validate.Fail.call(this, index);
|
||||
this.message = 'Bad index "' + index + '" in request. ' + this.message;
|
||||
};
|
||||
|
||||
function validate(req) {
|
||||
function validate(server, req) {
|
||||
var config = server.config();
|
||||
var method = req.method.toUpperCase();
|
||||
if (method === 'GET' || method === 'HEAD') return true;
|
||||
|
||||
var segments = _.compact(parse(req.url).pathname.split('/'));
|
||||
var segments = _.compact(parse(req.path).pathname.split('/'));
|
||||
var maybeIndex = _.first(segments);
|
||||
var maybeMethod = _.last(segments);
|
||||
|
||||
|
@ -24,9 +24,9 @@ function validate(req) {
|
|||
var rem = (method === 'DELETE');
|
||||
|
||||
// everything below this point assumes a destructive request of some sort
|
||||
if (!add && !rem) throw new validate.Fail();
|
||||
if (!add && !rem) throw new validate.Fail(config.get('kibana.index'));
|
||||
|
||||
var bodyStr = String(req.rawBody);
|
||||
var bodyStr = String(req.payload);
|
||||
var jsonBody = bodyStr && parseJson(bodyStr);
|
||||
var bulkBody = bodyStr && parseBulk(bodyStr);
|
||||
|
||||
|
@ -40,7 +40,7 @@ function validate(req) {
|
|||
var maybeMsearch = ('_msearch' === maybeMethod && add && bulkBody);
|
||||
|
||||
// indication that this request is against kibana
|
||||
var maybeKibanaIndex = (maybeIndex === config.kibana.kibana_index);
|
||||
var maybeKibanaIndex = (maybeIndex === config.get('kibana.index'));
|
||||
|
||||
if (!maybeBulk) validateNonBulkDestructive();
|
||||
else validateBulkBody(bulkBody);
|
||||
|
@ -66,7 +66,7 @@ function validate(req) {
|
|||
var body = new Array(parts.length);
|
||||
for (var i = 0; i < parts.length; i++) {
|
||||
var part = parseJson(parts[i]);
|
||||
if (!part) throw new validate.Fail();
|
||||
if (!part) throw new validate.Fail(config.get('kibana.index'));
|
||||
|
||||
body[i] = part;
|
||||
}
|
||||
|
@ -87,7 +87,7 @@ function validate(req) {
|
|||
// allow bulk bodies sent to _msearch
|
||||
if (maybeMsearch) return;
|
||||
|
||||
throw new validate.Fail();
|
||||
throw new validate.Fail(config.get('kibana.index'));
|
||||
}
|
||||
|
||||
function validateBulkBody(body) {
|
||||
|
@ -98,14 +98,15 @@ function validate(req) {
|
|||
var op = _.keys(header).join('');
|
||||
var meta = header[op];
|
||||
|
||||
if (!meta) throw new validate.Fail();
|
||||
if (!meta) throw new validate.Fail(config.get('kibana.index'));
|
||||
|
||||
var index = meta._index || maybeIndex;
|
||||
if (index !== config.kibana.kibana_index) {
|
||||
if (index !== config.get('kibana.index')) {
|
||||
throw new validate.BadIndex(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = validate;
|
||||
module.exports = validate;
|
||||
|
139
src/server/plugins/elasticsearch/lib/version_math.js
Normal file
|
@ -0,0 +1,139 @@
|
|||
var _ = require('lodash');
|
||||
|
||||
function VersionMathException(message) {
|
||||
this.message = message;
|
||||
this.name = 'VersionMathException';
|
||||
}
|
||||
|
||||
// Determine if a specific version meets the minimum requirement
|
||||
var compare = function (required, installed) {
|
||||
if (_.isUndefined(installed)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!required || !installed) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
var a = installed.split('.');
|
||||
var b = required.split('.');
|
||||
var i;
|
||||
|
||||
// leave suffixes as-is (e.g. "RC1" or "-SNAPSHOT")
|
||||
for (i = 0; i < Math.min(a.length, 3); ++i) {
|
||||
a[i] = Number(a[i]);
|
||||
}
|
||||
for (i = 0; i < Math.min(b.length, 3); ++i) {
|
||||
b[i] = Number(b[i]);
|
||||
}
|
||||
if (a.length === 2) {
|
||||
a[2] = 0;
|
||||
}
|
||||
|
||||
if (a[0] > b[0]) { return true; }
|
||||
if (a[0] < b[0]) { return false; }
|
||||
|
||||
if (a[1] > b[1]) { return true; }
|
||||
if (a[1] < b[1]) { return false; }
|
||||
|
||||
if (a[2] > b[2]) { return true; }
|
||||
if (a[2] < b[2]) { return false; }
|
||||
|
||||
if (a.length > 3) {
|
||||
// rc/beta suffix
|
||||
if (b.length <= 3) {
|
||||
return false;
|
||||
} // no suffix on b -> a<b
|
||||
return a[3] >= b[3];
|
||||
}
|
||||
if (b.length > 3) {
|
||||
// b has a suffix but a not -> a>b
|
||||
return true;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
// Sort versions from lowest to highest
|
||||
var sortVersions = function (versions) {
|
||||
if (!_.isArray(versions)) versions = [versions];
|
||||
|
||||
return _.uniq(versions).sort(function (a, b) {
|
||||
return compare(a, b) ? -1 : 1;
|
||||
});
|
||||
};
|
||||
|
||||
// Get the max version in this cluster
|
||||
var max = function (versions) {
|
||||
return sortVersions(versions).pop();
|
||||
};
|
||||
|
||||
// Return the lowest version in the cluster
|
||||
var min = function (versions) {
|
||||
return sortVersions(versions).shift();
|
||||
};
|
||||
|
||||
// Check if the lowest version in the cluster is >= to `version`
|
||||
var gte = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return compare(version, min(_versions));
|
||||
};
|
||||
|
||||
// Check if the highest version in the cluster is <= to `version`
|
||||
var lte = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return compare(max(_versions), version);
|
||||
};
|
||||
|
||||
// check if lowest version in cluster = `version`
|
||||
var eq = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return version === min(_versions) ? true : false;
|
||||
};
|
||||
|
||||
// version > lowest version in cluster?
|
||||
var gt = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return version === min(_versions) ? false : gte(version, _versions);
|
||||
};
|
||||
|
||||
// version < highest version in cluster?
|
||||
var lt = function (version, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
return version === max(_versions) ? false : lte(version, _versions);
|
||||
};
|
||||
|
||||
/*
|
||||
Takes a version string with one of the following optional comparison prefixes: >, >=, <, <=
|
||||
and evaluates if the cluster meets the requirement. If the prefix is omitted exact match
|
||||
is assumed
|
||||
*/
|
||||
var is = function (equation, versions) {
|
||||
var _versions = sortVersions(versions);
|
||||
var _v = equation;
|
||||
var _cf;
|
||||
|
||||
if (_v.charAt(0) === '>') {
|
||||
_cf = _v.charAt(1) === '=' ? gte(_v.slice(2), _versions) : gt(_v.slice(1), _versions);
|
||||
} else if (_v.charAt(0) === '<') {
|
||||
_cf = _v.charAt(1) === '=' ? lte(_v.slice(2), _versions) : lt(_v.slice(1), _versions);
|
||||
} else {
|
||||
_cf = eq(_v, _versions);
|
||||
}
|
||||
|
||||
return _cf;
|
||||
};
|
||||
|
||||
|
||||
|
||||
module.exports = {
|
||||
min: min,
|
||||
max: max,
|
||||
is: is,
|
||||
eq: eq,
|
||||
gt: gt,
|
||||
gte: gte,
|
||||
lt: lt,
|
||||
lte: lte
|
||||
};
|
||||
|
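A few illustrative checks against a made-up node version list, showing how the optional prefixes are parsed:

var versionMath = require('./lib/version_math'); // path assumed
var clusterVersions = ['1.4.4', '1.5.2', '1.6.0'];

versionMath.min(clusterVersions);            // '1.4.4'
versionMath.gte('1.4.4', clusterVersions);   // true, every node meets 1.4.4
versionMath.is('>=1.4.4', clusterVersions);  // true
versionMath.is('>1.4.4', clusterVersions);   // false, the lowest node is exactly 1.4.4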
16
src/server/plugins/static/index.js
Normal file
|
@ -0,0 +1,16 @@
|
|||
var kibana = require('../../');
|
||||
|
||||
module.exports = new kibana.Plugin({
|
||||
init: function (server, options) {
|
||||
var config = server.config();
|
||||
server.route({
|
||||
method: 'GET',
|
||||
path: '/{param*}',
|
||||
handler: {
|
||||
directory: {
|
||||
path: config.get('kibana.publicFolder')
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
60
src/server/plugins/status/index.js
Normal file
|
@ -0,0 +1,60 @@
|
|||
var join = require('path').join;
|
||||
var kibana = require('../../');
|
||||
var status = require('../../lib/status');
|
||||
var Series = require('./lib/series');
|
||||
|
||||
module.exports = new kibana.Plugin({
|
||||
|
||||
init: function (server, options) {
|
||||
|
||||
var config = server.config();
|
||||
|
||||
var fiveMinuteData = {
|
||||
rss: new Series(60),
|
||||
heapTotal: new Series(60),
|
||||
heapUsed: new Series(60),
|
||||
load: new Series(60),
|
||||
delay: new Series(60),
|
||||
concurrency: new Series(60),
|
||||
responseTimeAvg: new Series(60),
|
||||
responseTimeMax: new Series(60),
|
||||
requests: new Series(60),
|
||||
};
|
||||
|
||||
server.plugins.good.monitor.on('ops', function (event) {
|
||||
var port = String(config.get('kibana.server.port'));
|
||||
fiveMinuteData.rss.push(event.psmem.rss);
|
||||
fiveMinuteData.heapTotal.push(event.psmem.heapTotal);
|
||||
fiveMinuteData.heapUsed.push(event.psmem.heapUsed);
|
||||
fiveMinuteData.load.push(event.osload);
|
||||
fiveMinuteData.delay.push(event.psdelay);
|
||||
fiveMinuteData.concurrency.push(parseInt(event.concurrents[port], 10));
|
||||
if (event.responseTimes[port]) {
|
||||
var responseTimeAvg = event.responseTimes[port].avg;
|
||||
if (isNaN(responseTimeAvg)) responseTimeAvg = 0;
|
||||
fiveMinuteData.responseTimeAvg.push(responseTimeAvg);
|
||||
fiveMinuteData.responseTimeMax.push(event.responseTimes[port].max);
|
||||
} else {
|
||||
fiveMinuteData.responseTimeAvg.push(0);
|
||||
fiveMinuteData.responseTimeMax.push(0);
|
||||
}
|
||||
if (event.requests[port]) {
|
||||
fiveMinuteData.requests.push(event.requests[port].total);
|
||||
} else {
|
||||
fiveMinuteData.requests.push(0);
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
method: 'GET',
|
||||
path: '/status/health',
|
||||
handler: function (request, reply) {
|
||||
return reply({
|
||||
metrics: fiveMinuteData,
|
||||
status: status
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
});
|
15
src/server/plugins/status/lib/series.js
Normal file
|
@ -0,0 +1,15 @@
|
|||
function Series(size) {
|
||||
this.size = size;
|
||||
this.data = [];
|
||||
}
|
||||
|
||||
Series.prototype.push = function (value) {
|
||||
this.data.unshift([Date.now(), value]);
|
||||
if (this.data.length > this.size) this.data.pop();
|
||||
};
|
||||
|
||||
Series.prototype.toJSON = function () {
|
||||
return this.data;
|
||||
};
|
||||
|
||||
module.exports = Series;
|
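A tiny sketch of the ring-buffer behaviour: samples are unshifted onto the front as [timestamp, value] pairs and the array is capped at size entries.

// Illustrative only.
var Series = require('./lib/series'); // path assumed
var s = new Series(3);

s.push(1); s.push(2); s.push(3); s.push(4);
// s.toJSON() now holds the 3 newest samples, newest first:
// [ [t4, 4], [t3, 3], [t2, 2] ]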
9
src/server/plugins/status/public/index.html
Normal file
|
@ -0,0 +1,9 @@
|
|||
<html>
|
||||
<head>
|
||||
<title>Kibana Status</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Kibana Status Page</h1>
|
||||
<p>Statusy stuff goes here... it's going to be totally awesome!</p>
|
||||
</body>
|
||||
</html>
|
|
@ -1,17 +0,0 @@
|
|||
var express = require('express');
|
||||
var router = express.Router();
|
||||
var config = require('../config');
|
||||
var _ = require('lodash');
|
||||
|
||||
router.get('/config', function (req, res, next) {
|
||||
var keys = [
|
||||
'kibana_index',
|
||||
'default_app_id',
|
||||
'shard_timeout'
|
||||
];
|
||||
var data = _.pick(config.kibana, keys);
|
||||
data.plugins = config.plugins;
|
||||
res.json(data);
|
||||
});
|
||||
|
||||
module.exports = router;
|
|
@ -1,134 +0,0 @@
|
|||
var config = require('../config');
|
||||
var request = require('request');
|
||||
var buffer = require('buffer');
|
||||
var querystring = require('querystring');
|
||||
var express = require('express');
|
||||
var _ = require('lodash');
|
||||
var fs = require('fs');
|
||||
var url = require('url');
|
||||
var target = url.parse(config.elasticsearch);
|
||||
var join = require('path').join;
|
||||
var logger = require('../lib/logger');
|
||||
var validateRequest = require('../lib/validateRequest');
|
||||
|
||||
|
||||
// If the target is backed by an SSL and a CA is provided via the config
|
||||
// then we need to inject the CA
|
||||
var customCA;
|
||||
if (/^https/.test(target.protocol) && config.kibana.ca) {
|
||||
customCA = fs.readFileSync(config.kibana.ca, 'utf8');
|
||||
}
|
||||
// Add client certificate and key if required by elasticsearch
|
||||
var clientCrt;
|
||||
var clientKey;
|
||||
if (/^https/.test(target.protocol) && config.kibana.kibana_elasticsearch_client_crt && config.kibana.kibana_elasticsearch_client_key) {
|
||||
clientCrt = fs.readFileSync(config.kibana.kibana_elasticsearch_client_crt, 'utf8');
|
||||
clientKey = fs.readFileSync(config.kibana.kibana_elasticsearch_client_key, 'utf8');
|
||||
}
|
||||
|
||||
// Create the router
|
||||
var router = module.exports = express.Router();
|
||||
|
||||
// We need to capture the raw body before moving on
|
||||
router.use(function (req, res, next) {
|
||||
var chunks = [];
|
||||
req.on('data', function (chunk) {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
req.on('end', function () {
|
||||
req.rawBody = Buffer.concat(chunks);
|
||||
next();
|
||||
});
|
||||
});
|
||||
|
||||
router.use(function (req, res, next) {
|
||||
try {
|
||||
validateRequest(req);
|
||||
return next();
|
||||
} catch (err) {
|
||||
logger.error({ req: req }, err.message || 'Bad Request');
|
||||
res.status(403).send(err.message || 'Bad Request');
|
||||
}
|
||||
});
|
||||
|
||||
function getPort(req) {
|
||||
var matches = req.headers.host.match(/:(\d+)/);
|
||||
if (matches) return matches[1];
|
||||
return req.connection.pair ? '443' : '80';
|
||||
}
|
||||
|
||||
// Create the proxy middleware
|
||||
router.use(function (req, res, next) {
|
||||
var uri = _.defaults({}, target);
|
||||
|
||||
// Add a slash to the end of the URL so resolve doesn't remove it.
|
||||
var path = (/\/$/.test(uri.path)) ? uri.path : uri.path + '/';
|
||||
path = url.resolve(path, '.' + req.url);
|
||||
|
||||
if (uri.auth) {
|
||||
var auth = new Buffer(uri.auth);
|
||||
req.headers.authorization = 'Basic ' + auth.toString('base64');
|
||||
}
|
||||
|
||||
var options = {
|
||||
url: config.elasticsearch + path,
|
||||
method: req.method,
|
||||
headers: _.defaults({}, req.headers),
|
||||
strictSSL: config.kibana.verify_ssl,
|
||||
timeout: config.request_timeout
|
||||
};
|
||||
|
||||
options.headers['x-forward-for'] = req.connection.remoteAddress || req.socket.remoteAddress;
|
||||
options.headers['x-forward-port'] = getPort(req);
|
||||
options.headers['x-forward-proto'] = req.connection.pair ? 'https' : 'http';
|
||||
|
||||
// If the server has a custom CA we need to add it to the agent options
|
||||
if (customCA) {
|
||||
options.agentOptions = { ca: [customCA] };
|
||||
}
|
||||
|
||||
// Add client key and certificate for elasticsearch if needed.
|
||||
if (clientCrt && clientKey) {
|
||||
if (!options.agentOptions) {
|
||||
options.agentOptions = {};
|
||||
}
|
||||
options.agentOptions.cert = clientCrt;
|
||||
options.agentOptions.key = clientKey;
|
||||
}
|
||||
|
||||
// Only send the body if it's a PATCH, PUT, or POST
|
||||
if (req.rawBody) {
|
||||
options.headers['content-length'] = req.rawBody.length;
|
||||
options.body = req.rawBody.toString('utf8');
|
||||
} else {
|
||||
options.headers['content-length'] = 0;
|
||||
}
|
||||
|
||||
// To support the elasticsearch_preserve_host feature we need to change the
|
||||
// host header to the target host header. I don't quite understand the value
|
||||
// of this... but it's a feature we had before so I guess we are keeping it.
|
||||
if (config.kibana.elasticsearch_preserve_host) {
|
||||
options.headers.host = target.host;
|
||||
}
|
||||
|
||||
// Create the request and pipe the response
|
||||
var esRequest = request(options);
|
||||
esRequest.on('error', function (err) {
|
||||
logger.error({ err: err });
|
||||
var code = 502;
|
||||
var body = { message: 'Bad Gateway' };
|
||||
|
||||
if (err.code === 'ECONNREFUSED') {
|
||||
body.message = 'Unable to connect to Elasticsearch';
|
||||
}
|
||||
|
||||
if (err.message === 'DEPTH_ZERO_SELF_SIGNED_CERT') {
|
||||
body.message = 'SSL handshake with Elasticsearch failed';
|
||||
}
|
||||
|
||||
body.err = err.message;
|
||||
if (!res.headersSent) res.status(code).json(body);
|
||||
});
|
||||
esRequest.pipe(res);
|
||||
});
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
extends layout
|
||||
|
||||
block content
|
||||
h1= message
|
||||
h2= error.status
|
||||
pre #{error.stack}
|
|
@ -1,7 +0,0 @@
|
|||
doctype html
|
||||
html
|
||||
head
|
||||
title= title
|
||||
link(rel='stylesheet', href='/styles/main.css')
|
||||
body
|
||||
block content
|
|
@ -42,6 +42,12 @@ module.exports = function (grunt) {
|
|||
src: '**',
|
||||
dest: '<%= build %>/kibana/lib'
|
||||
},
|
||||
{
|
||||
expand: true,
|
||||
cwd: '<%= server %>/plugins/',
|
||||
src: '**',
|
||||
dest: '<%= build %>/kibana/plugins'
|
||||
},
|
||||
{
|
||||
expand: true,
|
||||
cwd: '<%= server %>/routes/',
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
var config = require('../utils/server-config');
|
||||
var unitTestUrl = require('util').format('http://localhost:%d/test/unit/', config.kibana.port);
|
||||
var unitTestUrl = require('util').format('http://localhost:%d/test/unit/', config.get('kibana.server.port'));
|
||||
|
||||
module.exports = {
|
||||
options: {
|
||||
|
|
|
@ -1,18 +1,21 @@
|
|||
module.exports = function (grunt) {
|
||||
grunt.registerTask('kibana_server', function (keepalive) {
|
||||
var done = this.async();
|
||||
var config = require('../src/server/config');
|
||||
config.quiet = !grunt.option('debug') && !grunt.option('verbose');
|
||||
var Kibana = require('../');
|
||||
var devStatics = require('./utils/dev_statics');
|
||||
var quiet = !grunt.option('debug') && !grunt.option('verbose');
|
||||
var port = grunt.option('port');
|
||||
var settings = { 'logging.quiet': quiet };
|
||||
if (grunt.option('port')) {
|
||||
config.port = config.kibana.port = grunt.option('port');
|
||||
settings['kibana.server.port'] = grunt.option('port');
|
||||
}
|
||||
var server = require('../src/server');
|
||||
|
||||
server.start(function (err) {
|
||||
if (err) return done(err);
|
||||
grunt.log.ok('Server started on port', config.kibana.port);
|
||||
var kibana = new Kibana(settings, [devStatics]);
|
||||
kibana.listen().then(function (server) {
|
||||
grunt.log.ok('Server started: ' + server.info.uri);
|
||||
if (keepalive !== 'keepalive') done();
|
||||
});
|
||||
}).catch(done);
|
||||
|
||||
});
|
||||
};
|
||||
|
||||
|
|
|
@ -55,7 +55,7 @@ module.exports = function (grunt) {
|
|||
|
||||
grunt.registerTask('maybe_start_kibana', maybeStartServer({
|
||||
name: 'kibana-server',
|
||||
port: grunt.option('port') || config.kibana.port,
|
||||
port: grunt.option('port') || config.get('kibana.server.port'),
|
||||
tasks: ['kibana_server']
|
||||
}));
|
||||
};
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
var os = require('os');
|
||||
var config = require('./utils/server-config');
|
||||
|
||||
module.exports = function (grunt) {
|
||||
grunt.registerTask('run_build', [
|
||||
|
|
83
tasks/utils/dev_statics/index.js
Normal file
|
@ -0,0 +1,83 @@
|
|||
var root = require('requirefrom')('');
|
||||
var kibana = root('');
|
||||
var path = require('path');
|
||||
var glob = require('glob');
|
||||
var join = path.join;
|
||||
var rel = join.bind(null, __dirname);
|
||||
var ROOT = rel('../../../');
|
||||
var SRC = join(ROOT, 'src');
|
||||
var NODE_MODULES = join(ROOT, 'node_modules');
|
||||
var APP = join(SRC, 'kibana');
|
||||
var TEST = join(ROOT, 'test');
|
||||
var istanbul = require('./lib/istanbul');
|
||||
var amdWrapper = require('./lib/amd_wrapper');
|
||||
var kibanaSrcFilter = require('./lib/kibana_src_filter');
|
||||
|
||||
module.exports = new kibana.Plugin({
|
||||
name: 'dev_statics',
|
||||
|
||||
init: function (server, options) {
|
||||
|
||||
|
||||
server.ext('onPreHandler', istanbul({ root: SRC, displayRoot: SRC, filter: kibanaSrcFilter }));
|
||||
server.ext('onPreHandler', istanbul({ root: APP, displayRoot: SRC, filter: kibanaSrcFilter }));
|
||||
|
||||
server.route({
|
||||
path: '/test/{paths*}',
|
||||
method: 'GET',
|
||||
handler: {
|
||||
directory: {
|
||||
path: TEST
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
path: '/amd-wrap/{paths*}',
|
||||
method: 'GET',
|
||||
handler: amdWrapper({ root: ROOT })
|
||||
});
|
||||
|
||||
server.route({
|
||||
path: '/src/{paths*}',
|
||||
method: 'GET',
|
||||
handler: {
|
||||
directory: {
|
||||
path: SRC
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
path: '/node_modules/{paths*}',
|
||||
method: 'GET',
|
||||
handler: {
|
||||
directory: {
|
||||
path: NODE_MODULES
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
server.route({
|
||||
path: '/specs',
|
||||
method: 'GET',
|
||||
handler: function (request, reply) {
|
||||
var unit = join(ROOT, '/test/unit/');
|
||||
glob(join(unit, 'specs/**/*.js'), function (er, files) {
|
||||
var moduleIds = files
|
||||
.filter(function (filename) {
|
||||
return path.basename(filename).charAt(0) !== '_';
|
||||
})
|
||||
.map(function (filename) {
|
||||
return path.relative(unit, filename).replace(/\\/g, '/').replace(/\.js$/, '');
|
||||
});
|
||||
|
||||
return reply(moduleIds);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
|
||||
});
|
|
@ -1,5 +1,4 @@
|
|||
module.exports = function amdRapMiddleware(opts) {
|
||||
opts = opts || {};
|
||||
module.exports = function (opts) {
|
||||
|
||||
var root = opts.root || '/';
|
||||
var path = require('path');
|
||||
|
@ -14,25 +13,32 @@ module.exports = function amdRapMiddleware(opts) {
|
|||
'grocery bag...'
|
||||
];
|
||||
|
||||
return function (req, res, next) {
|
||||
return function (request, reply) {
|
||||
// only allow prefixed requests
|
||||
if (req.url.substring(0, pathPrefix.length) !== pathPrefix) return next();
|
||||
if (request.path.substring(0, pathPrefix.length) !== pathPrefix) return reply.continue();
|
||||
|
||||
// strip the prefix and form the filename
|
||||
var filename = path.join(root, req._parsedUrl.pathname.replace('/amd-wrap/', ''));
|
||||
var filename = path.join(root, request.path.replace('/amd-wrap/', ''));
|
||||
|
||||
fs.readFile(filename, 'utf8', function (err, contents) {
|
||||
// file does not exist
|
||||
if (err) return next(err.code === 'ENOENT' ? void 0 : err);
|
||||
if (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return reply.continue();
|
||||
}
|
||||
return reply(err);
|
||||
}
|
||||
|
||||
// respond with the wrapped code
|
||||
res.statusCode = 200;
|
||||
res.setHeader('Content-Type', 'application/javascript');
|
||||
res.end([
|
||||
var source = [
|
||||
'define(function (require, exports, module) { console.log("' + random(rap) + '");',
|
||||
contents,
|
||||
'\n});'
|
||||
].join('\n'));
|
||||
].join('\n');
|
||||
|
||||
return reply(source).code(200).type('application/javascript');
|
||||
});
|
||||
|
||||
};
|
||||
};
|
||||
|
||||
};
|
|
@ -25,11 +25,11 @@ module.exports = function instrumentationMiddleware(opts) {
|
|||
// cache filename resolution
|
||||
var fileMap = {};
|
||||
|
||||
function filenameForReq(req) {
|
||||
if (!req._parsedUrl.query || !~req._parsedUrl.query.indexOf('instrument')) return false;
|
||||
function filenameForReq(request) {
|
||||
if (request.query.instrument == null) return false;
|
||||
|
||||
// expected absolute path to the file
|
||||
var filename = path.join(root, req._parsedUrl.pathname);
|
||||
var filename = path.join(root, request.path);
|
||||
|
||||
// shortcut for dev where we could be reloading on every save
|
||||
if (fileMap[filename] !== void 0) return fileMap[filename];
|
||||
|
@ -45,42 +45,48 @@ module.exports = function instrumentationMiddleware(opts) {
|
|||
return ret;
|
||||
}
|
||||
|
||||
return function (req, res, next) {
|
||||
return function (request, reply) {
|
||||
// resolve the request to a readable filename
|
||||
var filename = filenameForReq(req);
|
||||
var filename = filenameForReq(request);
|
||||
// the file either doesn't exist of it was filtered out by opts.filter
|
||||
if (!filename) return next();
|
||||
if (!filename) return reply.continue();
|
||||
|
||||
|
||||
fs.stat(filename, function (err, stat) {
|
||||
if (err && err.code !== 'ENOENT') return next(err);
|
||||
if (err && err.code !== 'ENOENT') return reply(err).takeover();
|
||||
|
||||
if (err || !stat.isFile()) {
|
||||
// file was deleted, clear cache and move on
|
||||
delete fileMap[filename];
|
||||
return next();
|
||||
return reply.continue();
|
||||
}
|
||||
|
||||
var etag = '"' + stat.size + '-' + Number(stat.mtime) + '"';
|
||||
if (req.headers['if-none-match'] === etag) {
|
||||
res.statusCode = 304;
|
||||
res.end();
|
||||
return;
|
||||
if (request.headers['if-none-match'] === etag) {
|
||||
return reply('').code(304).takeover();
|
||||
}
|
||||
|
||||
fs.readFile(filename, 'utf8', function (err, content) {
|
||||
if (err) return next(err);
|
||||
|
||||
res.statusCode = 200;
|
||||
res.setHeader('Content-Type', 'application/javascript');
|
||||
res.setHeader('ETag', etag);
|
||||
res.end(i.instrumentSync(
|
||||
fs.readFile(filename, 'utf8', function (err, content) {
|
||||
if (err) {
|
||||
return reply(err).takeover();
|
||||
}
|
||||
|
||||
var source = i.instrumentSync(
|
||||
content,
|
||||
// make file names easier to read
|
||||
displayRoot ? path.relative(displayRoot, filename) : filename
|
||||
));
|
||||
);
|
||||
|
||||
return reply(source)
|
||||
.code(200)
|
||||
.type('application/javascript')
|
||||
.etag(etag)
|
||||
.takeover();
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
};
|
||||
};
|
||||
|
5
tasks/utils/dev_statics/lib/kibana_src_filter.js
Normal file
|
@ -0,0 +1,5 @@
|
|||
module.exports = function (filename) {
|
||||
return filename.match(/.*\/src\/.*\.js$/)
|
||||
&& !filename.match(/.*\/src\/kibana\/bower_components\/.*\.js$/)
|
||||
&& !filename.match(/.*\/src\/kibana\/utils\/(event_emitter|rison)\.js$/);
|
||||
};
|
|
@ -1 +1 @@
|
|||
module.exports = require('../../src/server/config');
|
||||
module.exports = require('../../src/server/lib/config')();
|
||||
|
|
0
test/unit/fixtures/require_from.js
Normal file
4
test/unit/server/.jshintrc
Normal file
|
@ -0,0 +1,4 @@
|
|||
{
|
||||
"extends": "../../../.jshintrc.node",
|
||||
"mocha": true
|
||||
}
|
18
test/unit/server/lib/config/check_path.js
Normal file
|
@ -0,0 +1,18 @@
|
|||
var root = require('requirefrom')('');
|
||||
var checkPath = root('src/server/lib/config/check_path');
|
||||
var expect = require('expect.js');
|
||||
var path = require('path');
|
||||
var _ = require('lodash');
|
||||
|
||||
describe('checkPath(path)', function () {
|
||||
|
||||
it('should return true for paths that exist', function () {
|
||||
expect(checkPath(__dirname)).to.be(true);
|
||||
});
|
||||
|
||||
it('should return false for paths that do not exist', function () {
|
||||
expect(checkPath(path.join(__dirname, 'something_fake'))).to.be(false);
|
||||
});
|
||||
|
||||
});
|
||||
|
215
test/unit/server/lib/config/config.js
Normal file
|
@ -0,0 +1,215 @@
|
|||
var root = require('requirefrom')('');
|
||||
var Config = root('src/server/lib/config/config');
|
||||
var expect = require('expect.js');
|
||||
var _ = require('lodash');
|
||||
var Joi = require('joi');
|
||||
|
||||
/**
|
||||
* Plugins should define a config method that takes a joi object. By default
|
||||
* it should return a way to disallow config
|
||||
*
|
||||
* Config should be newed up with a joi schema (containing defaults via joi)
|
||||
*
|
||||
* var schema = { ... }
|
||||
* new Config(schema);
|
||||
*
|
||||
*/
|
||||
|
||||
var data = {
|
||||
test: {
|
||||
hosts: ['host-01', 'host-02'],
|
||||
client: {
|
||||
type: 'datastore',
|
||||
host: 'store-01',
|
||||
port: 5050
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var schema = Joi.object({
|
||||
test: Joi.object({
|
||||
enable: Joi.boolean().default(true),
|
||||
hosts: Joi.array().items(Joi.string()),
|
||||
client: Joi.object({
|
||||
type: Joi.string().default('datastore'),
|
||||
host: Joi.string(),
|
||||
port: Joi.number()
|
||||
}).default(),
|
||||
undefValue: Joi.string()
|
||||
}).default()
|
||||
}).default();
|
||||
|
||||
describe('lib/config/config', function () {
|
||||
describe('class Config()', function () {
|
||||
|
||||
describe('constructor', function () {
|
||||
|
||||
it('should not allow any config if the schema is not passed', function (done) {
|
||||
var config = new Config();
|
||||
var run = function () {
|
||||
config.set('something.enable', true);
|
||||
};
|
||||
expect(run).to.throwException();
|
||||
done();
|
||||
});
|
||||
|
||||
it('should set defaults', function () {
|
||||
var config = new Config(schema);
|
||||
expect(config.get('test.enable')).to.be(true);
|
||||
expect(config.get('test.client.type')).to.be('datastore');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('#reset(object)', function () {
|
||||
|
||||
var config;
|
||||
beforeEach(function () {
|
||||
config = new Config(schema);
|
||||
});
|
||||
|
||||
it('should reset the config object with new values', function () {
|
||||
config.set(data);
|
||||
var newData = config.get();
|
||||
newData.test.enable = false;
|
||||
config.reset(newData);
|
||||
expect(config.get()).to.eql(newData);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('#has(key)', function () {
|
||||
|
||||
var config;
|
||||
beforeEach(function () {
|
||||
config = new Config(schema);
|
||||
});
|
||||
|
||||
it('should return true for fields that exist in the schema', function () {
|
||||
expect(config.has('test.undefValue')).to.be(true);
|
||||
});
|
||||
|
||||
it('should return true for partial objects that exist in the schema', function () {
|
||||
expect(config.has('test.client')).to.be(true);
|
||||
});
|
||||
|
||||
it('should return false for fields that do not exist in the schema', function () {
|
||||
expect(config.has('test.client.pool')).to.be(false);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('#set(key, value)', function () {
|
||||
var config;
|
||||
|
||||
beforeEach(function () {
|
||||
config = new Config(schema);
|
||||
});
|
||||
|
||||
it('should use a key and value to set a config value', function () {
|
||||
config.set('test.enable', false);
|
||||
expect(config.get('test.enable')).to.be(false);
|
||||
});
|
||||
|
||||
it('should use an object to set config values', function () {
|
||||
var hosts = ['host-01', 'host-02'];
|
||||
config.set({ test: { enable: false, hosts: hosts } });
|
||||
expect(config.get('test.enable')).to.be(false);
|
||||
expect(config.get('test.hosts')).to.eql(hosts);
|
||||
});
|
||||
|
||||
it('should use a flatten object to set config values', function () {
|
||||
var hosts = ['host-01', 'host-02'];
|
||||
config.set({ 'test.enable': false, 'test.hosts': hosts });
|
||||
expect(config.get('test.enable')).to.be(false);
|
||||
expect(config.get('test.hosts')).to.eql(hosts);
|
||||
});
|
||||
|
||||
it('should override values with just the values present', function () {
|
||||
var newData = _.cloneDeep(data);
|
||||
config.set(data);
|
||||
newData.test.enable = false;
|
||||
config.set({ test: { enable: false } });
|
||||
expect(config.get()).to.eql(newData);
|
||||
});
|
||||
|
||||
it('should thow an exception when setting a value with the wrong type', function (done) {
|
||||
var run = function () {
|
||||
config.set('test.enable', 'something');
|
||||
};
|
||||
expect(run).to.throwException(function (err) {
|
||||
expect(err).to.have.property('name', 'ValidationError');
|
||||
expect(err.details[0].message).to.be('"enable" must be a boolean');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
});
|
||||
|
||||
describe('#get(key)', function () {
|
||||
|
||||
var config;
|
||||
|
||||
beforeEach(function () {
|
||||
config = new Config(schema);
|
||||
config.set(data);
|
||||
});
|
||||
|
||||
it('should return the whole config object when called without a key', function () {
|
||||
var newData = _.cloneDeep(data);
|
||||
newData.test.enable = true;
|
||||
expect(config.get()).to.eql(newData);
|
||||
});
|
||||
|
||||
it('should return the value using dot notation', function () {
|
||||
expect(config.get('test.enable')).to.be(true);
|
||||
});
|
||||
|
||||
it('should return the clone of partial object using dot notation', function () {
|
||||
expect(config.get('test.client')).to.not.be(data.test.client);
|
||||
expect(config.get('test.client')).to.eql(data.test.client);
|
||||
});
|
||||
|
||||
it('should throw exception for unknown config values', function () {
|
||||
var run = function () {
|
||||
config.get('test.does.not.exist');
|
||||
};
|
||||
expect(run).to.throwException(/Unknown config key: test.does.not.exist/);
|
||||
});
|
||||
|
||||
it('should not throw exception for undefined known config values', function () {
|
||||
var run = function getUndefValue() {
|
||||
config.get('test.undefValue');
|
||||
};
|
||||
expect(run).to.not.throwException();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('#extendSchema(key, schema)', function () {
|
||||
var config;
|
||||
beforeEach(function () {
|
||||
config = new Config(schema);
|
||||
});
|
||||
|
||||
it('should allow you to extend the schema at the top level', function () {
|
||||
var newSchema = Joi.object({ test: Joi.boolean().default(true) }).default();
|
||||
config.extendSchema('myTest', newSchema);
|
||||
expect(config.get('myTest.test')).to.be(true);
|
||||
});
|
||||
|
||||
it('should NOT allow you to extend the schema if somethign else is there', function () {
|
||||
var newSchema = Joi.object({ test: Joi.boolean().default(true) }).default();
|
||||
config.extendSchema('test', newSchema);
|
||||
var run = function () {
|
||||
config.get('test.test');
|
||||
};
|
||||
expect(run).to.throwException();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
|
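Read together, these tests describe a small wrapper around a Joi schema: every set re-validates the whole object so defaults apply and bad types throw, and get returns clones addressed with dot notation. Below is a rough sketch of that idea, assuming nothing beyond Joi and lodash; it is not the committed src/server/lib/config/config, which also implements has() and extendSchema().

var Joi = require('joi');
var _ = require('lodash');

// walk a dot-notation key into a nested object and assign the value
function setDeep(obj, key, value) {
  var parts = key.split('.');
  var last = parts.pop();
  var target = parts.reduce(function (acc, part) {
    if (!_.isPlainObject(acc[part])) acc[part] = {};
    return acc[part];
  }, obj);
  target[last] = value;
}

// read a dot-notation key out of a nested object
function getDeep(obj, key) {
  return key.split('.').reduce(function (acc, part) {
    return acc === undefined ? acc : acc[part];
  }, obj);
}

function Config(schema) {
  this.schema = schema || Joi.object({}).default();
  this.reset({});
}

Config.prototype.reset = function (newConfig) {
  // validating the whole object lets Joi apply defaults and reject bad values
  var results = Joi.validate(newConfig, this.schema);
  if (results.error) throw results.error;
  this.config = results.value;
};

Config.prototype.set = function (key, value) {
  var newConfig = _.cloneDeep(this.config);
  if (_.isPlainObject(key)) {
    _.merge(newConfig, key);
  } else {
    setDeep(newConfig, key, value);
  }
  this.reset(newConfig);
};

Config.prototype.get = function (key) {
  if (!key) return _.cloneDeep(this.config);
  return _.cloneDeep(getDeep(this.config, key));
};

module.exports = Config;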
34  test/unit/server/lib/config/explode_by.js  (new file)
@@ -0,0 +1,34 @@
var root = require('requirefrom')('');
var explodeBy = root('src/server/lib/config/explode_by');
var expect = require('expect.js');
var _ = require('lodash');

describe('explode_by(dot, flatObject)', function () {

  it('should explode a flattened object with dots', function () {
    var flatObject = {
      'test.enable': true,
      'test.hosts': ['host-01', 'host-02']
    };
    expect(explodeBy('.', flatObject)).to.eql({
      test: {
        enable: true,
        hosts: ['host-01', 'host-02']
      }
    });
  });

  it('should explode a flattened object with slashes', function () {
    var flatObject = {
      'test/enable': true,
      'test/hosts': ['host-01', 'host-02']
    };
    expect(explodeBy('/', flatObject)).to.eql({
      test: {
        enable: true,
        hosts: ['host-01', 'host-02']
      }
    });
  });

});
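One plausible implementation of the behaviour exercised here, shown purely as a sketch (it is not the committed module): split each flat key on the separator and build the nested structure key by key.

module.exports = function explodeBy(sep, flatObject) {
  var result = {};
  Object.keys(flatObject).forEach(function (flatKey) {
    var parts = flatKey.split(sep);
    var last = parts.pop();
    // create intermediate objects as we descend
    var target = parts.reduce(function (acc, part) {
      if (typeof acc[part] !== 'object' || acc[part] === null) acc[part] = {};
      return acc[part];
    }, result);
    target[last] = flatObject[flatKey];
  });
  return result;
};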
29  test/unit/server/lib/config/flatten_with.js  (new file)
@@ -0,0 +1,29 @@
var root = require('requirefrom')('');
var flattenWith = root('src/server/lib/config/flatten_with');
var expect = require('expect.js');
var _ = require('lodash');

describe('flatten_with(dot, nestedObj)', function () {

  it('should flatten a nested object with dots', function () {
    var nestedObj = {
      test: {
        enable: true,
        hosts: ['host-01', 'host-02'],
        client: {
          type: 'nosql',
          pool: [{ port: 5051 }, { port: 5052 }]
        }
      }
    };
    expect(flattenWith('.', nestedObj)).to.eql({
      'test.enable': true,
      'test.hosts': ['host-01', 'host-02'],
      'test.client.type': 'nosql',
      'test.client.pool': [{ port: 5051 }, { port: 5052 }]
    });
  });

});
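One way the module under test could work, sketched as an assumption rather than the committed code: recursively walk the object, joining keys with the separator, and stop descending at arrays or non-object values so they are kept whole.

module.exports = function flattenWith(sep, nestedObj) {
  var result = {};

  (function flatten(prefix, obj) {
    Object.keys(obj).forEach(function (key) {
      var value = obj[key];
      var flatKey = prefix ? prefix + sep + key : key;
      if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
        flatten(flatKey, value); // keep descending into plain objects
      } else {
        result[flatKey] = value; // leaves (including arrays) are stored as-is
      }
    });
  }('', nestedObj));

  return result;
};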
15  test/unit/server/lib/config/index.js  (new file)
@@ -0,0 +1,15 @@
var root = require('requirefrom')('');
var config = root('src/server/lib/config');
var Config = root('src/server/lib/config/config');
var expect = require('expect.js');
var _ = require('lodash');

describe('server.config()', function () {

  it('should return a Config object', function () {
    var conf = config();
    expect(conf).to.be.a(Config);
  });

});
30  test/unit/server/lib/config/override.js  (new file)
@@ -0,0 +1,30 @@
var root = require('requirefrom')('');
var override = root('src/server/lib/config/override');
var expect = require('expect.js');
var _ = require('lodash');

describe('override(target, source)', function () {

  it('should override the values from source onto target', function () {
    var target = {
      test: {
        enable: true,
        host: ['host-01', 'host-02'],
        client: {
          type: 'sql'
        }
      }
    };
    var source = { test: { client: { type: 'nosql' } } };
    expect(override(target, source)).to.eql({
      test: {
        enable: true,
        host: ['host-01', 'host-02'],
        client: {
          type: 'nosql'
        }
      }
    });
  });

});
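A sketch of the deep-override behaviour the test describes (assumed, not the actual module): values present in source replace the matching values in target, while untouched branches of target are left alone.

module.exports = function override(target, source) {
  Object.keys(source).forEach(function (key) {
    var sourceValue = source[key];
    var targetValue = target[key];
    // only recurse when both sides are plain objects; otherwise replace outright
    var bothObjects = sourceValue !== null && typeof sourceValue === 'object' && !Array.isArray(sourceValue)
      && targetValue !== null && typeof targetValue === 'object' && !Array.isArray(targetValue);
    if (bothObjects) {
      override(targetValue, sourceValue);
    } else {
      target[key] = sourceValue;
    }
  });
  return target;
};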
28  test/unit/server/lib/config/schema.js  (new file)
@@ -0,0 +1,28 @@
var root = require('requirefrom')('');
var schema = root('src/server/lib/config/schema');
var expect = require('expect.js');
var _ = require('lodash');
var Joi = require('joi');
var package = root('./package.json');
var path = require('path');

describe('lib/config/schema', function () {

  describe('defaults', function () {

    it('should resolve the package.json', function () {
      var results = Joi.validate({}, schema);
      expect(results.value.kibana.package).to.eql(package);
    });

    it('should resolve the publicFolder', function () {
      var results = Joi.validate({}, schema);
      var publicFolder = path.resolve(__dirname, '..', '..', '..', '..', '..', 'src', 'kibana');
      expect(results.value.kibana.publicFolder).to.eql(publicFolder);
    });

  });

});
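The defaults being tested suggest the schema injects derived values such as the parsed package.json and an absolute publicFolder path. Purely as an illustration of how that can be expressed with Joi defaults; the require path and folder layout below are placeholders, not the real schema's.

var Joi = require('joi');
var path = require('path');

module.exports = Joi.object({
  kibana: Joi.object({
    // placeholder paths; the real schema resolves these relative to the server source
    package: Joi.any().default(require('../../package.json')),
    publicFolder: Joi.string().default(path.resolve(__dirname, '..', 'kibana'))
  }).default()
}).default();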
@@ -1,55 +0,0 @@  (deleted file)
var root = require('requirefrom')('');
var isUpgradeable = root('src/server/lib/isUpgradeable');
var expect = require('expect.js');
var util = require('util');
var package = root('package.json');

describe('lib/isUpgradeable', function () {

  function upgradeDoc(_id, version, bool) {
    it(util.format('should return %s for %s <= %s', bool, _id, version), function () {
      var doc = { _id: _id };
      package.version = version;
      expect(isUpgradeable(doc)).to.be(bool);
    });
  }

  upgradeDoc('1.0.0-beta1', package.version, false);
  upgradeDoc(package.version, package.version, false);
  upgradeDoc('4.0.0-RC1', '4.0.0-RC2', true);
  upgradeDoc('4.0.0-rc2', '4.0.0-rc1', false);
  upgradeDoc('4.0.0-rc2', '4.0.0', true);
  upgradeDoc('4.0.0-rc2', '4.0.2', true);
  upgradeDoc('4.0.1', '4.1.0-rc', true);
  upgradeDoc('4.0.0-rc1', '4.0.0', true);
  upgradeDoc('4.0.0-rc1-snapshot', '4.0.0', false);
  upgradeDoc('4.1.0-rc1-snapshot', '4.1.0-rc1', false);

  it('should handle missing _id field', function () {
    var doc = {
      '_index': '.kibana',
      '_type': 'config',
      '_score': 1,
      '_source': {
        'buildNum': 1.7976931348623157e+308,
        'defaultIndex': '[logstash-]YYYY.MM.DD'
      }
    };
    expect(isUpgradeable(doc)).to.be(false);
  });

  it('should handle _id of @@version', function () {
    var doc = {
      '_index': '.kibana',
      '_type': 'config',
      '_id': '@@version',
      '_score': 1,
      '_source': {
        'buildNum': 1.7976931348623157e+308,
        'defaultIndex': '[logstash-]YYYY.MM.DD'
      }
    };
    expect(isUpgradeable(doc)).to.be(false);
  });

});
76  test/unit/server/lib/plugins/check_dependencies.js  (new file)
@@ -0,0 +1,76 @@
var checkDependencies = require('../../../../../src/server/lib/plugins/check_dependencies');
var expect = require('expect.js');

describe('src/server/lib/check_dependencies', function () {

  it('should return true for first -> second -> third', function () {
    var deps = {
      first: [],
      second: ['first'],
      third: ['second']
    };

    var results = checkDependencies('first', deps);
    expect(results).to.be(true);
  });

  it('should throw an error for first -> third -> second -> first', function () {
    var deps = {
      first: ['third'],
      second: ['first'],
      third: ['second']
    };

    var run = function () {
      checkDependencies('first', deps);
    };
    expect(run).to.throwException(function (e) {
      expect(e.message).to.be('Circular dependency: first -> third -> second -> first');
    });
  });

  it('should throw an error for first -> missing', function () {
    var deps = {
      first: ['missing']
    };

    var run = function () {
      checkDependencies('first', deps);
    };
    expect(run).to.throwException(function (e) {
      expect(e.message).to.be('Missing dependency: missing');
    });
  });

  it('should throw an error for missing dependency', function () {
    var deps = {
      first: ['missing']
    };

    var run = function () {
      checkDependencies('missing', deps);
    };
    expect(run).to.throwException(function (e) {
      expect(e.message).to.be('Missing dependency: missing');
    });
  });

  it('should throw an error on complex circulars', function () {
    var deps = {
      first: ['second', 'fifth'],
      second: ['fourth'],
      third: [],
      fourth: ['third'],
      fifth: ['sixth'],
      sixth: ['first']
    };

    var run = function () {
      checkDependencies('first', deps);
    };
    expect(run).to.throwException(function (e) {
      expect(e.message).to.be('Circular dependency: first -> fifth -> sixth -> first');
    });
  });

});
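A depth-first walk that carries the current path is enough to produce exactly these errors. The sketch below is an assumed implementation, not the committed one, but it reproduces the messages the tests expect.

module.exports = function checkDependencies(name, deps) {
  function visit(current, path) {
    if (!deps.hasOwnProperty(current)) {
      throw new Error('Missing dependency: ' + current);
    }
    if (path.indexOf(current) !== -1) {
      // seeing a name twice on the same path means we looped back to it
      throw new Error('Circular dependency: ' + path.concat(current).join(' -> '));
    }
    deps[current].forEach(function (dep) {
      visit(dep, path.concat(current));
    });
  }

  visit(name, []);
  return true;
};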
28  test/unit/server/lib/plugins/plugin.js  (new file)
@@ -0,0 +1,28 @@
var expect = require('expect.js');
var Plugin = require('../../../../../src/server/lib/plugins/plugin');

describe('lib/plugins/plugin', function () {

  it('should assign attributes passed into the constructor to the object', function () {
    var plugin = new Plugin({ name: 'test', require: ['config'] });
    expect(plugin).to.have.property('name', 'test');
    expect(plugin).to.have.property('require');
    expect(plugin.require).to.eql(['config']);
  });

  it('should by default assign an empty array to the require attribute', function () {
    var plugin = new Plugin();
    expect(plugin).to.have.property('require');
    expect(plugin.require).to.eql([]);
  });

  it('should by default assign an init function that rejects its promise', function (done) {
    var plugin = new Plugin();
    expect(plugin).to.have.property('init');
    plugin.init().catch(function (err) {
      expect(err.message).to.be('You must override the init function for plugins');
      done();
    });
  });

});
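A sketch of the Plugin shape implied by these tests (an assumption, not the committed source): copy the constructor attributes onto the instance, default require to an empty array, and default init to a rejected promise.

var _ = require('lodash');
var Promise = require('bluebird');

function Plugin(attrs) {
  _.assign(this, attrs || {});
  if (!this.require) this.require = [];
  if (!this.init) {
    // force plugin authors to provide their own init
    this.init = function () {
      return Promise.reject(new Error('You must override the init function for plugins'));
    };
  }
}

module.exports = Plugin;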
175  test/unit/server/lib/plugins/register_plugins.js  (new file)
@@ -0,0 +1,175 @@
var _ = require('lodash');
var expect = require('expect.js');
var sinon = require('sinon');
var registerPlugins = require('../../../../../src/server/lib/plugins/register_plugins');
var Status = require('../../../../../src/server/lib/status/status');
var systemStatus = require('../../../../../src/server/lib/status');
var Promise = require('bluebird');

function createInit() {
  return sinon.stub().returns(Promise.resolve());
}

describe('server/lib/register_plugins', function () {
  var server, get;

  beforeEach(function () {
    get = sinon.stub();
    server = {
      register: sinon.stub(),
      config: sinon.stub().returns({ get: get }),
      expose: sinon.stub(),
      log: sinon.stub()
    };
  });

  describe('registerPlugins() wrapper', function () {

    var options = { foo: 'bar' };

    it('should pass server, options and next to the init function', function () {
      var next = function (err) {
        server.register.args[0][1](err);
      };
      server.register.yieldsTo('register', server, options, next);
      var plugin = { name: 'first', init: createInit() };
      var plugins = [plugin];
      return registerPlugins(server, plugins).then(function () {
        expect(plugin.init.args[0][0]).to.equal(server);
        expect(plugin.init.args[0][1]).to.equal(options);
      });
    });

    it('should call next() when plugin.init completes', function () {
      var called = false;
      var next = function (err) {
        called = true;
        server.register.args[0][1](err);
      };
      server.register.yieldsTo('register', server, options, next);
      var plugin = { name: 'first', init: createInit() };
      var plugins = [plugin];
      return registerPlugins(server, plugins).then(function () {
        expect(called).to.be(true);
      });
    });

    it('should attach the server to the plugin', function () {
      var next = function (err) {
        server.register.args[0][1](err);
      };
      server.register.yieldsTo('register', server, options, next);
      var plugin = { name: 'first', init: createInit() };
      var plugins = [plugin];
      return registerPlugins(server, plugins).then(function () {
        expect(plugin).to.have.property('server');
        expect(plugin.server).to.eql(server);
      });
    });

    var greenSpy, yellowSpy, createStatus;
    beforeEach(function () {
      greenSpy = sinon.spy(Status.prototype, 'green');
      yellowSpy = sinon.spy(Status.prototype, 'yellow');
      createStatus = sinon.spy(systemStatus, 'createStatus');
    });

    afterEach(function () {
      Status.prototype.green.restore();
      Status.prototype.yellow.restore();
      systemStatus.createStatus.restore();
    });

    it('should call status.createStatus() with plugin', function () {
      var next = function (err) {
        server.register.args[0][1](err);
      };
      server.register.yieldsTo('register', server, options, next);
      var plugin = { name: 'first', init: createInit() };
      var plugins = [plugin];
      return registerPlugins(server, plugins).then(function () {
        sinon.assert.calledOnce(createStatus);
        expect(plugin).to.have.property('status');
        expect(createStatus.args[0][0]).to.eql(plugin);
      });
    });

    it('should not set the status before init is called', function () {
      var next = function (err) {
        server.register.args[0][1](err);
      };
      server.register.yieldsTo('register', server, options, next);
      var plugin = { name: 'first', init: createInit() };
      var plugins = [plugin];
      return registerPlugins(server, plugins).then(function () {
        expect(yellowSpy).to.have.property('callCount', 0);
      });
    });

    it('should set the status to green and "Ready" after init', function () {
      var next = function (err) {
        server.register.args[0][1](err);
      };
      server.register.yieldsTo('register', server, options, next);
      var plugin = { name: 'first', init: createInit() };
      var plugins = [plugin];
      return registerPlugins(server, plugins).then(function () {
        sinon.assert.calledOnce(greenSpy);
        expect(greenSpy.calledAfter(plugin.init)).to.be(true);
        expect(greenSpy.args[0][0]).to.be('Ready');
      });
    });

  });

  describe('dependencies', function () {
    var nextStub;

    beforeEach(function () {
      var count = 0;
      var next = function (err) {
        server.register.args[count++][1](err);
      };
      server.register.yieldsTo('register', server, {}, next);
    });

    it('should run second after first and third and third after first', function () {
      var first = { name: 'first', init: createInit() };
      var second = { name: 'second', require: ['first', 'third'], init: createInit() };
      var third = { name: 'third', require: ['first'], init: createInit() };
      var plugins = [second, first, third];
      return registerPlugins(server, plugins).then(function () {
        expect(second.init.calledAfter(first.init)).to.be(true);
        expect(second.init.calledAfter(third.init)).to.be(true);
        expect(third.init.calledAfter(first.init)).to.be(true);
        sinon.assert.calledThrice(server.register);
      });
    });

    it('should run first, second, third', function () {
      var first = { name: 'first', init: createInit() };
      var second = { name: 'second', require: ['first'], init: createInit() };
      var third = { name: 'third', require: ['second'], init: createInit() };
      var plugins = [second, first, third];
      return registerPlugins(server, plugins).then(function () {
        sinon.assert.calledOnce(first.init);
        expect(second.init.calledAfter(first.init)).to.be(true);
        expect(third.init.calledAfter(second.init)).to.be(true);
        sinon.assert.calledThrice(server.register);
      });
    });

    it('should detect circular dependencies', function (done) {
      var first = { name: 'first', require: ['third'], init: sinon.stub() };
      var second = { name: 'second', require: ['first'], init: sinon.stub() };
      var third = { name: 'third', require: ['second'], init: sinon.stub() };
      var plugins = [second, first, third];
      registerPlugins(server, plugins).catch(function (err) {
        expect(err).to.be.a(Error);
        expect(err.message).to.be('Circular dependency: second -> first -> third -> second');
        done();
      });
    });

  }); // end dependencies tests

});
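The stubs above mirror Hapi 8's plugin API: server.register() is handed an object whose register function receives (server, options, next). One way the wrapper around a single plugin's init could look, sketched under those assumptions only (the real register_plugins also orders plugins by require and wires up status bookkeeping):

var Promise = require('bluebird');

function registerPlugin(server, plugin) {
  return new Promise(function (resolve, reject) {
    var register = function (innerServer, options, next) {
      plugin.server = innerServer;
      // succeed or fail Hapi's registration based on the plugin's init promise
      plugin.init(innerServer, options).then(function () { next(); }, next);
    };
    register.attributes = { name: plugin.name };

    server.register({ register: register }, function (err) {
      if (err) return reject(err);
      resolve(plugin);
    });
  });
}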
38  test/unit/server/lib/status/index.js  (new file)
@@ -0,0 +1,38 @@
var expect = require('expect.js');
var sinon = require('sinon');
var status = require('../../../../../src/server/lib/status');
var Status = require('../../../../../src/server/lib/status/status');

describe('lib/status/index.js', function () {

  var plugin, yellowSpy;
  beforeEach(function () {
    plugin = {
      name: 'test',
      server: { expose: sinon.stub(), log: sinon.stub() }
    };
    yellowSpy = sinon.spy(Status.prototype, 'yellow');
  });

  afterEach(function () {
    Status.prototype.yellow.restore();
  });

  it('should create a new status for a plugin', function () {
    status.createStatus(plugin);
    expect(status.data).to.have.property('test');
    expect(status.data.test).to.eql(plugin.status);
  });

  it('should attach a logger to the status change', function () {
    status.createStatus(plugin);
    plugin.status.green('Ready!');
    sinon.assert.calledOnce(plugin.server.log);
  });

  it('should serialize the statuses when toJSON is called', function () {
    status.createStatus(plugin);
    expect(JSON.stringify(status)).to.eql(JSON.stringify(status.data));
  });

});
29  test/unit/server/lib/status/log_status_change.js  (new file)
@@ -0,0 +1,29 @@
var expect = require('expect.js');
var sinon = require('sinon');
var logStatusChange = require('../../../../../src/server/lib/status/log_status_change');

describe('lib/status/log_status_change', function () {

  var plugin;
  var current = { state: 'yellow', message: 'Initialize' };
  var previous = { state: 'red', message: '' };

  beforeEach(function () {
    plugin = { name: 'test', server: { log: sinon.stub() } };
  });

  it('should call plugin.server.log', function () {
    var fn = logStatusChange(plugin);
    fn(current, previous);
    sinon.assert.calledOnce(plugin.server.log);
  });

  it('should call plugin.server.log with the plugin tag and the status change message', function () {
    var fn = logStatusChange(plugin);
    fn(current, previous);
    sinon.assert.calledOnce(plugin.server.log);
    expect(plugin.server.log.args[0][0]).to.be('plugin');
    expect(plugin.server.log.args[0][1]).to.be('[ test ] Change status from red to yellow - Initialize');
  });

});
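A sketch consistent with the expected log output above (assumed, not the committed module): return a change handler that logs under the 'plugin' tag.

module.exports = function logStatusChange(plugin) {
  return function (current, previous) {
    // e.g. "[ test ] Change status from red to yellow - Initialize"
    var message = '[ ' + plugin.name + ' ] Change status from ' + previous.state +
      ' to ' + current.state + ' - ' + current.message;
    plugin.server.log('plugin', message);
  };
};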
Some files were not shown because too many files have changed in this diff.