mirror of https://github.com/elastic/kibana.git
synced 2025-04-24 01:38:56 -04:00

Merge pull request #2574 from simianhacker/feature/node-server

Node.js Version of the Server

This commit is contained in:
commit 6eacb05fdf

66 changed files with 879 additions and 1311 deletions
4 .gitignore (vendored)
@@ -1,5 +1,6 @@
.aws-config.json
.DS_Store
.node_binaries
node_modules
bower_components
**/*.css
@@ -9,4 +10,5 @@ target
.jruby
.idea
*.iml
esvm
*.log
esvm
@@ -36,4 +36,4 @@
    "maxerr": 10,
    "scripturl": true,
    "evil": true
  }
}
9 Gemfile
@@ -1,9 +0,0 @@
source "https://rubygems.org"

gem 'sinatra', :require => 'sinatra/base'
gem 'sinatra-contrib'
gem 'puma'
gem 'warbler'
gem 'elasticsearch'
gem 'rack-reverse-proxy', :require => 'rack/reverse_proxy'
gem 'colorize'
62 Gemfile.lock
@@ -1,62 +0,0 @@
GEM
  remote: https://rubygems.org/
  specs:
    backports (3.6.0)
    colorize (0.7.3)
    elasticsearch (1.0.4)
      elasticsearch-api (= 1.0.4)
      elasticsearch-transport (= 1.0.4)
    elasticsearch-api (1.0.4)
      multi_json
    elasticsearch-transport (1.0.4)
      faraday
      multi_json
    faraday (0.9.0)
      multipart-post (>= 1.2, < 3)
    jruby-jars (1.7.13)
    jruby-rack (1.1.16)
    multi_json (1.10.1)
    multipart-post (2.0.0)
    puma (2.9.0)
      rack (>= 1.1, < 2.0)
    puma (2.9.0-java)
      rack (>= 1.1, < 2.0)
    rack (1.5.2)
    rack-protection (1.5.3)
      rack
    rack-reverse-proxy (0.4.4)
      rack (>= 1.0.0)
    rack-test (0.6.2)
      rack (>= 1.0)
    rake (10.1.0)
    rubyzip (1.1.6)
    sinatra (1.4.5)
      rack (~> 1.4)
      rack-protection (~> 1.4)
      tilt (~> 1.3, >= 1.3.4)
    sinatra-contrib (1.4.2)
      backports (>= 2.0)
      multi_json
      rack-protection
      rack-test
      sinatra (~> 1.4.0)
      tilt (~> 1.3)
    tilt (1.4.1)
    warbler (1.4.4)
      jruby-jars (>= 1.5.6, < 2.0)
      jruby-rack (>= 1.0.0)
      rake (>= 0.9.6)
      rubyzip (>= 0.9, < 1.2)

PLATFORMS
  java
  ruby

DEPENDENCIES
  colorize
  elasticsearch
  puma
  rack-reverse-proxy
  sinatra
  sinatra-contrib
  warbler
@@ -14,8 +14,8 @@ module.exports = function (grunt) {
    buildApp: __dirname + '/build/kibana', // build directory for the app
    configFile: __dirname + '/src/server/config/kibana.yml',

    jrubyVersion: '1.7.14',
    jrubyPath: __dirname + '/.jruby',
    nodeVersion: '0.10.35',
    platforms: ['darwin-x64', 'linux-x64', 'linux-x86', 'windows'],

    unitTestDir: __dirname + '/test/unit',
    testUtilsDir: __dirname + '/test/utils',
70 package.json
@@ -1,10 +1,56 @@
{
  "name": "kibana",
  "private": true,
  "description": "Kibana is an open source (Apache Licensed), browser based analytics and search dashboard for Elasticsearch. Kibana is a snap to setup and start using. Kibana strives to be easy to get started with, while also being flexible and powerful, just like Elasticsearch.",
  "keywords": [
    "kibana",
    "elasticsearch",
    "logstash",
    "analytics",
    "visualizations",
    "dashboards",
    "dashboarding"
  ],
  "private": false,
  "version": "4.0.0-beta3",
  "description": "Kibana 4",
  "main": "Gulpfile.js",
  "dependencies": {},
  "main": "src/server/app.js",
  "homepage": "http://www.elasticsearch.org/overview/kibana/",
  "bugs": "https://github.com/elasticsearch/kibana/issues",
  "license": "Apache-2.0",
  "author": "Rashid Khan <rashid.khan@elasticsearch.com>",
  "contributors": [
    "Spencer Alger <spencer.alger@elasticsearch.com>",
    "Chris Cowan <chris.cowan@elasticsearch.com>",
    "Joe Fleming <joe.fleming@elasticsearch.com>",
    "Lukas Olson <lukas.olson@elasticsearch.com>"
  ],
  "scripts": {
    "test": "grunt test",
    "start": "node ./src/server/bin/kibana.js",
    "server": "node ./src/server/bin/kibana.js",
    "precommit": "grunt hintStagedFiles"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/elasticsearch/kibana.git"
  },
  "dependencies": {
    "ansicolors": "^0.3.2",
    "body-parser": "~1.10.1",
    "bunyan": "^1.2.3",
    "commander": "^2.6.0",
    "compression": "^1.3.0",
    "cookie-parser": "~1.3.3",
    "debug": "~2.1.1",
    "express": "~4.10.6",
    "glob": "^4.3.2",
    "http-proxy": "^1.8.1",
    "jade": "~1.8.2",
    "js-yaml": "^3.2.5",
    "less-middleware": "1.0.x",
    "lodash": "^2.4.1",
    "morgan": "~1.5.1",
    "serve-favicon": "~2.2.0"
  },
  "devDependencies": {
    "bluebird": "~2.0.7",
    "connect": "~2.19.5",
@@ -27,7 +73,7 @@
    "grunt-s3": "~0.2.0-alpha.3",
    "grunt-saucelabs": "~8.3.2",
    "html-entities": "^1.1.1",
    "http-proxy": "~1.1.4",
    "http-proxy": "~1.8.1",
    "husky": "~0.6.0",
    "istanbul": "~0.2.4",
    "load-grunt-config": "~0.7.0",
@@ -46,20 +92,8 @@
    "simple-git": "^0.11.0",
    "tar": "^1.0.1"
  },
  "scripts": {
    "test": "grunt test --use-jruby",
    "server": "grunt server",
    "precommit": "grunt hintStagedFiles",
    "prepush": "echo"
  },
  "repository": {
    "type": "git",
    "url": "git@github.com:elasticsearch/kibana.git"
  },
  "author": "",
  "license": "Apache 2.0",
  "bugs": {
    "url": "https://github.com/elasticsearch/kibana/issues"
  },
  "homepage": "https://www.elasticsearch.org/overview/kibana"
}
}
5 src/server/.jshintrc (Normal file)
@@ -0,0 +1,5 @@
{
  "extends": "../.jshintrc",
  "node": true
}
@@ -1,19 +0,0 @@
# Kibana @@version

Kibana is an open source (Apache Licensed), browser based analytics and search dashboard for Elasticsearch. Kibana is a snap to setup and start using. Kibana strives to be easy to get started with, while also being flexible and powerful, just like Elasticsearch.

## Installation

* Download: http://www.elasticsearch.org/overview/kibana/installation/
* Run **bin/kibana** on unix, or **bin/kibana.bat** on Windows.
* Visit http://localhost:5601

## Need Help?

Need help? Try #elasticsearch or #logstash on Freenode IRC. You can also find help on the elasticsearch-users@googlegroups.com or logstash-users@googlegroups.com mailing lists.

You can also find documentation at http://www.elasticsearch.com/guide/en/kibana/current

## Contributing

If you have a bugfix or new feature that you would like to contribute to Kibana, please find or open an issue about it first. Kibana is an open source project that is available on Github: https://github.com/elasticsearch/kibana
@@ -1,7 +0,0 @@
source "https://rubygems.org"

gem 'sinatra', :require => 'sinatra/base'
gem 'sinatra-contrib'
gem 'puma'
gem 'rack-reverse-proxy', :require => 'rack/reverse_proxy'
gem 'colorize'
@@ -1,40 +0,0 @@
GEM
  remote: https://rubygems.org/
  specs:
    backports (3.6.0)
    colorize (0.7.3)
    multi_json (1.10.1)
    puma (2.9.0)
      rack (>= 1.1, < 2.0)
    puma (2.9.0-java)
      rack (>= 1.1, < 2.0)
    rack (1.5.2)
    rack-protection (1.5.3)
      rack
    rack-reverse-proxy (0.4.4)
      rack (>= 1.0.0)
    rack-test (0.6.2)
      rack (>= 1.0)
    sinatra (1.4.5)
      rack (~> 1.4)
      rack-protection (~> 1.4)
      tilt (~> 1.3, >= 1.3.4)
    sinatra-contrib (1.4.2)
      backports (>= 2.0)
      multi_json
      rack-protection
      rack-test
      sinatra (~> 1.4.0)
      tilt (~> 1.3)
    tilt (1.4.1)

PLATFORMS
  java
  ruby

DEPENDENCIES
  colorize
  puma
  rack-reverse-proxy
  sinatra
  sinatra-contrib
75 src/server/app.js (Normal file)
@@ -0,0 +1,75 @@
var express = require('express');
var path = require('path');
var favicon = require('serve-favicon');
var requestLogger = require('./lib/requestLogger');
var appHeaders = require('./lib/appHeaders');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var compression = require('compression');
var config = require('./config');

var routes = require('./routes/index');
var proxy = require('./routes/proxy');

var app = express();

// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');
app.set('x-powered-by', false);

app.use(favicon(path.join(config.public_folder, 'styles', 'theme', 'elk.ico')));
app.use(requestLogger());
app.use(appHeaders());

if (app.get('env') === 'development') {
  require('./dev')(app);
}

// The proxy must be set up before all the other middleware.
// TODO: WE might want to move the middleware to each of the individual routes
// so we don't have weird conflicts in the future.
app.use('/elasticsearch', proxy);

app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(compression());
app.use(express.static(config.public_folder));
if (config.external_plugins_folder) app.use('/plugins', express.static(config.external_plugins_folder));

app.use('/', routes);

// catch 404 and forward to error handler
app.use(function (req, res, next) {
  var err = new Error('Not Found');
  err.status = 404;
  next(err);
});

// error handlers

// development error handler
// will print stacktrace
if (app.get('env') === 'development') {
  app.use(function (err, req, res, next) {
    res.status(err.status || 500);
    res.render('error', {
      message: err.message,
      error: err
    });
  });
}

// production error handler
// no stacktraces leaked to user
app.use(function (err, req, res, next) {
  res.status(err.status || 500);
  res.render('error', {
    message: err.message,
    error: {}
  });
});

module.exports = app;
@@ -1,97 +0,0 @@
#!/usr/bin/env ruby
require "optparse"
require 'rubygems'
require 'puma/cli'
require "yaml"

HERE = File.expand_path(File.dirname(__FILE__))

module Kibana
  def self.global_settings
    @settings ||= {}
  end
end

# Defaults for the options
options = {
  :config => ENV["CONFIG_PATH"] || File.expand_path("#{HERE}/../config/kibana.yml"),
  :plugins_folder => ENV["PLUGINS_FOLDER"]
}

# Create a new parser
parser = OptionParser.new do |opts|
  opts.on('-e', '--elasticsearch URI', 'Elasticsearch instance') do |arg|
    options[:elasticsearch] = arg
  end
  opts.on('-c', '--config PATH', 'Path to config file') do |arg|
    options[:config] = arg
  end
  opts.on('-p', '--port PORT', 'Kibana port') do |arg|
    options[:port] = arg
  end
  opts.on('-q', '--quiet', 'Turns off logging') do |arg|
    options[:quiet] = arg
  end
  opts.on('-H', '--host HOST', 'Kibana host') do |arg|
    options[:host] = arg
  end
  opts.on('-v', '--version', 'Display version') do |arg|
    puts ENV['KIBANA_VERSION'] || 'dev-build'
    exit
  end
  opts.on('--plugins', 'Path to the folder to scan for plugins') do |arg|
    options[:plugins_folder] = arg
  end
  opts.on('-h', '--help', 'Display this screen') do
    puts opts
    exit
  end
end

# Set the usage banner
parser.banner = "Usage: kibana <options>\n\n"

# Parse the command line arguments
parser.parse! ARGV

# Load the config from default
config = YAML.load(IO.read(options[:config]))

# Set the override for the port
port = (options[:port] || config['port'])

# Set the override for the host
host = (options[:host] || config['host'])

# Set the override for Elasticsaerch
elasticsearch = (options[:elasticsearch] || config['elasticsearch'])

# If the env isn't set we need to set it to development
ENV["RACK_ENV"] = "development" if ENV["RACK_ENV"].nil?

# Set the global_settings that are shared across every app
Kibana.global_settings[:port] = port || 5601
Kibana.global_settings[:host] = host || '0.0.0.0'
Kibana.global_settings[:request_timeout] = config["request_timeout"].nil? ? 60 : config["request_timeout"]
Kibana.global_settings[:shard_timeout] = config["shard_timeout"].nil? ? 30000 : config["shard_timeout"]

Kibana.global_settings[:config] = config
Kibana.global_settings[:elasticsearch] = elasticsearch
Kibana.global_settings[:root] = File.expand_path("#{HERE}/../")
Kibana.global_settings[:quiet] = options[:quiet]
Kibana.global_settings[:external_plugins_folder] = options[:plugins_folder] || nil

# Set the public folder based on whether we are running in production or not.
if ENV['RACK_ENV'] == ('production')
  Kibana.global_settings[:public_folder] = File.expand_path("#{HERE}/../public/")
  Kibana.global_settings[:bundled_plugins_folder] = File.expand_path("#{HERE}/../public/plugins")
else
  Kibana.global_settings[:public_folder] = File.expand_path("#{HERE}/../../kibana/")
  Kibana.global_settings[:bundled_plugins_folder] = File.expand_path("#{HERE}/../../kibana/plugins")
end

# Add the root of the project to the load path
$LOAD_PATH.unshift(Kibana.global_settings[:root])

require "lib/server"
Kibana::Server.run(Kibana.global_settings)
@@ -1,26 +1,21 @@
@echo off

SETLOCAL
if not defined JAVA_HOME goto java_home_err

set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI

set RACK_ENV=production
set CONFIG_PATH=%DIR%\config\kibana.yml
set PLUGINS_FOLDER=%DIR%\plugins
set KIBANA_VERSION=@@version

TITLE Kibana %KIBANA_VERSION%

"%JAVA_HOME%\bin\java" -jar "%DIR%\lib\kibana.jar" %*

:java_home_err
echo JAVA_HOME enviroment variable must be set!
pause
goto finally

:finally

ENDLOCAL
@echo off

SETLOCAL

set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI

set NODE=%DIR%\node\node.exe
set SERVER=%DIR%\src\bin\kibana.js
set NODE_ENV="production"
set CONFIG_PATH=%DIR%\config\kibana.yml

TITLE Kibana Server @@version

%NODE% %SERVER% %*

:finally

ENDLOCAL
|
59
src/server/bin/kibana.js
Executable file
59
src/server/bin/kibana.js
Executable file
|
@ -0,0 +1,59 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
var program = require('commander');
|
||||
var env = (process.env.NODE_ENV) ? process.env.NODE_ENV : 'development';
|
||||
var path = require('path');
|
||||
var packagePath = path.resolve(__dirname, '..', '..', '..', 'package.json');
|
||||
if (env !== 'development') {
|
||||
packagePath = path.resolve(__dirname, '..', 'package.json');
|
||||
}
|
||||
var package = require(packagePath);
|
||||
|
||||
|
||||
program.description('Kibana is an open source (Apache Licensed), browser based analytics and search dashboard for Elasticsearch.');
|
||||
program.version(package.version);
|
||||
program.option('-e, --elasticsearch <uri>', 'Elasticsearch instance');
|
||||
program.option('-c, --config <path>', 'Path to the config file');
|
||||
program.option('-p, --port <port>', 'The port to bind to', parseInt);
|
||||
program.option('-q, --quiet', 'Turns off logging');
|
||||
program.option('-H, --host <host>', 'The host to bind to');
|
||||
program.option('--plugins <path>', 'Path to scan for plugins');
|
||||
program.parse(process.argv);
|
||||
|
||||
// This needs to be set before the config is loaded. CONFIG_PATH is used to
|
||||
// override the kibana.yml config path which gets read when the config/index.js
|
||||
// is parsed for the first time.
|
||||
if (program.config) {
|
||||
process.env.CONFIG_PATH = program.config;
|
||||
}
|
||||
|
||||
// This needs to be set before the config is loaded. PLUGINS_PATH is used to
|
||||
// set the external plugins folder.
|
||||
if (program.plugins) {
|
||||
process.env.PLUGINS_FOLDER = program.plugins;
|
||||
}
|
||||
|
||||
// Load the config
|
||||
var config = require('../config');
|
||||
|
||||
if (program.elasticsearch) {
|
||||
config.elasticsearch = program.elasticsearch;
|
||||
}
|
||||
|
||||
if (program.port) {
|
||||
config.port = program.port;
|
||||
}
|
||||
|
||||
if (program.quiet) {
|
||||
config.quiet = program.quiet;
|
||||
}
|
||||
|
||||
if (program.host) {
|
||||
config.host = program.host;
|
||||
}
|
||||
|
||||
|
||||
// Load and start the server. This must happen after all the config changes
|
||||
// have been made since the server also requires the config.
|
||||
var server = require('../');
|
||||
server.start();
|
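Usage note (not part of the commit): the sketch below mirrors what bin/kibana.js does after parsing its flags, but applied programmatically; the config path and port values are hypothetical examples, and the relative require paths assume the caller lives next to src/server/bin.

// Sketch only: start the Kibana server programmatically (hypothetical values).
process.env.CONFIG_PATH = '/etc/kibana/kibana.yml'; // hypothetical path; must be set before requiring the config
var config = require('../config');                  // src/server/config/index.js
config.port = 5601;                                 // same effect as --port 5601
config.host = '127.0.0.1';                          // same effect as --host 127.0.0.1
var server = require('../');                        // src/server/index.js
server.start(function () {
  console.log('Kibana server started');
});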
31 src/server/bin/kibana.sh (Executable file → Normal file)
@@ -1,5 +1,4 @@
#!/bin/sh

SCRIPT=$0

# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
@@ -14,31 +13,9 @@ while [ -h "$SCRIPT" ] ; do
  fi
done

DIR=$(dirname "${SCRIPT}")
DIR=$(dirname "${SCRIPT}")/..
NODE=${DIR}/node/bin/node
SERVER=${DIR}/src/bin/kibana.js

if [ -x "${JAVA_HOME}/bin/java" ]; then
  JAVA="${JAVA_HOME}/bin/java"
else
  JAVA=$(which java)
fi
CONFIG_PATH="${DIR}/config/kibana.yml" NODE_ENV="production" exec "${NODE}" ${SERVER} ${@}

if [ ! -x "${JAVA}" ]; then
  echo "Could not find any executable Java binary. Please install Java in your PATH or set JAVA_HOME"
  exit 1
fi
>&2 echo "The Kibana Backend is starting up... be patient"

JAVA_OPTS="-Xmx512m $JAVA_OPTS"

# Clear gem paths so that we only use the gems inside the kibana.jar
export GEM_HOME=
export GEM_PATH=

# shellcheck disable=SC2086
KIBANA_VERSION=@@version \
CONFIG_PATH=${DIR}/../config/kibana.yml \
PLUGINS_FOLDER=${DIR}/../plugins \
RACK_ENV=production \
exec "${JAVA}" \
  $JAVA_OPTS \
  -jar "${DIR}/../lib/kibana.jar" "$@"
31 src/server/config/index.js (Normal file)
@@ -0,0 +1,31 @@
var _ = require('lodash');
var fs = require('fs');
var yaml = require('js-yaml');
var path = require('path');
var listPlugins = require('../lib/listPlugins');
var configPath = process.env.CONFIG_PATH || path.join(__dirname, 'kibana.yml');
var kibana = yaml.safeLoad(fs.readFileSync(configPath, 'utf8'));
var env = process.env.NODE_ENV || 'development';

// Check if the local public folder is present. This means we are running in
// the NPM module. If it's not there then we are running in the git root.
var public_folder = path.resolve(__dirname, '..', 'public');
try {
  fs.statSync(public_folder);
} catch (err) {
  public_folder = path.resolve(__dirname, '..', '..', 'kibana');
}

var config = module.exports = {
  port                    : kibana.port || 5601,
  host                    : kibana.host || '0.0.0.0',
  elasticsearch           : kibana.elasticsearch_url || 'http://localhost:9200',
  root                    : path.normalize(path.join(__dirname, '..')),
  quiet                   : false,
  public_folder           : public_folder,
  external_plugins_folder : process.env.PLUGINS_FOLDER || null,
  bundled_plugins_folder  : path.resolve(public_folder, 'plugins'),
  kibana                  : kibana
};

config.plugins = listPlugins(config);
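For orientation (not part of the diff): other server modules read these settings by requiring the config directory; a minimal sketch of that usage, assuming the caller sits inside src/server.

// Minimal sketch: consuming the resolved settings elsewhere in src/server.
var config = require('./config');
console.log(config.port);            // 5601 unless kibana.yml sets another port
console.log(config.elasticsearch);   // taken from elasticsearch_url in kibana.yml
console.log(config.plugins);         // plugin ids produced by lib/listPlugins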
@@ -5,7 +5,7 @@ port: 5601
host: "0.0.0.0"

# The Elasticsearch instance to use for all your queries.
elasticsearch: "http://localhost:9200"
elasticsearch_url: "http://localhost:9200"

# If your Elasticsearch is protected with basic auth:
# elasticsearch_username: user
@@ -22,16 +22,15 @@ kibana_index: ".kibana"
# The default application to load.
default_app_id: "discover"

# Time in seconds to wait for responses from the back end or elasticsearch.
# Note this should always be higher than "shard_timeout".
# Time in milliseconds to wait for responses from the back end or elasticsearch.
# This must be > 0
request_timeout: 60
request_timeout: 500000

# Time in milliseconds for Elasticsearch to wait for responses from shards.
# Note this should always be lower than "request_timeout".
# Set to 0 to disable (not recommended).
shard_timeout: 30000
# Set to 0 to disable.
shard_timeout: 0

# Set to false to have a complete disregard for the validity of the SSL
# certificate.
verify_ssl: true
@@ -1,178 +0,0 @@
# Disable Rake-environment-task framework detection by uncommenting/setting to false
# Warbler.framework_detection = false

# Warbler web application assembly configuration file
Warbler::Config.new do |config|
  # Features: additional options controlling how the jar is built.
  # Currently the following features are supported:
  #   - gemjar: package the gem repository in a jar file in WEB-INF/lib
  #   - executable: embed a web server and make the war executable
  #   - compiled: compile .rb files to .class files
  # config.features = %w(gemjar)

  # Application directories to be included in the webapp.
  config.dirs = %w(bin config routes lib public)

  # Additional files/directories to include, above those in config.dirs
  # config.includes = FileList["db"]

  # Additional files/directories to exclude
  # config.excludes = FileList["lib/tasks/*"]

  # Additional Java .jar files to include. Note that if .jar files are placed
  # in lib (and not otherwise excluded) then they need not be mentioned here.
  # JRuby and JRuby-Rack are pre-loaded in this list. Be sure to include your
  # own versions if you directly set the value
  # config.java_libs += FileList["lib/java/*.jar"]

  # Loose Java classes and miscellaneous files to be included.
  # config.java_classes = FileList["target/classes/**.*"]

  # One or more pathmaps defining how the java classes should be copied into
  # the archive. The example pathmap below accompanies the java_classes
  # configuration above. See http://rake.rubyforge.org/classes/String.html#M000017
  # for details of how to specify a pathmap.
  # config.pathmaps.java_classes << "%{target/classes/,}p"

  # Bundler support is built-in. If Warbler finds a Gemfile in the
  # project directory, it will be used to collect the gems to bundle
  # in your application. If you wish to explicitly disable this
  # functionality, uncomment here.
  # config.bundler = false

  # An array of Bundler groups to avoid including in the war file.
  # Defaults to ["development", "test", "assets"].
  # config.bundle_without = []

  # Other gems to be included. If you don't use Bundler or a gemspec
  # file, you need to tell Warbler which gems your application needs
  # so that they can be packaged in the archive.
  # For Rails applications, the Rails gems are included by default
  # unless the vendor/rails directory is present.
  # config.gems += ["activerecord-jdbcmysql-adapter", "jruby-openssl"]
  # config.gems << "tzinfo"
  # config.gems << "sinatra"

  # Uncomment this if you don't want to package rails gem.
  # config.gems -= ["rails"]

  # The most recent versions of gems are used.
  # You can specify versions of gems by using a hash assignment:
  # config.gems["rails"] = "2.3.10"

  # You can also use regexps or Gem::Dependency objects for flexibility or
  # finer-grained control.
  # config.gems << /^merb-/
  # config.gems << Gem::Dependency.new("merb-core", "= 0.9.3")

  # Include gem dependencies not mentioned specifically. Default is
  # true, uncomment to turn off.
  # config.gem_dependencies = false

  # Array of regular expressions matching relative paths in gems to be
  # excluded from the war. Defaults to empty, but you can set it like
  # below, which excludes test files.
  # config.gem_excludes = [/^(test|spec)\//]

  # Pathmaps for controlling how application files are copied into the archive
  # config.pathmaps.application = ["WEB-INF/%p"]

  # Name of the archive (without the extension). Defaults to the basename
  # of the project directory.
  config.jar_name = "kibana"

  # Name of the MANIFEST.MF template for the war file. Defaults to a simple
  # MANIFEST.MF that contains the version of Warbler used to create the war file.
  # config.manifest_file = "config/MANIFEST.MF"

  # When using the 'compiled' feature and specified, only these Ruby
  # files will be compiled. Default is to compile all \.rb files in
  # the application.
  # config.compiled_ruby_files = FileList['app/**/*.rb']

  # Determines if ruby files in supporting gems will be compiled.
  # Ignored unless compile feature is used.
  # config.compile_gems = false

  # When set it specify the bytecode version for compiled class files
  # config.bytecode_version = "1.6"

  # When set to true, Warbler will override the value of ENV['GEM_HOME'] even it
  # has already been set. When set to false it will use any existing value of
  # GEM_HOME if it is set.
  # config.override_gem_home = true

  # Allows for specifing custom executables
  # config.executable = ["rake", "bin/rake"]

  # Sets default (prefixed) parameters for the executables
  # config.executable_params = "do:something"

  # If set to true, moves jar files into WEB-INF/lib. Prior to version 1.4.2 of Warbler this was done
  # by default. But since 1.4.2 this config defaults to false. It may need to be set to true for
  # web servers that do not explode the WAR file.
  # Alternatively, this option can be set to a regular expression, which will
  # act as a jar selector -- only jar files that match the pattern will be
  # included in the archive.
  # config.move_jars_to_webinf_lib = false

  # === War files only below here ===

  # Path to the pre-bundled gem directory inside the war file. Default
  # is 'WEB-INF/gems'. Specify path if gems are already bundled
  # before running Warbler. This also sets 'gem.path' inside web.xml.
  # config.gem_path = "WEB-INF/vendor/bundler_gems"

  # Files for WEB-INF directory (next to web.xml). This contains
  # web.xml by default. If there is an .erb-File it will be processed
  # with webxml-config. You may want to exclude this file via
  # config.excludes.
  # config.webinf_files += FileList["jboss-web.xml"]

  # Files to be included in the root of the webapp. Note that files in public
  # will have the leading 'public/' part of the path stripped during staging.
  # config.public_html = FileList["public/**/*", "doc/**/*"]

  # Pathmaps for controlling how public HTML files are copied into the .war
  # config.pathmaps.public_html = ["%{public/,}p"]

  # Embedded webserver to use with the 'executable' feature. Currently supported
  # webservers are:
  #   * <tt>winstone</tt> (default) - Winstone 0.9.10 from sourceforge
  #   * <tt>jenkins-ci.winstone</tt> - Improved Winstone from Jenkins CI
  #   * <tt>jetty</tt> - Embedded Jetty from Eclipse
  # config.webserver = 'jetty'

  # Value of RAILS_ENV for the webapp -- default as shown below
  # config.webxml.rails.env = ENV['RAILS_ENV'] || 'production'

  # Application booter to use, one of :rack, :rails, or :merb (autodetected by default)
  # config.webxml.booter = :rails

  # Set JRuby to run in 1.9 mode.
  # config.webxml.jruby.compat.version = "1.9"

  # When using the :rack booter, "Rackup" script to use.
  # - For 'rackup.path', the value points to the location of the rackup
  #   script in the web archive file. You need to make sure this file
  #   gets included in the war, possibly by adding it to config.includes
  #   or config.webinf_files above.
  # - For 'rackup', the rackup script you provide as an inline string
  #   is simply embedded in web.xml.
  # The script is evaluated in a Rack::Builder to load the application.
  # Examples:
  # config.webxml.rackup.path = 'WEB-INF/hello.ru'
  # config.webxml.rackup = %{require './lib/demo'; run Rack::Adapter::Camping.new(Demo)}
  # config.webxml.rackup = require 'cgi' && CGI::escapeHTML(File.read("config.ru"))

  # Control the pool of Rails runtimes. Leaving unspecified means
  # the pool will grow as needed to service requests. It is recommended
  # that you fix these values when running a production server!
  # If you're using threadsafe! mode, you probably don't want to set these values,
  # since 1 runtime(default for threadsafe mode) will be enough.
  # config.webxml.jruby.min.runtimes = 2
  # config.webxml.jruby.max.runtimes = 4

  # JNDI data source name
  # config.webxml.jndi = 'jdbc/rails'
end
59 src/server/dev/index.js (Normal file)
@@ -0,0 +1,59 @@
/* jshint node:true */

var express = require('express');
var instrumentationMiddleware = require('./_instrumentation');
var amdRapperMiddleware = require('./_amd_rapper');

var glob = require('glob');
var path = require('path');
var join = path.join;
var rel = join.bind(null, __dirname);
var ROOT = rel('../../../');
var SRC = join(ROOT, 'src');
var NODE_MODULES = join(ROOT, 'node_modules');
var APP = join(SRC, 'kibana');
var TEST = join(ROOT, 'test');

module.exports = function (app) {
  app.use(instrumentationMiddleware({
    root: SRC,
    displayRoot: SRC,
    filter: function (filename) {
      return filename.match(/.*\/src\/.*\.js$/)
        && !filename.match(/.*\/src\/kibana\/bower_components\/.*\.js$/)
        && !filename.match(/.*\/src\/kibana\/utils\/(event_emitter|next_tick|rison)\.js$/);
    }
  }));

  app.use(instrumentationMiddleware({
    root: APP,
    displayRoot: SRC,
    filter: function (filename) {
      return filename.match(/.*\/src\/.*\.js$/)
        && !filename.match(/.*\/src\/kibana\/bower_components\/.*\.js$/)
        && !filename.match(/.*\/src\/kibana\/utils\/(event_emitter|next_tick|rison)\.js$/);
    }
  }));

  app.use(amdRapperMiddleware({
    root: ROOT
  }));

  app.use('/test', express.static(TEST));
  app.use('/src', express.static(SRC));
  app.use('/node_modules', express.static(NODE_MODULES));
  app.use('/specs', function (req, res) {
    var unit = join(ROOT, '/test/unit/');
    glob(join(unit, 'specs/**/*.js'), function (er, files) {
      var moduleIds = files
        .filter(function (filename) {
          return path.basename(filename).charAt(0) !== '_';
        })
        .map(function (filename) {
          return path.relative(unit, filename).replace(/\.js$/, '');
        });

      res.end(JSON.stringify(moduleIds));
    });
  });
};
65 src/server/index.js (Normal file)
@@ -0,0 +1,65 @@
/**
 * Module dependencies.
 */

var app = require('./app');
var http = require('http');
var config = require('./config');
var logger = require('./lib/logger');

/**
 * Create HTTP server.
 */

var server = http.createServer(app);
server.on('error', onError);
server.on('listening', onListening);

/**
 * Event listener for HTTP server "error" event.
 */

function onError(error) {
  if (error.syscall !== 'listen') {
    throw error;
  }

  // handle specific listen errors with friendly messages
  switch (error.code) {
    case 'EACCES':
      logger.error({ err: error }, 'Port %s requires elevated privileges', app.get('port'));
      process.exit(1);
      break;
    case 'EADDRINUSE':
      logger.error({ err: error }, 'Port %s is already in use', app.get('port'));
      process.exit(1);
      break;
    default:
      logger.error({ err: error });
      throw error;
  }
}

/**
 * Event listener for HTTP server "listening" event.
 */

function onListening() {
  var address = server.address();
  logger.info('Listening on %s:%d', address.address, address.port);
}

module.exports = {
  server: server,
  start: function (cb) {
    var port = parseInt(process.env.PORT, 10) || config.port || 3000;
    var host = process.env.HOST || config.host || '127.0.0.1';
    app.set('port', port);
    server.listen(port, host, cb);
  }
};

if (require.main === module) {
  module.exports.start();
}
@@ -1,44 +0,0 @@
require "rack/commonlogger"
require "colorize"

class ColorLogger < Rack::CommonLogger
  def log(env, status, header, begin_at)
    now = Time.now
    length = extract_content_length(header)

    case status
    when 300..399
      statusColor = :yellow
    when 400..499
      statusColor = :red
    when 500..599
      statusColor = :magenta
    else
      statusColor = :green
    end

    msg = (now.strftime('%b %d, %Y @ %H:%M:%S.%L')).light_black << ' '
    msg << env["REQUEST_METHOD"].light_blue << ' '
    msg << env["PATH_INFO"]
    msg << (env["QUERY_STRING"].empty? ? '' : "?#{env["QUERY_STRING"]}" ) << ' '
    msg << status.to_s.send(statusColor) << ' '
    msg << ((now - begin_at) * 1000).to_i.to_s << 'ms - ' << length
    msg << "\n"

    # If there is an error then we need to append the stack
    if env['sinatra.error'] && status != 404
      error = env['sinatra.error']
      msg << "#{error.message}\n #{error.backtrace.join("\n ")}".send(statusColor)
      msg << "\n"
    end

    logger = @logger || env['rack.errors']
    if logger.respond_to?(:write)
      logger.write(msg)
    else
      logger << msg
    end
  end
end
@@ -1,14 +0,0 @@
# This monkeypatch is needed to ensure the X-Frame-Options header is
# never set by rack-protection.
#
# http://stackoverflow.com/a/19232793/296172
#
module Rack
  module Protection
    class FrameOptions < Base
      def call(env)
        @app.call(env)
      end
    end
  end
end
@@ -1,46 +0,0 @@
require "rack/commonlogger"

class JSONLogger < Rack::CommonLogger
  def log(env, status, header, begin_at)
    now = Time.now
    length = extract_content_length(header)

    data = {
      "@timestamp" => now.iso8601,
      :status => status.to_s[0..3],
      :level => status < 399 ? "INFO" : 'ERROR',
      :name => "Kibana",
      :request_method => env["REQUEST_METHOD"],
      :request => env["PATH_INFO"] + (env["QUERY_STRING"].empty? ? "" : "#{env['QUERY_STRING']}"),
      :path => env["PATH_INFO"],
      :query_string => env["QUERY_STRING"],
      :remote_addr => env['HTTP_X_FORWARD_FOR'] || env["REMOTE_ADDR"],
      :remote_user => env["REMOTE_USER"],
      :http_version => env["HTTP_VERSION"],
      :content_length => length,
      :response_time => ((now - begin_at) * 1000).to_i # convert to milliseconds
    }

    # If there is an error then we need to append the stack
    if env['sinatra.error']
      error = env['sinatra.error']
      data[:error] = {
        :name => error.class.to_s,
        :message => error.message,
        :stack => error.backtrace
      }
    end

    data[:message] = "#{data[:request_method]} #{data[:path]+(data[:query_string].empty? ? '' : '?'+data[:query_string])} #{data[:status]} #{data[:response_time]}ms - #{data[:content_length].empty? ? '-' : data[:content_length]}"

    logger = @logger || env['rack.errors']
    msg = data.to_json+"\n"

    if logger.respond_to?(:write)
      logger.write(msg)
    else
      logger << msg
    end
  end
end
40 src/server/lib/JSONStream.js (Normal file)
@@ -0,0 +1,40 @@
var _ = require('lodash');
var Writable = require('stream').Writable;
var util = require('util');

var levels = {
  10: 'trace',
  20: 'debug',
  30: 'info',
  40: 'warn',
  50: 'error',
  60: 'fatal'
};

function JSONStream (options) {
  options = options || {};
  Writable.call(this, options);
}

util.inherits(JSONStream, Writable);

JSONStream.prototype._write = function (entry, encoding, callback) {
  entry = JSON.parse(entry.toString('utf8'));
  var env = process.env.NODE_ENV || 'development';

  var output = {
    '@timestamp': entry.time,
    'level': levels[entry.level],
    'message': entry.msg,
    'node_env': env,
    'request': entry.req,
    'response': entry.res
  };

  if (entry.error) output.error = entry.err;

  process.stdout.write(JSON.stringify(output) + "\n");
  callback();
};

module.exports = JSONStream;
@@ -1,36 +0,0 @@
require "rack/file"

PATH_INFO = 'PATH_INFO'

module Kibana
  class MultiStatic
    def initialize(app, options={})
      @app = app
      @prefix = options[:prefix] || '/'
      @servers = (options[:paths] || []).map {|p| Rack::File.new(File.expand_path(p)) }
    end

    def call(env)
      resp = nil
      orig_path = env[PATH_INFO]

      if orig_path.start_with? @prefix
        env[PATH_INFO] = orig_path.sub @prefix, '/'
      else
        return @app.call(env)
      end

      @servers.each do |server|
        resp = server.call(env)

        resp = nil if resp[0] == 404
        break if resp
      end

      return resp if resp

      env[PATH_INFO] = orig_path
      @app.call(env)
    end
  end
end
66 src/server/lib/StdOutStream.js (Normal file)
@@ -0,0 +1,66 @@
var bunyan = require('bunyan');
var ansicolors = require('ansicolors');
var Writable = require('stream').Writable;
var util = require('util');

var levels = {
  10: 'trace',
  20: 'debug',
  30: 'info',
  40: 'warn',
  50: 'error',
  60: 'fatal'
};

var colors = {
  10: 'blue',
  20: 'green',
  30: 'cyan',
  40: 'yellow',
  50: 'red',
  60: 'magenta'
};

var levelColor = function (code) {
  if (code < 299) {
    return ansicolors.green(code);
  }
  if (code < 399) {
    return ansicolors.yellow(code);
  }
  if (code < 499) {
    return ansicolors.magenta(code);
  }
  return ansicolors.red(code);
};

function StdOutStream(options) {
  Writable.call(this, options);
}

util.inherits(StdOutStream, Writable);

StdOutStream.prototype._write = function (entry, encoding, callback) {
  entry = JSON.parse(entry.toString('utf8'));

  var crayon = ansicolors[colors[entry.level]];
  var output = crayon(levels[entry.level].toUpperCase());
  output += ' ';
  output += ansicolors.brightBlack(entry.time);
  output += ' ';

  if (entry.req && entry.res) {
    output += util.format('%s %s ', entry.req.method, entry.req.url);
    output += levelColor(entry.res.statusCode);
    output += ansicolors.brightBlack(util.format(' %dms - %d', entry.res.responseTime, entry.res.contentLength));
  } else if (entry.msg) {
    output += entry.msg;
  }
  process.stdout.write(output + "\n");
  if (entry.err) {
    process.stdout.write(ansicolors.brightRed(entry.err.stack) + "\n");
  }
  callback();
};

module.exports = StdOutStream;
@@ -1,63 +0,0 @@
# Add the root of the project to the $LOAD_PATH, For some reason it seems
# to be getting lost when we use warble to make the jar. This fixes it :D
$LOAD_PATH.unshift(Kibana.global_settings[:root])

require "logger"
require "json"
require "lib/JSONLogger"
require "lib/ColorLogger"
require "routes/home"
require "sinatra/json"
require "routes/proxy"
require "lib/FrameOptions"
require "routes/plugins"
# require "rack/deflater"

class Logger
  alias_method :write, :<<
end

module Kibana
  class App < Sinatra::Base

    helpers Sinatra::JSON

    configure do
      logger = Logger.new(STDOUT)
      logger.formatter = proc do |severity, datetime, progname, msg|
        data = {
          '@timestamp' => datetime.iso8601,
          :level => severity,
          :name => progname || "Kibana",
          :message => msg
        }
        data.to_json + "\n"
      end
      set :logger, logger
    end

    configure :production do
      use JSONLogger, settings.logger unless Kibana.global_settings[:quiet]
    end

    configure :quiet do
      set :logger, false
    end

    configure :development do
      use ColorLogger, settings.logger unless Kibana.global_settings[:quiet]
    end

    not_found do
      json :status => 404, :message => "Not Found"
    end

    # use Rack::Deflater

    # Routes go here
    use Routes::Home
    use Routes::Proxy
    use Routes::Plugins

  end
end
6 src/server/lib/appHeaders.js (Normal file)
@@ -0,0 +1,6 @@
module.exports = function () {
  return function (req, res, next) {
    res.header('X-App-Name', 'kibana');
    next();
  };
};
@@ -1,7 +0,0 @@
module Kibana
  module Helpers
    def doSomething()
      "Do it!"
    end
  end
end
19 src/server/lib/listPlugins.js (Normal file)
@@ -0,0 +1,19 @@
var _ = require('lodash');
var glob = require('glob');
var path = require('path');

var plugins = function (dir) {
  if (!dir) return [];
  var files = glob.sync(path.join(dir, '*', 'index.js')) || [];
  return files.map(function (file) {
    return file.replace(dir, 'plugins').replace(/\.js$/, '');
  });
};

module.exports = function (config) {
  var bundled_plugin_ids = config.kibana.bundled_plugin_ids || [];
  var bundled_plugins = plugins(config.bundled_plugins_folder);
  var external_plugins = plugins(config.external_plugins_folder);
  return bundled_plugin_ids.concat(bundled_plugins, external_plugins);
};
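A short illustration (not from the commit; the folder names are hypothetical) of the mapping listPlugins performs:

// If config.bundled_plugins_folder were /opt/kibana/public/plugins and contained
//   /opt/kibana/public/plugins/table/index.js
//   /opt/kibana/public/plugins/vis_types/index.js
// then plugins(config.bundled_plugins_folder) would return
//   ['plugins/table/index', 'plugins/vis_types/index'],
// and the exported function concatenates bundled_plugin_ids, bundled, and external ids.
var listPlugins = require('./listPlugins');
var config = require('../config');
console.log(listPlugins(config));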
33 src/server/lib/logger.js (Normal file)
@@ -0,0 +1,33 @@
var _ = require('lodash');
var morgan = require('morgan');
var env = process.env.NODE_ENV || 'development';
var bunyan = require('bunyan');
var StdOutStream = require('./StdOutStream');
var JSONStream = require('./JSONStream');
var config = require('../config');
var stream = { stream: new JSONStream() };
var streams = [];

if (env === 'development') {
  stream.stream = new StdOutStream();
}

if (!config.quiet) {
  streams.push(stream);
}

var logger = module.exports = bunyan.createLogger({
  name: 'Kibana',
  streams: streams,
  serializers: _.assign(bunyan.stdSerializers, {
    res: function (res) {
      if (!res) return res;
      return {
        statusCode: res.statusCode,
        responseTime: res.responseTime,
        contentLength: res.contentLength
      };
    }
  })
});
15 src/server/lib/requestLogger.js (Normal file)
@@ -0,0 +1,15 @@
var logger = require('./logger');
module.exports = function (options) {
  return function (req, res, next) {
    var startTime = new Date();
    var end = res.end;
    res.end = function (chunk, encoding) {
      var contentLength = parseInt(res._header['content-length'], 10);
      res.responseTime = (new Date()).getTime() - startTime.getTime();
      res.contentLength = isNaN(contentLength) ? 0 : contentLength;
      end.call(res, chunk, encoding);
      logger.info({ req: req, res: res }, '%s %s %d - %dms', req.method, req.url, res.statusCode, res.responseTime);
    };
    next();
  };
};
@@ -1,68 +0,0 @@
require "rubygems"
require "bundler/setup"
require "puma"
require "colorize"
require "json"
require "#{Kibana.global_settings[:root]}/lib/app"

# Require the application
module Kibana
  module Server

    DEFAULTS = {
      :host => '0.0.0.0',
      :port => 5601,
      :threads => '0:16',
      :verbose => false
    }

    def self.log(msg)
      return if Kibana.global_settings[:quiet]
      if ENV['RACK_ENV'] == 'production'
        data = {
          "@timestamp" => Time.now.iso8601,
          :level => 'INFO',
          :name => 'Kibana',
          :message => msg
        }
        puts data.to_json
      else
        message = (Time.now.strftime('%b %d, %Y @ %H:%M:%S.%L')).light_black << ' '
        message << msg.yellow
        puts message
      end
    end

    def self.run(options = {})

      options = DEFAULTS.merge(options)
      min, max = options[:threads].split(':', 2)

      app = Kibana::App.new()
      server = Puma::Server.new(app)

      # Configure server
      begin
        server.add_tcp_listener(options[:host], options[:port])
      rescue Errno::EADDRINUSE
        log("tcp://#{options[:host]}:#{options[:port]} is in use")
        exit(1)
      end

      server.min_threads = min
      server.max_threads = max

      begin
        log("Kibana server started on tcp://#{options[:host]}:#{options[:port]} in #{ENV['RACK_ENV']} mode.")
        server.run.join
      rescue Interrupt
        log("Kibana server gracefully stopping, waiting for requests to finish")
        server.stop(true)
        log("Kibana server stopped.")
      end

    end

  end
end
|
|||
require "sinatra/base"
|
||||
require "sinatra/json"
|
||||
require "yaml"
|
||||
require "timeout"
|
||||
require "openssl"
|
||||
|
||||
module Kibana
|
||||
module Routes
|
||||
class Base < Sinatra::Base
|
||||
helpers Sinatra::JSON
|
||||
configure do
|
||||
config = Kibana.global_settings[:config].clone()
|
||||
config['elasticsearch'] = Kibana.global_settings[:elasticsearch]
|
||||
config['port'] = Kibana.global_settings[:port].to_i
|
||||
config['request_timeout'] = Kibana.global_settings[:request_timeout]
|
||||
config['shard_timeout'] = Kibana.global_settings[:shard_timeout]
|
||||
|
||||
|
||||
|
||||
set :root, Kibana.global_settings[:root]
|
||||
set :public_folder, Kibana.global_settings[:public_folder]
|
||||
set :bundled_plugins_folder, Kibana.global_settings[:bundled_plugins_folder]
|
||||
set :external_plugins_folder, Kibana.global_settings[:external_plugins_folder]
|
||||
set :httponly, true
|
||||
set :config, config
|
||||
set :bundled_plugin_ids, config['bundled_plugin_ids'] || []
|
||||
|
||||
set :show_exceptions, false
|
||||
set :raise_errors, false
|
||||
set :dump_errors, false
|
||||
end
|
||||
|
||||
error do
|
||||
status 500
|
||||
end
|
||||
|
||||
error OpenSSL::SSL::SSLError do
|
||||
status 502
|
||||
json :message => "SSL handshake with Elasticsearch failed"
|
||||
end
|
||||
|
||||
error Errno::ECONNREFUSED do
|
||||
status 502
|
||||
json :message => "Unable to connect to Elasticsearch"
|
||||
end
|
||||
|
||||
error Timeout::Error do
|
||||
status 504
|
||||
json :message => "Timeout while waiting for Elasticsearch"
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
end
|
|
@@ -1,46 +0,0 @@
require "routes/base"

module Kibana
  module Routes
    class Home < Base

      get "/" do
        File.read(File.join(settings.public_folder, 'index.html'))
      end

      get "/config" do
        # Clone the settings object and change the elasticsearch attribute
        # to the proxy for elasticsearch
        data = settings.config.clone()
        plugins = external_plugins.concat(bundled_plugins)
        data['plugins'] = plugins

        # Remove keys we do not want to expose
        ["elasticsearch", "elasticsearch_username", "elasticsearch_password"].each { |key| data.delete(key) }

        json data
      end

      private

      def external_plugins
        plugins_ids_in(settings.external_plugins_folder)
      end

      def bundled_plugins
        plugins_ids_in(settings.bundled_plugins_folder).concat(settings.bundled_plugin_ids)
      end

      def plugins_ids_in(dir)
        if dir
          indexes = Dir.glob(File.join(dir, '*', 'index.js'))
        else
          indexes = []
        end

        indexes.map { |path| path.sub(dir, 'plugins').sub(/\.js$/, '') }
      end

    end
  end
end
17 src/server/routes/index.js (Normal file)
@@ -0,0 +1,17 @@
var express = require('express');
var router = express.Router();
var config = require('../config');
var _ = require('lodash');

router.get('/config', function (req, res, next) {
  var keys = [
    'kibana_index',
    'default_app_id',
    'shard_timeout'
  ];
  var data = _.pick(config.kibana, keys);
  data.plugins = config.plugins;
  res.json(data);
});

module.exports = router;
@@ -1,15 +0,0 @@
require "routes/base"
require "lib/MultiStatic"

module Kibana
  module Routes
    class Plugins < Base
      use Kibana::MultiStatic,
        prefix: '/plugins/',
        paths: [
          settings.bundled_plugins_folder,
          settings.external_plugins_folder
        ].compact
    end
  end
end
52 src/server/routes/proxy.js (Normal file)
@@ -0,0 +1,52 @@
var logger = require('../lib/logger');
var express = require('express');
var router = module.exports = express.Router();
var httpProxy = require('http-proxy');
var config = require('../config');
var url = require('url');
var target = url.parse(config.elasticsearch);
var proxy = new httpProxy.createProxyServer({});
var buffer = require('buffer');

proxy.on('proxyReq', function (proxyReq, req, res, options) {
  // To support the elasticsearch_preserve_host feature we need to change the
  // host header to the target host header.
  if (config.kibana.elasticsearch_preserve_host) {
    proxyReq.setHeader('host', target.host);
  }

  // Support for handling basic auth
  if (config.kibana.elasticsearch_username && config.kibana.elasticsearch_password) {
    var code = new buffer.Buffer(config.kibana.elasticsearch_username + ':' + config.kibana.elasticsearch_password);
    var auth = 'Basic ' + code.toString('base64');
    proxyReq.setHeader('authorization', auth);
  }
});

// Error handling for the proxy
proxy.on('error', function (err, req, res) {
  var code = 502;
  var body = { message: 'Bad Gateway' };

  if (err.code === 'ECONNREFUSED') {
    body.message = 'Unable to connect to Elasticsearch';
  }

  if (err.message === 'DEPTH_ZERO_SELF_SIGNED_CERT') {
    body.message = 'SSL handshake with Elasticsearch failed';
  }

  res.writeHead(502, { 'Content-Type': 'application/json' });
  res.end(JSON.stringify(body));
});

router.use(function (req, res, next) {
  var options = {
    target: config.elasticsearch,
    secure: config.kibana.verify_ssl,
    xfwd: true,
    timeout: (config.kibana.request_timeout) * 1000
  };
  proxy.web(req, res, options);
});
@@ -1,19 +0,0 @@
require "routes/base"
require "rack/reverse_proxy"

module Kibana
  module Routes
    class Proxy < Base
      # Rack middleware goes here
      config = settings.config
      use Rack::ReverseProxy do
        reverse_proxy_options timeout: config["request_timeout"]
        @global_options[:verify_ssl] = config["verify_ssl"].nil? ? true : config["verify_ssl"]
        reverse_proxy(/^\/elasticsearch(.*)$/, "#{config["elasticsearch"]}$1",
          username: config["elasticsearch_username"],
          password: config["elasticsearch_password"],
          preserve_host: config["elasticsearch_preserve_host"].nil? ? true : config["elasticsearch_preserve_host"])
      end
    end
  end
end
6 src/server/views/error.jade (Normal file)
@@ -0,0 +1,6 @@
extends layout

block content
  h1= message
  h2= error.status
  pre #{error.stack}
7 src/server/views/layout.jade (Normal file)
@@ -0,0 +1,7 @@
doctype html
html
  head
    title= title
    link(rel='stylesheet', href='/styles/main.css')
  body
    block content
@@ -12,9 +12,6 @@ module.exports = function (grunt) {
    'requirejs',
    'clean:unneeded_source_in_build',
    'copy:server_src',
    'download_jruby',
    'install_gems',
    'warble',
    'replace:dist',
    'copy:dist',
    'compile_dist_readme',
@@ -22,6 +19,9 @@ module.exports = function (grunt) {
    'make_plugin_dir',
    'copy:plugin_readme',
    'describe_bundled_plugins',
    'npm_install_kibana',
    'clean:test_from_node_modules',
    'download_node_binaries',
    'copy:versioned_dist',
    'create_packages'
  ]);
@@ -28,6 +28,7 @@ module.exports = function (grunt) {
      '<%= app %>/public/{css-builder,normalize}.js'
    ]
  },
  dev_only_plugins: '<%= build %>/src/plugins/<%= devPlugins %>'
  dev_only_plugins: '<%= build %>/src/plugins/<%= devPlugins %>',
  test_from_node_modules: '<%= build %>/dist/kibana/src/node_modules/**/*test*'
  };
};
@ -1,5 +1,6 @@
|
|||
module.exports = function (grunt) {
|
||||
var version = grunt.config.get('pkg.version');
|
||||
var platforms = grunt.config.get('platforms');
|
||||
var config = {
|
||||
|
||||
kibana_src: {
|
||||
|
@ -12,16 +13,22 @@ module.exports = function (grunt) {
|
|||
server_src: {
|
||||
files: [
|
||||
{
|
||||
src: '<%= server %>/Gemfile',
|
||||
dest: '<%= build %>/kibana/Gemfile'
|
||||
src: '<%= root %>/package.json',
|
||||
dest: '<%= build %>/kibana/package.json'
|
||||
},
|
||||
{
|
||||
src: '<%= server %>/Gemfile.lock',
|
||||
dest: '<%= build %>/kibana/Gemfile.lock'
|
||||
src: '<%= server %>/app.js',
|
||||
dest: '<%= build %>/kibana/app.js'
|
||||
},
|
||||
{
|
||||
src: '<%= server %>/bin/initialize',
|
||||
dest: '<%= build %>/kibana/bin/initialize'
|
||||
src: '<%= server %>/index.js',
|
||||
dest: '<%= build %>/kibana/index.js'
|
||||
},
|
||||
{
|
||||
expand: true,
|
||||
cwd: '<%= server %>/bin/',
|
||||
src: '**',
|
||||
dest: '<%= build %>/kibana/bin'
|
||||
},
|
||||
{
|
||||
expand: true,
|
||||
|
@ -40,6 +47,12 @@ module.exports = function (grunt) {
|
|||
cwd: '<%= server %>/routes/',
|
||||
src: '**',
|
||||
dest: '<%= build %>/kibana/routes'
|
||||
},
|
||||
{
|
||||
expand: true,
|
||||
cwd: '<%= server %>/views/',
|
||||
src: '**',
|
||||
dest: '<%= build %>/kibana/views'
|
||||
}
|
||||
]
|
||||
},
|
||||
|
@ -49,9 +62,9 @@ module.exports = function (grunt) {
|
|||
files: [
|
||||
{
|
||||
expand: true,
|
||||
cwd: '<%= build %>/kibana/',
|
||||
src: '*.jar',
|
||||
dest: '<%= build %>/dist/kibana/lib/'
|
||||
cwd: '<%= build %>/kibana',
|
||||
src: '**',
|
||||
dest: '<%= build %>/dist/kibana/src'
|
||||
},
|
||||
{
|
||||
expand: true,
|
||||
|
@ -64,14 +77,7 @@ module.exports = function (grunt) {
|
|||
|
||||
versioned_dist: {
|
||||
options: { mode: true },
|
||||
files: [
|
||||
{
|
||||
expand: true,
|
||||
cwd: '<%= build %>/dist/kibana',
|
||||
src: '**',
|
||||
dest: '<%= build %>/dist/kibana-' + version
|
||||
}
|
||||
]
|
||||
files: []
|
||||
},
|
||||
|
||||
plugin_readme: {
|
||||
|
@ -85,5 +91,20 @@ module.exports = function (grunt) {
|
|||
|
||||
};
|
||||
|
||||
platforms.forEach(function (platform) {
|
||||
config.versioned_dist.files.push({
|
||||
expand: true,
|
||||
cwd: '<%= build %>/dist/kibana',
|
||||
src: '**',
|
||||
dest: '<%= build %>/dist/kibana-' + version + '-' + platform
|
||||
});
|
||||
config.versioned_dist.files.push({
|
||||
expand: true,
|
||||
cwd: '<%= root %>/.node_binaries/' + platform,
|
||||
src: '**',
|
||||
dest: '<%= build %>/dist/kibana-' + version + '-' + platform + '/node'
|
||||
});
|
||||
});
|
||||
|
||||
return config;
|
||||
};
|
||||
|
|
|
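To make the versioned_dist loop above concrete, this is roughly what it pushes for one platform; the version string 4.0.0 below is a hypothetical stand-in for whatever pkg.version resolves to:

// Sketch only: the two entries pushed for platform 'darwin-x64', assuming a hypothetical version '4.0.0'.
config.versioned_dist.files.push({
  expand: true,
  cwd: '<%= build %>/dist/kibana',
  src: '**',
  dest: '<%= build %>/dist/kibana-4.0.0-darwin-x64'
});
config.versioned_dist.files.push({
  expand: true,
  cwd: '<%= root %>/.node_binaries/darwin-x64',
  src: '**',
  dest: '<%= build %>/dist/kibana-4.0.0-darwin-x64/node'   // the bundled node runtime ends up here
});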
@@ -1,3 +1,6 @@
var config = require('../utils/server-config');
var unitTestUrl = require('util').format('http://localhost:%d/test/unit/', config.kibana.port);

module.exports = {
  options: {
    log: true,
@@ -6,9 +9,7 @@ module.exports = {
  },
  unit: {
    options: {
      urls: [
        'http://localhost:8000/test/unit/'
      ]
      urls: [ unitTestUrl ]
    }
  }
};
};

@@ -1,7 +1,7 @@
module.exports = function (grunt) {
  var jrubyPath = grunt.config.get('jrubyPath');
  var jruby = jrubyPath + '/bin/jruby';
  var cmd = grunt.config.get('src') + '/server/bin/initialize';
  var os = require('os');
  var arch = os.arch();
  var platform = os.platform();

  // config:
  // wait: should task wait until the script exits before finishing
@@ -19,16 +19,6 @@ module.exports = function (grunt) {
  var args = ['-H', '127.0.0.1'];

  var config = {
    mri_server: {
      options: options,
      cmd: cmd,
      args: args
    },
    jruby_server: {
      options: options,
      cmd: jruby,
      args: [cmd].concat(args)
    },
    built_kibana: {
      options: {
        wait: false,
@@ -36,7 +26,7 @@ module.exports = function (grunt) {
        quiet: true,
        failOnError: false
      },
      cmd: './target/<%= pkg.name + "-" + pkg.version %>/bin/kibana',
      cmd: './target/<%= pkg.name + "-" + pkg.version %>-' + platform + '-' + arch + '/bin/kibana',
      args: args
    }
  };

@@ -1,3 +1,5 @@
var config = require('../utils/server-config');
var unitTestUrl = require('util').format('http://localhost:%d/test/unit/?saucelabs=true', config.kibana.port);
var buildId = 'test build';
if (process.env.TRAVIS_BUILD_ID) {
  buildId = 'travis build #' + process.env.TRAVIS_BUILD_ID;
@@ -8,7 +10,7 @@ module.exports = {
  options: {
    username: 'kibana',
    key: process.env.SAUCE_ACCESS_KEY,
    urls: ['http://localhost:8000/test/unit/?saucelabs=true'],
    urls: [ unitTestUrl ],
    testname: 'Kibana Browser Tests',
    build: buildId,
    concurrency: 10,

@@ -38,22 +38,5 @@ module.exports = function (grunt) {
    delete config.test;
  }

  var ruby_server = grunt.config.get('ruby_server');
  if (ruby_server) {
    config.kibana_server = {
      files: [
        'src/server/**/*.rb',
        'src/server/**/*.yml'
      ],
      tasks: [
        'stop:' + ruby_server,
        'run:' + ruby_server
      ],
      options: {
        spawn: false
      }
    };
  }

  return config;
};

@@ -10,22 +10,30 @@ module.exports = function (grunt) {
    var target = grunt.config.get('target');
    var packageName = grunt.config.get('pkg.name');
    var version = grunt.config.get('pkg.version');
    var archiveName = join(target, packageName + '-' + version);
    var distPath = join(grunt.config.get('build'), 'dist');
    var platforms = grunt.config.get('platforms');

    var tgzCmd = 'tar -zcvf ' + archiveName + '.tar.gz kibana-' + version;
    var zipCmd = 'zip -r ' + archiveName + '.zip kibana-' + version;
    var createPackage = function (platform) {
      var options = { cwd: distPath };
      var name = packageName + '-' + version + '-' + platform;
      var archiveName = join(target, name);
      var tgzCmd = 'tar -zcf ' + archiveName + '.tar.gz ' + name;
      var zipCmd = 'zip -rq ' + archiveName + '.zip ' + name;

      var options = { cwd: distPath };
      if (platform === 'windows') {
        zipCmd = 'zip -rq -ll ' + archiveName + '.zip ' + name;
      }

      mkdirp.mkdirpAsync(target)
        .then(function (arg) {
          return exec(tgzCmd, options);
        })
        .then(function (arg) {
          return exec(zipCmd, options);
        })
        .finally(done);
      return mkdirp.mkdirpAsync(target)
        .then(function (arg) {
          return exec(tgzCmd, options);
        })
        .then(function (arg) {
          return exec(zipCmd, options);
        });
    };

    Promise.map(platforms, createPackage).finally(done);

  });
};
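The reworked task above builds one .tar.gz and one .zip per platform and only calls grunt's async callback once Promise.map has settled. The promise-returning exec and mkdirp.mkdirpAsync come from the top of the file, outside these hunks; a sketch of what those requires presumably look like:

// Sketch only: assumed requires at the top of the create_packages task.
var Promise = require('bluebird');
var mkdirp = Promise.promisifyAll(require('mkdirp'));         // adds mkdirp.mkdirpAsync(...)
var exec = Promise.promisify(require('child_process').exec);  // exec(cmd, opts) now returns a promise
var join = require('path').join;

With something like that in place, createPackage returns one promise per platform, and Promise.map(platforms, createPackage).finally(done) lets the grunt task finish only after every archive has been written.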
@@ -6,8 +6,7 @@ module.exports = function (grunt) {
    'less',
    'jade',
    'esvm:dev',
    'ruby_server',
    'maybe_start_server',
    'maybe_start_kibana',
    'watch'
  ];

@@ -1,46 +0,0 @@
var zlib = require('zlib');
var tar = require('tar');
var request = require('request');
var mkdirp = require('mkdirp');
var ProgressBar = require('progress');
var fs = require('fs');

module.exports = function (grunt) {
  grunt.registerTask('download_jruby', 'Downloads and installs jruby', function () {
    var done = this.async();
    var jrubyPath = grunt.config.get('jrubyPath');
    var jrubyVersion = grunt.config.get('jrubyVersion');
    var url = 'http://jruby.org.s3.amazonaws.com/downloads/' + jrubyVersion + '/jruby-bin-' + jrubyVersion + '.tar.gz';

    fs.stat(jrubyPath, function (err, stat) {
      if (err) {
        mkdirp(jrubyPath, function (err) {
          if (err) return done(err);
          var unzip = zlib.createGunzip();
          var out = tar.Extract({ path: jrubyPath, strip: 1 });
          out.on('close', done).on('error', done);
          var req = request.get(url);
          var bar;
          if (!process.env.JENKINS_HOME) {
            req.on('response', function (resp) {
              var total = parseInt(resp.headers['content-length'], 10);
              bar = new ProgressBar('[:bar] :percent :etas', {
                complete: '=',
                incomplete: ' ',
                width: 80,
                clear: true,
                total: total
              });
            });
            req.on('data', function (buffer) {
              bar.tick(buffer.length);
            });
          }
          req.pipe(unzip).pipe(out);
        });
      } else {
        done();
      }
    });
  });
};

tasks/download_node_binaries.js (new file)
@@ -0,0 +1,65 @@
var _ = require('lodash');
var zlib = require('zlib');
var tar = require('tar');
var request = require('request');
var mkdirp = require('mkdirp');
var fs = require('fs');
var join = require('path').join;
var filesPatern = _.template('node-v<%- version %>-<%- platform %>.tar.gz');
var urlPattern = _.template('http://nodejs.org/dist/v<%- version %>/<%- file %>');
var Promise = require('bluebird');

module.exports = function (grunt) {
  grunt.registerTask('download_node_binaries', 'Download the node.js binaries', function () {
    var platforms = _.without(grunt.config.get('platforms'), 'windows');
    var rootPath = grunt.config.get('root');
    var version = grunt.config.get('nodeVersion');

    var handle404 = function (response) {
      if (response.statusCode !== 200) {
        throw new Error(response.request.href + ' failed with a ' + response.statusCode);
      }
    };

    var downloadWindows = function (cb) {
      return new Promise(function (resolve, reject) {
        var dest = join(rootPath, '.node_binaries', 'windows');
        fs.stat(dest, function (err) {
          if (!err) return resolve(); // skip downloading if we already have them
          var url = urlPattern({ version: version, file: 'node.exe'});
          mkdirp(dest, function (err) {
            if (err) return reject(err);
            var out = fs.createWriteStream(join(dest, 'node.exe'));
            out.on('close', resolve).on('error', reject);
            var req = request.get(url);
            req.on('response', handle404);
            req.pipe(out);
          });
        });
      });
    };

    var download = function (platform) {
      return new Promise(function (resolve, reject) {
        var dest = join(rootPath, '.node_binaries', platform);
        fs.stat(dest, function (err) {
          if (!err) return resolve(); // skip downloading if we already have them
          var file = filesPatern({ version: version, platform: platform });
          var url = urlPattern({ version: version, file: file });
          mkdirp(dest, function (err) {
            if (err) return reject(err);
            var unzip = zlib.createGunzip();
            var out = tar.Extract({ path: dest, strip: 1 });
            out.on('close', resolve).on('error', reject);
            var req = request.get(url);
            req.on('response', handle404);
            req.pipe(unzip).pipe(out);
          });
        });
      });
    };

    return Promise.map(platforms, download).then(downloadWindows).nodeify(this.async());
  });
};
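As a worked example of the two lodash templates above, assuming nodeVersion is set to 0.10.35 as elsewhere in this pull request:

// filesPatern / urlPattern resolved by hand for one platform.
var _ = require('lodash');
var filesPatern = _.template('node-v<%- version %>-<%- platform %>.tar.gz');
var urlPattern = _.template('http://nodejs.org/dist/v<%- version %>/<%- file %>');

var file = filesPatern({ version: '0.10.35', platform: 'darwin-x64' });
// -> 'node-v0.10.35-darwin-x64.tar.gz'
var url = urlPattern({ version: '0.10.35', file: file });
// -> 'http://nodejs.org/dist/v0.10.35/node-v0.10.35-darwin-x64.tar.gz'
// The tarball is gunzipped and extracted (strip: 1) into .node_binaries/darwin-x64;
// windows is special-cased above as a plain node.exe download into .node_binaries/windows.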
@@ -1,20 +0,0 @@
var child_process = require('child_process');
var join = require('path').join;
module.exports = function (grunt) {
  grunt.registerTask('install_gems', 'Install Ruby Gems', function () {
    var done = this.async();
    var gemfile = join(grunt.config.get('root'), 'Gemfile');
    var jrubyPath = grunt.config.get('jrubyPath');
    var jruby = jrubyPath + '/bin/jruby -S';
    var command = jruby + ' gem install bundler && ' + jruby + ' bundle install --gemfile ' + gemfile;
    child_process.exec(command, function (err, stdout, stderr) {
      if (err) {
        grunt.log.error(stderr);
        return done(err);
      }
      grunt.log.writeln(stdout);
      return done();
    });
  });
};

tasks/kibana_server.js (new file)
@@ -0,0 +1,14 @@
module.exports = function (grunt) {
  grunt.registerTask('kibana_server', function (keepalive) {
    var done = this.async();
    var config = require('../src/server/config');
    config.quiet = true;
    var server = require('../src/server');

    server.start(function () {
      grunt.log.ok('Server started on port', config.kibana.port);
      if (keepalive !== 'keepalive') done();
    });
  });
};
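This registers the new Node server as a grunt task; with grunt's usual colon-argument syntax the keepalive flag keeps the process in the foreground, since done() is then never called (usage sketch, assuming the task file is loaded by the project Gruntfile):

// Usage sketch:
//   grunt kibana_server             -> start the server, then let grunt move on to the next task
//   grunt kibana_server:keepalive   -> start the server and block until interrupted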
@@ -1,39 +0,0 @@
module.exports = function (grunt) {
  grunt.registerTask('maybe_start_server', function () {
    var http = require('http');

    var req = http.request({
      method: 'HEAD',
      path: '/',
      host: 'localhost',
      port: 8000
    });

    function onResponse(res) {
      if (res.headers.pong === 'Kibana 4 Dev Server') {
        grunt.log.writeln('server already started');
      } else {
        grunt.log.error('another server is already running at localhost:8000!');
      }
      done(res);
    }

    function onError() {
      grunt.task.run(['server']);
      done();
    }

    var done = (function (cb) {
      return function (res) {
        req.removeListener('error', onError);
        req.removeListener('response', onResponse);
        if (res) res.socket.destroy();
        cb();
      };
    })(this.async());

    req.on('error', onError);
    req.on('response', onResponse);
    req.end();
  });
};

tasks/maybe_start_kibana.js (new file)
@@ -0,0 +1,61 @@
module.exports = function (grunt) {
  var config = require('./utils/server-config');

  var maybeStartServer = function (options) {
    return function () {
      var http = require('http');
      var opts = {
        method: 'HEAD',
        path: '/',
        host: 'localhost',
        port: options.port
      };

      grunt.log.debug('checking for server', JSON.stringify(opts));

      var req = http.request(opts);

      function onResponse(res) {
        grunt.log.debug('Server responded with', res.statusCode);
        var app = res.headers['x-app-name'];

        if (res.statusCode === 200 && app && app === 'kibana') {
          grunt.log.ok('Kibana server already started on port', options.port);
        } else {
          grunt.log.error('Another server is already running on port', options.port);
          process.exit(1);
        }
        done(res);
      }

      function onError(err) {
        if (err.code !== 'ECONNREFUSED') {
          grunt.log.error('Kibana server check failed', err);
        }

        grunt.config.set(options.name, true);
        grunt.task.run(options.tasks);
        done();
      }

      var done = (function (cb) {
        return function (res) {
          req.removeListener('error', onError);
          req.removeListener('response', onResponse);
          if (res) res.socket.destroy();
          cb();
        };
      })(this.async());

      req.on('error', onError);
      req.on('response', onResponse);
      req.end();
    };
  };

  grunt.registerTask('maybe_start_kibana', maybeStartServer({
    name: 'kibana-server',
    port: config.kibana.port,
    tasks: ['kibana_server']
  }));
};
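The check above keys off an x-app-name response header; a minimal sketch of the counterpart on the server side (this exact middleware is an assumption — the header is set somewhere in the new Express server, outside this diff):

// Sketch only: how the Node server presumably identifies itself for the HEAD check above.
app.use(function (req, res, next) {
  res.setHeader('X-App-Name', 'kibana');   // read back as res.headers['x-app-name'] by the grunt task
  next();
});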
@@ -1,13 +1,11 @@
var child_process = require('child_process');
var join = require('path').join;
module.exports = function (grunt) {
  grunt.registerTask('warble', 'Creates an executable jar.', function () {
  grunt.registerTask('npm_install_kibana', 'NPM install kibana server into dist', function () {
    var done = this.async();
    var jrubyPath = grunt.config.get('jrubyPath');
    var command = jrubyPath + '/bin/jruby -S warble';
    var options = {
      cwd: join(grunt.config.get('build'), 'kibana')
    };
    var cwd = join(grunt.config.get('build'), 'dist', 'kibana', 'src');
    var command = 'npm install --production';
    var options = { cwd: cwd };
    child_process.exec(command, options, function (err, stdout, stderr) {
      if (err) {
        grunt.log.error(stderr);
@@ -18,3 +16,5 @@ module.exports = function (grunt) {
    });
  });
};

@@ -1,34 +0,0 @@
var request = require('request');
module.exports = function (grunt) {
  grunt.registerTask('ruby_server', function () {
    var done = this.async();

    request.get('http://localhost:5601/config', function (err, resp, body) {
      // err is a failed response, no server is running
      if (err) {
        // run mri_server by default
        var tasks = ['run:mri_server'];
        grunt.config.set('ruby_server', 'mri_server');

        // if jruby flag is set, use jruby
        if (grunt.option('use-jruby')) {
          tasks = [
            'download_jruby',
            'install_gems',
            'run:jruby_server',
            'wait_for_jruby'
          ];
          grunt.config.set('ruby_server', 'jruby_server');
        }

        grunt.task.run(tasks);

      // response means server is already running
      } else {
        grunt.log.error('Another ruby server is running on localhost:5601.');
      }

      done();
    });
  });
};

@@ -1,3 +1,6 @@
var os = require('os');
var config = require('./utils/server-config');

module.exports = function (grunt) {
  grunt.registerTask('run_build', [
    'build',
@@ -7,12 +10,14 @@ module.exports = function (grunt) {
    'wait:built_kibana'
  ]);

  var arch = os.arch();
  var platform = os.platform();
  var join = require('path').join;
  var extract = require('./utils/spawn')(
    'tar',
    [
      '-xzf',
      grunt.config.process('<%= pkg.name %>-<%= pkg.version %>.tar.gz')
      grunt.config.process('<%= pkg.name %>-<%= pkg.version %>-' + platform + '-' + arch + '.tar.gz')
    ],
    join(__dirname, '../target')
  );
@@ -22,6 +27,6 @@ module.exports = function (grunt) {
  });

  grunt.registerTask('_open_built_kibana', function () {
    require('opn')('http://localhost:5601');
    require('opn')('http://localhost:' + config.kibana.port);
  });
};
};

@@ -1,15 +0,0 @@
module.exports = function (grunt) {
  grunt.registerTask('server', function (keepalive) {
    var done = this.async();
    var DevServer = require('../test/utils/dev_server');
    var server = new DevServer();

    server.listen(8000).then(function () {
      console.log('visit http://localhost:8000');

      if (keepalive !== 'keepalive') {
        done();
      }
    });
  });
};

@@ -24,8 +24,7 @@ module.exports = function (grunt) {

  var tasks = [
    'jshint',
    'ruby_server',
    'maybe_start_server',
    'maybe_start_kibana',
    'jade',
    'less',
    getTestTask()
@@ -35,8 +34,7 @@ module.exports = function (grunt) {

  grunt.registerTask('quick-test', function () {
    var tasks = [
      'ruby_server',
      'maybe_start_server',
      'maybe_start_kibana',
      getTestTask()
    ];
    grunt.task.run(tasks);
@@ -44,14 +42,12 @@ module.exports = function (grunt) {

  grunt.registerTask('coverage', [
    'blanket',
    'ruby_server',
    'maybe_start_server',
    'maybe_start_kibana',
    'mocha:coverage'
  ]);

  grunt.registerTask('test:watch', [
    'ruby_server',
    'maybe_start_server',
    'maybe_start_kibana',
    'watch:test'
  ]);
};

tasks/utils/server-config.js (new file)
@@ -0,0 +1 @@
module.exports = require('../../src/server/config');

@@ -1,18 +0,0 @@
var request = require('request');
module.exports = function (grunt) {

  grunt.registerTask('wait_for_jruby', 'Is it started yet?', function () {
    var done = this.async();
    function checkJRuby() {
      request('http://127.0.0.1:5601', function (err, resp) {
        if (err) {
          setTimeout(checkJRuby, 1000);
        } else {
          done();
        }
      });
    }
    checkJRuby();

  });
};

@@ -1,145 +0,0 @@
/* jshint node:true */

var connect = require('connect');
var http = require('http');
var Promise = require('bluebird');

var instrumentationMiddleware = require('./_instrumentation');
var amdRapperMiddleware = require('./_amd_rapper');
var proxy = require('http-proxy').createProxyServer({});

var glob = require('glob');
var path = require('path');
var join = path.join;
var rel = join.bind(null, __dirname);
var ROOT = rel('../../../');
var SRC = join(ROOT, 'src');
var APP = join(SRC, 'kibana');
var TEST = join(ROOT, 'test');
var PLUGINS = join(SRC, 'plugins');

module.exports = function DevServer(opts) {
  opts = opts || {};

  var server = this;
  var app = connect();
  var httpServer = http.createServer(app);

  // Kibana Backend Proxy
  app.use(function (req, res, next) {
    // Proxy config and es requests to the Kibana Backend
    if (/^\/(config|elasticsearch\/)/.test(req.url)) {
      return proxy.web(req, res, { target: 'http://localhost:5601' });
    }

    next();
  });

  app.use(instrumentationMiddleware({
    root: SRC,
    displayRoot: SRC,
    filter: function (filename) {
      return filename.match(/.*\/src\/.*\.js$/)
        && !filename.match(/.*\/src\/kibana\/bower_components\/.*\.js$/)
        && !filename.match(/.*\/src\/kibana\/utils\/(event_emitter|next_tick|rison)\.js$/);
    }
  }));

  app.use(instrumentationMiddleware({
    root: APP,
    displayRoot: SRC,
    filter: function (filename) {
      return filename.match(/.*\/src\/.*\.js$/)
        && !filename.match(/.*\/src\/kibana\/bower_components\/.*\.js$/)
        && !filename.match(/.*\/src\/kibana\/utils\/(event_emitter|next_tick|rison)\.js$/);
    }
  }));

  app.use(amdRapperMiddleware({
    root: ROOT
  }));

  app.use(connect.static(ROOT));
  app.use(connect.static(APP));
  app.use('/test', connect.static(TEST));
  app.use('/plugins', connect.static(PLUGINS));

  app.use('/specs', function (req, res) {
    var unit = join(ROOT, '/test/unit/');
    glob(join(unit, 'specs/**/*.js'), function (er, files) {
      var moduleIds = files
        .filter(function (filename) {
          return path.basename(filename).charAt(0) !== '_';
        })
        .map(function (filename) {
          return path.relative(unit, filename).replace(/\.js$/, '');
        });

      res.end(JSON.stringify(moduleIds));
    });
  });

  // respond to the "maybe_start_server" pings
  app.use(function (req, res, next) {
    if (req.method !== 'HEAD' || req.url !== '/') return next();
    res.statusCode === 200;
    res.setHeader('Pong', 'Kibana 4 Dev Server');
    res.end();
  });

  app.use(function (req, res, next) {
    if (req.url !== '/') return next();
    res.statusCode = 303;
    res.setHeader('Location', '/src/');
    res.end();
  });

  // prevent chrome's stupid "this page is in spanish" on the directories page
  app.use(function (req, res, next) {
    res.setHeader('Content-Language', 'en');
    next();
  });

  // allow browsing directories
  app.use(connect.directory(ROOT));

  server.listenOnFirstOpenPort = function (ports) {
    var options = ports.slice(0);

    // wrap this logic in an IIFE so that we can call it again later
    return (function attempt() {
      var port = options.shift();
      if (!port) return Promise.reject(new Error('None of the supplied options succeeded'));

      return server.listen(port)
        // filter out EADDRINUSE errors and call attempt again
        .catch(function (err) {
          if (err.code === 'EADDRINUSE') return attempt();
          throw err;
        });
    })();
  };

  server.listen = function (port) {
    return new Promise(function (resolve, reject) {
      var done = function (err) {
        httpServer.removeListener('error', done);
        httpServer.removeListener('listening', done);

        // pass the error along
        if (err) return reject(err);

        resolve(server.port = httpServer.address().port);
      };

      // call done with an error
      httpServer.on('error', done, true);
      // call done without any args
      httpServer.on('listening', done, true);

      httpServer.listen(port);
    });
  };

  server.close = httpServer.close.bind(httpServer);
};