Closes #2720 - Support CA Certificates with the proxy

- Closes #2720
- Added `ca` option to kibana.yml
- Rewrote the Proxy to use Request and honor the custom CA
- Added some packages to support the proxy
- Removed http-proxy
This commit is contained in:
Chris Cowan 2015-01-23 19:12:19 -07:00
parent 2bed1807d1
commit afa294f75c
3 changed files with 93 additions and 39 deletions

View file

@@ -43,13 +43,15 @@
"debug": "~2.1.1",
"express": "~4.10.6",
"glob": "^4.3.2",
"http-proxy": "^1.8.1",
"jade": "~1.8.2",
"js-yaml": "^3.2.5",
"less-middleware": "1.0.x",
"lodash": "^2.4.1",
"morgan": "~1.5.1",
"serve-favicon": "~2.2.0"
"request": "^2.40.0",
"serve-favicon": "~2.2.0",
"semver": "^4.2.0",
"ssl-root-cas": "^1.1.7"
},
"devDependencies": {
"bluebird": "~2.0.7",
@@ -86,7 +88,6 @@
"opn": "~1.0.0",
"path-browserify": "0.0.0",
"progress": "^1.1.8",
"request": "^2.40.0",
"requirejs": "~2.1.14",
"rjs-build-analysis": "0.0.3",
"simple-git": "^0.11.0",

View file

@@ -10,6 +10,8 @@ elasticsearch_url: "http://localhost:9200"
# If your Elasticsearch is protected with basic auth:
# elasticsearch_username: user
# elasticsearch_password: pass
# elasticsearch_username: test
# elasticsearch_password: test
# preserve_elasticsearch_host true will send the hostname specified in `elasticsearch`. If you set it to false,
# then the host you use to connect to *this* Kibana instance will be sent.
@@ -34,3 +36,7 @@ shard_timeout: 0
# certificate.
verify_ssl: true
# If you need to provide a CA certificate for your Elasticsearch instance, put
# the path of the pem file here.
# ca: /path/to/your/CA.pem

View file

@@ -1,52 +1,99 @@
// Proxy router: forwards incoming requests to the configured Elasticsearch
// instance via `request`, honoring a custom CA certificate (`ca` in
// kibana.yml), basic auth, SSL verification (`verify_ssl`) and the
// configured request timeout.
var config = require('../config');
var request = require('request');
var buffer = require('buffer');
var querystring = require('querystring');
var express = require('express');
var _ = require('lodash');
var fs = require('fs');
var https = require('https');
var url = require('url');

var target = url.parse(config.elasticsearch);

// If the target is backed by SSL and a CA is provided via the config,
// inject that CA into the global agent's root-CA list so `request` can
// validate the server certificate.
var hasCustomCA = false;
if (/^https/.test(target.protocol) && config.kibana.ca) {
  var sslRootCAs = require('ssl-root-cas/latest');
  sslRootCAs.inject();
  var ca = fs.readFileSync(config.kibana.ca, 'utf8');
  https.globalAgent.options.ca.push(ca);
  hasCustomCA = true;
}

// Create the router (this module exports the router itself).
var router = module.exports = express.Router();

// We need to capture the raw body before moving on: `request` is handed
// the body as a string, and nothing upstream buffers it for us.
router.use(function (req, res, next) {
  req.rawBody = '';
  req.setEncoding('utf8');
  req.on('data', function (chunk) {
    req.rawBody += chunk;
  });
  req.on('end', next);
});

// Create the proxy middleware.
router.use(function (req, res, next) {
  var uri = _.defaults({}, target);
  var options = {
    url: uri.protocol + '//' + uri.host + req.path,
    method: req.method,
    headers: { },
    strictSSL: config.kibana.verify_ssl,
    timeout: config.kibana.request_timeout
  };

  // If the server has a custom CA we need to add it to the agent options.
  if (hasCustomCA) {
    options.agentOptions = { ca: https.globalAgent.options.ca };
  }

  // Only send the body if it's a PATCH, PUT, or POST.
  if (_.contains(['PATCH', 'PUT', 'POST'], options.method) && req.rawBody) {
    options.body = req.rawBody;
  }

  // If there is a query string we need to stringify it and send it with
  // the request.
  if (Object.keys(req.query).length !== 0) {
    options.url += '?' + querystring.stringify(req.query);
  }

  // Support for handling basic auth.
  if (config.kibana.elasticsearch_username && config.kibana.elasticsearch_password) {
    var code = new buffer.Buffer(config.kibana.elasticsearch_username + ':' + config.kibana.elasticsearch_password);
    var auth = 'Basic ' + code.toString('base64');
    options.headers.authorization = auth;
  }

  // To support the elasticsearch_preserve_host feature we need to change the
  // host header to the target host header. I don't quite understand the value
  // of this... but it's a feature we had before so I guess we are keeping it.
  if (config.kibana.elasticsearch_preserve_host) {
    options.headers.host = target.host;
  }

  // Create the request and pipe the response.
  var esRequest = request(options);
  esRequest.on('error', function (err) {
    // Map common connection failures to friendlier messages. Fix: the
    // previous code declared `code` but never used it and responded with
    // an implicit 200; the pre-rewrite handler sent 502, so restore that.
    var code = 502;
    var body = { message: 'Bad Gateway' };
    if (err.code === 'ECONNREFUSED') {
      body.message = 'Unable to connect to Elasticsearch';
    }
    if (err.message === 'DEPTH_ZERO_SELF_SIGNED_CERT') {
      body.message = 'SSL handshake with Elasticsearch failed';
    }
    body.err = err.message;
    res.status(code).json(body);
  });
  esRequest.pipe(res);
});