fix style issues in the server
commit 38a935fa10 (parent dd9ff1852f)

10 changed files with 32 additions and 30 deletions
@@ -7,4 +7,7 @@ indent_size = 2
 end_of_line = lf
 charset = utf-8
 trim_trailing_whitespace = true
-insert_final_newline = true
+insert_final_newline = true
+
+[*.md]
+insert_final_newline = false

@@ -34,7 +34,7 @@ module.exports = function (grunt) {
       'Gruntfile.js',
       '<%= root %>/tasks/**/*.js',
       '<%= src %>/kibana/*.js',
-      '<%= src %>/server/*.js',
+      '<%= src %>/server/**/*.js',
       '<%= src %>/kibana/{components,directives,factories,filters,plugins,registry,services,utils}/**/*.js',
       '<%= unitTestDir %>/**/*.js',
       '!<%= unitTestDir %>/specs/vislib/fixture/**/*'

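A note on the one pattern that changed: '*' stops at a directory boundary, while '**' recurses, so the new glob also lints files in nested directories under server/. A quick standalone illustration using the minimatch library that grunt's file globbing is built on (the paths here are made up):

var minimatch = require('minimatch');

console.log(minimatch('src/server/app.js', 'src/server/*.js'));           // true
console.log(minimatch('src/server/lib/config.js', 'src/server/*.js'));    // false: '*' does not cross '/'
console.log(minimatch('src/server/lib/config.js', 'src/server/**/*.js')); // true: '**' matches nested dirs
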
@@ -32,7 +32,6 @@ if (app.get('env') === 'development') {
 // TODO: WE might want to move the middleware to each of the individual routes
 // so we don't have weird conflicts in the future.
 app.use('/elasticsearch', proxy);
-app.use('/enforcer', require('./lib/enforce'));
 
 app.use(bodyParser.json());
 app.use(bodyParser.urlencoded({ extended: false }));

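For readers less familiar with the mounting pattern above: app.use with a path prefix scopes middleware to requests under that prefix, and mounting order decides which handler sees a request first. A minimal standalone sketch (the handler body is illustrative, not the real proxy):

var express = require('express');
var app = express();

// Only requests whose path starts with /elasticsearch reach this middleware.
app.use('/elasticsearch', function (req, res, next) {
  console.log('proxying', req.method, req.url);
  next();
});

app.listen(3000);
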
@@ -52,6 +52,6 @@ verify_ssl: true
 # Set the path to where you would like the process id file to be created.
 # pid_file: /var/run/kibana.pid
 
-# If you would like to send the log output to a file you can set the path below.
+# If you would like to send the log output to a file you can set the path below.
 # This will also turn off the STDOUT log output.
-# log_file: ./kibana.log
+# log_file: ./kibana.log

@@ -25,12 +25,15 @@ var levelColor = function (code) {
   if (code < 299) {
     return ansicolors.green(code);
   }
 
   if (code < 399) {
-    return ansicolors.yellow(code)
-  };
+    return ansicolors.yellow(code);
+  }
+
   if (code < 499) {
-    return ansicolors.magenta(code)
-  };
+    return ansicolors.magenta(code);
+  }
+
   return ansicolors.red(code);
 };

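Pieced together from the hunk above, the cleaned-up helper maps an HTTP status code to a console color: 2xx green, 3xx yellow, 4xx magenta, anything else red. A self-contained sketch of that post-change shape (reconstructed from the diff, not copied from the repository):

var ansicolors = require('ansicolors');

var levelColor = function (code) {
  if (code < 299) {
    return ansicolors.green(code);
  }

  if (code < 399) {
    return ansicolors.yellow(code);
  }

  if (code < 499) {
    return ansicolors.magenta(code);
  }

  return ansicolors.red(code);
};

console.log(levelColor(200), levelColor(302), levelColor(404), levelColor(503));
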
@@ -56,10 +59,13 @@ StdOutStream.prototype._write = function (entry, encoding, callback) {
   } else if (entry.msg) {
     output += entry.msg;
   }
-  process.stdout.write(output + "\n");
+
+  process.stdout.write(output + '\n');
+
   if (entry.err) {
-    process.stdout.write(ansicolors.brightRed(entry.err.stack) + "\n");
+    process.stdout.write(ansicolors.brightRed(entry.err.stack) + '\n');
   }
 
   callback();
 };

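The _write(entry, encoding, callback) signature above is Node's writable-stream contract, and since each log entry arrives as an object the stream presumably runs in object mode. A stripped-down sketch of a stream with that shape (the class name and formatting are illustrative, not the actual module):

var util = require('util');
var Writable = require('stream').Writable;

function PrettyStdOutStream() {
  Writable.call(this, { objectMode: true }); // entries are objects, not buffers
}
util.inherits(PrettyStdOutStream, Writable);

PrettyStdOutStream.prototype._write = function (entry, encoding, callback) {
  process.stdout.write((entry.msg || '') + '\n');
  if (entry.err) {
    process.stdout.write(entry.err.stack + '\n');
  }
  callback();
};

// Usage: new PrettyStdOutStream().write({ msg: 'server listening on 5601' });
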
@@ -10,18 +10,21 @@ if (config.kibana.kibana_elasticsearch_username && config.kibana.kibana_elastics
   uri.auth = util.format('%s:%s', config.kibana.kibana_elasticsearch_username, config.kibana.kibana_elasticsearch_password);
 }
 
-var ssl = { rejectUnauthorized: config.kibana.verify_ssl }
+var ssl = { rejectUnauthorized: config.kibana.verify_ssl };
 
 if (config.kibana.kibana_elasticsearch_client_crt && config.kibana.kibana_elasticsearch_client_key) {
-  ssl.cert = fs.readFileSync(config.kibana.kibana_elasticsearch_client_crt , 'utf8');
-  ssl.key = fs.readFileSync(config.kibana.kibana_elasticsearch_client_key , 'utf8');
+  ssl.cert = fs.readFileSync(config.kibana.kibana_elasticsearch_client_crt, 'utf8');
+  ssl.key = fs.readFileSync(config.kibana.kibana_elasticsearch_client_key, 'utf8');
 }
 
 if (config.kibana.ca) {
-  ssl.ca = fs.readFileSync(config.kibana.ca , 'utf8');
+  ssl.ca = fs.readFileSync(config.kibana.ca, 'utf8');
 }
 
 module.exports = new elasticsearch.Client({
   host: url.format(uri),
   ssl: ssl,
   pingTimeout: config.kibana.ping_timeout,
   log: function (config) {
     this.error = function (err) {
       logger.error({ err: err });

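As a reminder of how the uri.auth assignment above ends up in the client's host string: Node's legacy url module folds the auth property back in when formatting. A tiny standalone example (the credentials are placeholders):

var url = require('url');

var uri = url.parse('https://localhost:9200');
uri.auth = 'kibana:changeme'; // same shape as util.format('%s:%s', username, password)

console.log(url.format(uri)); // https://kibana:changeme@localhost:9200/
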
File diff suppressed because one or more lines are too long
@@ -9,7 +9,7 @@ var config = require('../config');
 var streams = [];
 
 // Set the default stream based on the enviroment. If we are on development then
-// then we are going to create a pretty stream. Everytyhing else will get the
+// then we are going to create a pretty stream. Everytyhing else will get the
 // JSON stream to stdout.
 var defaultStream;
 if (env === 'development') {

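The streams array and the entry shape seen earlier (entry.msg, entry.err.stack) look like a bunyan-style logger. A minimal sketch of how such a streams array is typically handed to bunyan, assuming bunyan and illustrative stream settings rather than this file's actual wiring:

var bunyan = require('bunyan');

var streams = [];

// Outside development, fall back to plain JSON log lines on stdout.
var defaultStream = { level: 'info', stream: process.stdout };
streams.push(defaultStream);

module.exports = bunyan.createLogger({ name: 'kibana', streams: streams });
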
@@ -6,7 +6,8 @@ var logger = require('./logger');
 var config = require('../config');
 
 function waitForPong() {
-  return client.ping({ requestTimeout: 1500 }).catch(function (err) {
+  return client.ping()
+    .catch(function (err) {
     if (!(err instanceof NoConnections)) throw err;
 
     logger.info('Unable to connect to elasticsearch at %s. Retrying in 2.5 seconds.', config.elasticsearch);

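The log message above implies a ping-and-retry loop around client.ping(). A self-contained sketch of that pattern, with the 2.5 second delay taken from the message; the host, the use of a native Promise for the delay, and the overall structure are assumptions, not the module's actual code:

var elasticsearch = require('elasticsearch');

var client = new elasticsearch.Client({ host: 'http://localhost:9200' });
var NoConnections = elasticsearch.errors.NoConnections;

function waitForPong() {
  return client.ping().catch(function (err) {
    if (!(err instanceof NoConnections)) throw err;
    console.log('Unable to connect to elasticsearch. Retrying in 2.5 seconds.');
    return new Promise(function (resolve) { setTimeout(resolve, 2500); })
      .then(waitForPong);
  });
}
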
@@ -67,8 +67,7 @@ router.use(function (req, res, next) {
 
   if (uri.auth) {
     var auth = new Buffer(uri.auth);
-    base64_auth = auth.toString('base64');
-    req.headers.authorization = "Basic " + base64_auth;
+    req.headers.authorization = 'Basic ' + auth.toString('base64');
   }
 
   var options = {

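The replacement line above simply inlines the base64 step: an HTTP Basic Authorization header is the word "Basic" plus the base64 of "user:password". A standalone check with placeholder credentials (on current Node, Buffer.from replaces the deprecated new Buffer constructor used in the hunk):

var credentials = 'user:secret'; // same shape as uri.auth
var header = 'Basic ' + Buffer.from(credentials).toString('base64');

console.log(header); // Basic dXNlcjpzZWNyZXQ=
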
@@ -87,10 +86,10 @@ router.use(function (req, res, next) {
   if (customCA) {
     options.agentOptions = { ca: [customCA] };
   }
 
   // Add client key and certificate for elasticsearch if needed.
   if (clientCrt && clientKey) {
-    if (! options.agentOptions ) {
+    if (!options.agentOptions) {
       options.agentOptions = {};
     }
     options.agentOptions.cert = clientCrt;

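The agentOptions assembled above carry TLS material for the outgoing elasticsearch request. The sketch below shows the equivalent settings on a plain Node https.Agent; the file paths are placeholders and the proxy's actual HTTP client may consume agentOptions differently:

var fs = require('fs');
var https = require('https');

var agent = new https.Agent({
  ca: [fs.readFileSync('config/ca.pem', 'utf8')],     // extra CA to trust (customCA)
  cert: fs.readFileSync('config/client.crt', 'utf8'), // client certificate (clientCrt)
  key: fs.readFileSync('config/client.key', 'utf8')   // matching private key (clientKey)
});

https.get({ host: 'localhost', port: 9200, path: '/', agent: agent }, function (res) {
  console.log('elasticsearch responded with', res.statusCode);
});
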