Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)
Support for multiple Elasticsearch nodes and sniffing (#21928)
* Node sniffing
* update monitoring
* cleaning
* remove tests
* rewrite tests using the proxy
* fix monitoring mocha tests
* fix payload
* return full error
* default interval false
* add sniff settings to monitoring
* add docs for sniff settings
* re-add index search integration test
* catch parse error
* capital
* fix merge
* hapi upgrade
* return
* pass by prefix
* rm empty test fille
* split error handling
* fix merge
* update recent elasticsearch.url references
* prettier vs eslint
* transport.request, include query params
* mega comment
* revert elasticsearchUrl
* safe hosts
* more docs es.url references
* remove unused ml/esServerUrl
* log if both set
This commit is contained in:
parent a94fd11a3e
commit b5b64f4706
38 changed files with 239 additions and 440 deletions
@@ -24,8 +24,8 @@
 # The Kibana server's name. This is used for display purposes.
 #server.name: "your-hostname"

-# The URL of the Elasticsearch instance to use for all your queries.
-#elasticsearch.url: "http://localhost:9200"
+# The URLs of the Elasticsearch instances to use for all your queries.
+#elasticsearch.hosts: ["http://localhost:9200"]

 # When this setting's value is true Kibana uses the hostname specified in the server.host
 # setting. When the value of this setting is false, Kibana uses the hostname of the host
@@ -67,7 +67,7 @@ and {ref}/cluster-update-settings.html[Cluster update settings].
 more information, see <<monitoring-settings-kb,Monitoring settings in {kib}>>.

 . Identify where to send monitoring data. {kib} automatically
-sends metrics to the {es} cluster specified in the `elasticsearch.url` setting
+sends metrics to the {es} cluster specified in the `elasticsearch.hosts` setting
 in the `kibana.yml` file. This property has a default value of
 `http://localhost:9200`. +
 +
@@ -34,8 +34,8 @@ The *Monitoring* page in {kib} is empty.

 . Confirm that {kib} is seeking monitoring data from the appropriate {es} URL.
 By default, data is retrieved from the cluster specified in the
-`elasticsearch.url` setting in the `kibana.yml` file. If you want to retrieve it
-from a different monitoring cluster, set `xpack.monitoring.elasticsearch.url`.
+`elasticsearch.hosts` setting in the `kibana.yml` file. If you want to retrieve it
+from a different monitoring cluster, set `xpack.monitoring.elasticsearch.hosts`.
 See <<monitoring-settings-kb>>.

 . Confirm that there is monitoring data available at that URL. It is stored in
@@ -25,8 +25,8 @@ a gold license, you can send data from multiple clusters to the same monitoring
 cluster and view them all through the same instance of {kib}.

 By default, data is retrieved from the cluster specified in the
-`elasticsearch.url` value in the `kibana.yml` file. If you want to retrieve it
-from a different cluster, set `xpack.monitoring.elasticsearch.url`.
+`elasticsearch.hosts` value in the `kibana.yml` file. If you want to retrieve it
+from a different cluster, set `xpack.monitoring.elasticsearch.hosts`.

 To learn more about typical monitoring architectures,
 see {stack-ov}/how-monitoring-works.html[How monitoring works] and
@@ -63,13 +63,13 @@ must have a proxy that provides an HTTPS endpoint for {es}.

 --

-.. Specify the HTTPS protocol in the `elasticsearch.url` setting in the {kib}
+.. Specify the HTTPS protocol in the `elasticsearch.hosts` setting in the {kib}
 configuration file, `kibana.yml`:
 +
 --
 [source,yaml]
 --------------------------------------------------------------------------------
-elasticsearch.url: "https://<your_elasticsearch_host>.com:9200"
+elasticsearch.hosts: ["https://<your_elasticsearch_host>.com:9200"]
 --------------------------------------------------------------------------------
 --

@@ -100,13 +100,13 @@ must have a proxy that provides an HTTPS endpoint for {es}.

 --

-.. Specify the HTTPS URL in the `xpack.monitoring.elasticsearch.url` setting in
+.. Specify the HTTPS URL in the `xpack.monitoring.elasticsearch.hosts` setting in
 the {kib} configuration file, `kibana.yml`
 +
 --
 [source,yaml]
 --------------------------------------------------------------------------------
-xpack.monitoring.elasticsearch.url: "https://<your_monitoring_cluster>:9200"
+xpack.monitoring.elasticsearch.hosts: ["https://<your_monitoring_cluster>:9200"]
 --------------------------------------------------------------------------------
 --

@@ -37,9 +37,9 @@ Set to `true` (default) to enable {monitoring} in {kib}. Unlike the
 monitoring back-end does not run and {kib} stats are not sent to the monitoring
 cluster.

-`xpack.monitoring.elasticsearch.url`::
+`xpack.monitoring.elasticsearch.hosts`::
 Specifies the location of the {es} cluster where your monitoring data is stored.
-By default, this is the same as the `elasticsearch.url`. This setting enables
+By default, this is the same as `elasticsearch.hosts`. This setting enables
 you to use a single {kib} instance to search and visualize data in your
 production cluster as well as monitor data sent to a dedicated monitoring
 cluster.
@@ -102,7 +102,7 @@ services:
 image: {docker-image}
 environment:
 SERVER_NAME: kibana.example.org
-ELASTICSEARCH_URL: http://elasticsearch.example.org
+ELASTICSEARCH_HOSTS: http://elasticsearch.example.org
 ----------------------------------------------------------

 Since environment variables are translated to CLI arguments, they take
@@ -117,7 +117,7 @@ images:
 [horizontal]
 `server.name`:: `kibana`
 `server.host`:: `"0"`
-`elasticsearch.url`:: `http://elasticsearch:9200`
+`elasticsearch.hosts`:: `http://elasticsearch:9200`
 `xpack.monitoring.ui.container.elasticsearch.enabled`:: `true`

 NOTE: The setting `xpack.monitoring.ui.container.elasticsearch.enabled` is not
@@ -82,10 +82,10 @@ http.port: 9200
 transport.host: <external ip>
 transport.tcp.port: 9300 - 9400
 --------
-. Make sure Kibana is configured to point to your local client node. In `kibana.yml`, the `elasticsearch.url` should be set to
-`localhost:9200`.
+. Make sure Kibana is configured to point to your local client node. In `kibana.yml`, the `elasticsearch.hosts` setting should be set to
+`["localhost:9200"]`.
 +
 --------
 # The Elasticsearch instance to use for all your queries.
-elasticsearch.url: "http://localhost:9200"
+elasticsearch.hosts: ["http://localhost:9200"]
 --------
@@ -39,6 +39,12 @@ Elasticsearch. This value must be a positive integer.

 `elasticsearch.shardTimeout:`:: *Default: 30000* Time in milliseconds for Elasticsearch to wait for responses from shards. Set to 0 to disable.

+`elasticsearch.sniffInterval:`:: *Default: false* Time in milliseconds between requests to check Elasticsearch for an updated list of nodes.
+
+`elasticsearch.sniffOnStart:`:: *Default: false* Attempt to find other Elasticsearch nodes on startup.
+
+`elasticsearch.sniffOnConnectionFault:`:: *Default: false* Update the list of Elasticsearch nodes immediately following a connection fault.
+
 `elasticsearch.ssl.certificate:` and `elasticsearch.ssl.key:`:: Optional settings that provide the paths to the PEM-format SSL
 certificate and key files. These files are used to verify the identity of Kibana to Elasticsearch and are required when `xpack.ssl.verification_mode` in Elasticsearch is set to either `certificate` or `full`.

@@ -57,8 +63,8 @@ requests for end-users being executed as the identity tied to the configured cer
 `elasticsearch.startupTimeout:`:: *Default: 5000* Time in milliseconds to wait for Elasticsearch at Kibana startup before
 retrying.

-`elasticsearch.url:`:: *Default: "http://localhost:9200"* The URL of the Elasticsearch instance to use for all your
-queries.
+`elasticsearch.hosts:`:: *Default: "http://localhost:9200"* The URLs of the Elasticsearch instances to use for all your
+queries. All nodes listed here must be on the same cluster.

 `elasticsearch.username:` and `elasticsearch.password:`:: If your Elasticsearch is protected with basic authentication,
 these settings provide the username and password that the Kibana server uses to perform maintenance on the Kibana index at
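Editor's note: the sniff settings documented above correspond to options of the legacy `elasticsearch` JavaScript client that Kibana used at the time. A minimal sketch of that mapping, with hypothetical node URLs (this is an illustration, not Kibana's actual wiring):

```js
// Assumed setup: a local two-node cluster; option names mirror the kibana.yml keys above.
const elasticsearch = require('elasticsearch');

const client = new elasticsearch.Client({
  hosts: ['http://localhost:9200', 'http://localhost:9201'], // elasticsearch.hosts (same cluster)
  sniffOnStart: true,             // elasticsearch.sniffOnStart
  sniffInterval: 60000,           // elasticsearch.sniffInterval in ms; false disables polling
  sniffOnConnectionFault: true    // elasticsearch.sniffOnConnectionFault
});

client.ping().then(() => console.log('connected to one of the configured nodes'));
```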
@@ -64,7 +64,7 @@ Generated plugins receive a handful of scripts that can be used during developme
 Start kibana and have it include this plugin. You can pass any arguments that you would normally send to `bin/kibana`

 ```
-yarn start --elasticsearch.url http://localhost:9220
+yarn start --elasticsearch.hosts http://localhost:9220
 ```

 - `yarn build`
@@ -21,7 +21,7 @@ See the [kibana contributing guide](https://github.com/elastic/kibana/blob/maste
 Start kibana and have it include this plugin. You can pass any arguments that you would normally send to `bin/kibana`

 ```
-yarn start --elasticsearch.url http://localhost:9220
+yarn start --elasticsearch.hosts http://localhost:9220
 ```

 - `yarn build`
@@ -105,7 +105,7 @@ function applyConfigOverrides(rawConfig, opts, extraCliOptions) {
 }
 }

-if (opts.elasticsearch) set('elasticsearch.url', opts.elasticsearch);
+if (opts.elasticsearch) set('elasticsearch.hosts', opts.elasticsearch.split(','));
 if (opts.port) set('server.port', opts.port);
 if (opts.host) set('server.host', opts.host);
 if (opts.quiet) set('logging.quiet', true);
@@ -144,7 +144,7 @@ export default function (program) {
 command
 .description('Run the kibana server')
 .collectUnknownOptions()
-.option('-e, --elasticsearch <uri>', 'Elasticsearch instance')
+.option('-e, --elasticsearch <uri1,uri2>', 'Elasticsearch instances')
 .option(
 '-c, --config <path>',
 'Path to the config file, can be changed with the CONFIG_PATH environment variable as well. ' +
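Editor's note: together with the serve.js change above, the single `-e` flag now fans out into an array. A small sketch of the parsing step, with made-up node URLs:

```js
// Mirrors the opts.elasticsearch.split(',') override in applyConfigOverrides above.
const opts = { elasticsearch: 'http://node-1:9200,http://node-2:9200' };
const hosts = opts.elasticsearch.split(',');
console.log(hosts); // ['http://node-1:9200', 'http://node-2:9200']
```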
@@ -18,11 +18,13 @@
 */

 import Boom from 'boom';
-import { resolveApi } from './api_server/server';
 import { resolve, join, sep } from 'path';
-import { has, isEmpty } from 'lodash';
-import setHeaders from '../elasticsearch/lib/set_headers';
+import url from 'url';
+import { has, isEmpty, head } from 'lodash';
+
+import { resolveApi } from './api_server/server';
 import { addExtensionSpecFilePath } from './api_server/spec';
+import setHeaders from '../elasticsearch/lib/set_headers';

 import {
 ProxyConfigCollection,
@@ -37,7 +39,7 @@ export default function (kibana) {
 const apps = [];
 return new kibana.Plugin({
 id: 'console',
-require: [ 'elasticsearch' ],
+require: ['elasticsearch'],

 config: function (Joi) {
 return Joi.object({
@@ -89,7 +91,7 @@ export default function (kibana) {
 const proxyPathFilters = options.proxyFilter.map(str => new RegExp(str));

 server.route(createProxyRoute({
-baseUrl: config.get('elasticsearch.url'),
+baseUrl: head(config.get('elasticsearch.hosts')),
 pathFilters: proxyPathFilters,
 getConfigForReq(req, uri) {
 const whitelist = config.get('elasticsearch.requestHeadersWhitelist');
@@ -132,7 +134,16 @@ export default function (kibana) {

 injectDefaultVars(server) {
 return {
-elasticsearchUrl: server.config().get('elasticsearch.url')
+elasticsearchUrl: url.format(
+Object.assign(
+url.parse(
+head(
+server.config().get('elasticsearch.hosts')
+)
+),
+{ auth: false }
+)
+)
 };
 },

@@ -25,7 +25,7 @@ const RowParser = require('./row_parser');
 const InputMode = require('./mode/input');
 const utils = require('../utils');
 const es = require('../es');
-import chrome from 'ui/chrome';
+const chrome = require('ui/chrome');

 const smartResize = require('../smart_resize');

@@ -564,6 +564,7 @@ export default function SenseEditor($el) {
 const esMethod = req.method;
 const esData = req.data;

+// this is the first url defined in elasticsearch.hosts
 const elasticsearchBaseUrl = chrome.getInjected('elasticsearchUrl');
 const url = es.constructESUrl(elasticsearchBaseUrl, esPath);

@@ -42,7 +42,7 @@ describe('plugins/console', function () {
 }
 };

-server.config().get.withArgs('elasticsearch.url').returns('http://localhost:9200');
+server.config().get.withArgs('elasticsearch.hosts').returns(['http://localhost:9200']);
 server.config().get.withArgs('elasticsearch.ssl.verificationMode').returns('full');
 });

@@ -58,20 +58,20 @@ describe('plugins/console', function () {
 });

 it(`uses https.Agent when url's protocol is https`, function () {
-setElasticsearchConfig('url', 'https://localhost:9200');
+setElasticsearchConfig('hosts', ['https://localhost:9200']);
 const { agent } = getElasticsearchProxyConfig(server);
 expect(agent).to.be.a(https.Agent);
 });

 it(`uses http.Agent when url's protocol is http`, function () {
-setElasticsearchConfig('url', 'http://localhost:9200');
+setElasticsearchConfig('hosts', ['http://localhost:9200']);
 const { agent } = getElasticsearchProxyConfig(server);
 expect(agent).to.be.a(http.Agent);
 });

 describe('ssl', function () {
 beforeEach(function () {
-setElasticsearchConfig('url', 'https://localhost:9200');
+setElasticsearchConfig('hosts', ['https://localhost:9200']);
 });

 it('sets rejectUnauthorized to false when verificationMode is none', function () {
@@ -27,8 +27,9 @@ const readFile = (file) => readFileSync(file, 'utf8');

 const createAgent = (server) => {
 const config = server.config();
-const target = url.parse(config.get('elasticsearch.url'));
+
+const target = url.parse(
+_.head(config.get('elasticsearch.hosts'))
+);
 if (!/^https/.test(target.protocol)) return new http.Agent();

 const agentOptions = {};
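Editor's note: the console proxy still builds a single agent, so the http/https decision is keyed off the first entry in `elasticsearch.hosts`. A sketch of that selection with a hypothetical host list:

```js
const url = require('url');
const _ = require('lodash');
const http = require('http');
const https = require('https');

const hosts = ['https://es1.example.com:9200', 'https://es2.example.com:9200'];
const target = url.parse(_.head(hosts)); // first configured node decides the protocol
const agent = /^https/.test(target.protocol) ? new https.Agent() : new http.Agent();
console.log(agent instanceof https.Agent); // true
```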
@@ -25,11 +25,10 @@ import { createDataCluster } from './lib/create_data_cluster';
 import { createAdminCluster } from './lib/create_admin_cluster';
 import { clientLogger } from './lib/client_logger';
 import { createClusters } from './lib/create_clusters';
-import { createProxy } from './lib/create_proxy';
 import filterHeaders from './lib/filter_headers';

-const DEFAULT_REQUEST_HEADERS = [ 'authorization' ];
+import { createProxy } from './lib/create_proxy';
+
+const DEFAULT_REQUEST_HEADERS = ['authorization'];

 export default function (kibana) {
 return new kibana.Plugin({
@@ -46,7 +45,10 @@ export default function (kibana) {

 return Joi.object({
 enabled: Joi.boolean().default(true),
-url: Joi.string().uri({ scheme: ['http', 'https'] }).default('http://localhost:9200'),
+sniffOnStart: Joi.boolean().default(false),
+sniffInterval: Joi.number().allow(false).default(false),
+sniffOnConnectionFault: Joi.boolean().default(false),
+hosts: Joi.array().items(Joi.string().uri({ scheme: ['http', 'https'] })).single().default('http://localhost:9200'),
 preserveHost: Joi.boolean().default(true),
 username: Joi.string(),
 password: Joi.string(),
@@ -86,9 +88,27 @@ export default function (kibana) {
 };
 };

+const url = () => {
+return (settings, log) => {
+const deprecatedUrl = get(settings, 'url');
+const hosts = get(settings, 'hosts.length');
+if (!deprecatedUrl) {
+return;
+}
+if (hosts) {
+log('Deprecated config key "elasticsearch.url" conflicts with "elasticsearch.hosts". Ignoring "elasticsearch.url"');
+} else {
+set(settings, 'hosts', [deprecatedUrl]);
+log('Config key "elasticsearch.url" is deprecated. It has been replaced with "elasticsearch.hosts"');
+}
+unset(settings, 'url');
+};
+};
+
 return [
 rename('ssl.ca', 'ssl.certificateAuthorities'),
 rename('ssl.cert', 'ssl.certificate'),
+url(),
 sslVerify(),
 ];
 },
@@ -115,8 +135,7 @@ export default function (kibana) {
 createDataCluster(server);
 createAdminCluster(server);

-createProxy(server, 'POST', '/{index}/_search');
-createProxy(server, 'POST', '/_msearch');
+createProxy(server);

 // Set up the health check service and start it.
 const { start, waitUntilReady } = healthCheck(this, server);
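Editor's note: the new `url()` deprecation above keeps old configs working: `elasticsearch.url` is folded into `elasticsearch.hosts` unless both are set, in which case the deprecated key is dropped with a warning. A standalone approximation of that transform (not the actual Kibana deprecation runner):

```js
const { get, set, unset } = require('lodash');

function urlToHosts(settings, log) {
  const deprecatedUrl = get(settings, 'url');
  const hosts = get(settings, 'hosts.length');
  if (!deprecatedUrl) return;
  if (hosts) {
    log('Deprecated config key "elasticsearch.url" conflicts with "elasticsearch.hosts". Ignoring "elasticsearch.url"');
  } else {
    set(settings, 'hosts', [deprecatedUrl]);
    log('Config key "elasticsearch.url" is deprecated. It has been replaced with "elasticsearch.hosts"');
  }
  unset(settings, 'url');
}

// Example: a legacy config is rewritten in place.
const settings = { url: 'http://localhost:9200' };
urlToHosts(settings, console.warn);
console.log(settings); // { hosts: ['http://localhost:9200'] }
```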
@@ -1,42 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import expect from 'expect.js';
-import { createPath } from '../create_proxy';
-
-describe('plugins/elasticsearch', function () {
-  describe('lib/create_proxy', function () {
-    describe('#createPath', function () {
-      it('prepends /elasticsearch to route', function () {
-        const path = createPath('/foobar', '/wat');
-        expect(path).to.equal('/foobar/wat');
-      });
-
-      it('ensures leading slash for prefix', function () {
-        const path = createPath('foobar', '/wat');
-        expect(path).to.equal('/foobar/wat');
-      });
-
-      it('ensures leading slash for path', function () {
-        const path = createPath('/foobar', 'wat');
-        expect(path).to.equal('/foobar/wat');
-      });
-    });
-  });
-});
@@ -77,7 +77,7 @@ describe('plugins/elasticsearch', () => {

 // setup the config().get()/.set() stubs
 const get = sinon.stub();
-get.withArgs('elasticsearch.url').returns(esUrl);
+get.withArgs('elasticsearch.hosts').returns([esUrl]);
 get.withArgs('kibana.index').returns('.my-kibana');
 get.withArgs('pkg.version').returns('1.0.0');

@@ -156,7 +156,7 @@ describe('plugins/elasticsearch', () => {
 sinon.assert.calledOnce(plugin.status.red);
 sinon.assert.calledWithExactly(
 plugin.status.red,
-`Unable to connect to Elasticsearch at http://localhost:9220/.`
+`Unable to connect to Elasticsearch.`
 );

 sinon.assert.calledTwice(ping);
@@ -1,159 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import expect from 'expect.js';
-import mapUri from '../map_uri';
-import { defaults } from 'lodash';
-
-describe('plugins/elasticsearch', function () {
-  describe('lib/map_uri', function () {
-
-    let request;
-
-    function stubCluster(settings) {
-      settings = defaults(settings || {}, {
-        url: 'http://localhost:9200',
-        requestHeadersWhitelist: ['authorization'],
-        customHeaders: {}
-      });
-
-      return {
-        getUrl: () => settings.url,
-        getCustomHeaders: () => settings.customHeaders,
-        getRequestHeadersWhitelist: () => settings.requestHeadersWhitelist
-      };
-    }
-
-    beforeEach(function () {
-      request = {
-        path: '/elasticsearch/some/path',
-        headers: {
-          cookie: 'some_cookie_string',
-          'accept-encoding': 'gzip, deflate',
-          origin: 'https://localhost:5601',
-          'content-type': 'application/json',
-          'x-my-custom-header': '42',
-          accept: 'application/json, text/plain, */*',
-          authorization: '2343d322eda344390fdw42'
-        }
-      };
-    });
-
-    it('sends custom headers if set', function () {
-      const settings = {
-        customHeaders: { foo: 'bar' }
-      };
-
-      mapUri(stubCluster(settings), '/elasticsearch')(request, function (err, upstreamUri, upstreamHeaders) {
-        expect(err).to.be(null);
-        expect(upstreamHeaders).to.have.property('foo', 'bar');
-      });
-    });
-
-    it('sends configured custom headers even if the same named header exists in request', function () {
-      const settings = {
-        requestHeadersWhitelist: ['x-my-custom-header'],
-        customHeaders: { 'x-my-custom-header': 'asconfigured' }
-      };
-
-      mapUri(stubCluster(settings), '/elasticsearch')(request, function (err, upstreamUri, upstreamHeaders) {
-        expect(err).to.be(null);
-        expect(upstreamHeaders).to.have.property('x-my-custom-header', 'asconfigured');
-      });
-    });
-
-    it('only proxies the whitelisted request headers', function () {
-      const settings = {
-        requestHeadersWhitelist: ['x-my-custom-HEADER', 'Authorization'],
-      };
-
-      mapUri(stubCluster(settings), '/elasticsearch')(request, function (err, upstreamUri, upstreamHeaders) {
-        expect(err).to.be(null);
-        expect(upstreamHeaders).to.have.property('authorization');
-        expect(upstreamHeaders).to.have.property('x-my-custom-header');
-        expect(Object.keys(upstreamHeaders).length).to.be(2);
-      });
-    });
-
-    it('proxies no headers if whitelist is set to []', function () {
-      const settings = {
-        requestHeadersWhitelist: [],
-      };
-
-      mapUri(stubCluster(settings), '/elasticsearch')(request, function (err, upstreamUri, upstreamHeaders) {
-        expect(err).to.be(null);
-        expect(Object.keys(upstreamHeaders).length).to.be(0);
-      });
-    });
-
-    it('proxies no headers if whitelist is set to no value', function () {
-      const settings = {
-        // joi converts `elasticsearch.requestHeadersWhitelist: null` into
-        // an array with a null inside because of the `array().single()` rule.
-        requestHeadersWhitelist: [ null ],
-      };
-
-      mapUri(stubCluster(settings), '/elasticsearch')(request, function (err, upstreamUri, upstreamHeaders) {
-        expect(err).to.be(null);
-        expect(Object.keys(upstreamHeaders).length).to.be(0);
-      });
-    });
-
-    it('strips the /elasticsearch prefix from the path', () => {
-      request.path = '/elasticsearch/es/path';
-
-      mapUri(stubCluster(), '/elasticsearch')(request, function (err, upstreamUri) {
-        expect(err).to.be(null);
-        expect(upstreamUri).to.be('http://localhost:9200/es/path');
-      });
-    });
-
-    it('extends the es.url path', function () {
-      request.path = '/elasticsearch/index/type';
-      const settings = { url: 'https://localhost:9200/base-path' };
-
-      mapUri(stubCluster(settings), '/elasticsearch')(request, function (err, upstreamUri) {
-        expect(err).to.be(null);
-        expect(upstreamUri).to.be('https://localhost:9200/base-path/index/type');
-      });
-    });
-
-    it('extends the es.url query string', function () {
-      request.path = '/elasticsearch/*';
-      request.query = { foo: 'bar' };
-      const settings = { url: 'https://localhost:9200/?base=query' };
-
-      mapUri(stubCluster(settings), '/elasticsearch')(request, function (err, upstreamUri) {
-        expect(err).to.be(null);
-        expect(upstreamUri).to.be('https://localhost:9200/*?foo=bar&base=query');
-      });
-    });
-
-    it('filters the _ querystring param', function () {
-      request.path = '/elasticsearch/*';
-      request.query = { _: Date.now() };
-
-      mapUri(stubCluster(), '/elasticsearch')(request, function (err, upstreamUri) {
-        expect(err).to.be(null);
-        expect(upstreamUri).to.be('http://localhost:9200/*');
-      });
-    });
-
-  });
-});
@@ -27,7 +27,7 @@ describe('plugins/elasticsearch', function () {

 beforeEach(function () {
 serverConfig = {
-url: 'https://localhost:9200',
+hosts: ['https://localhost:9200'],
 ssl: {
 verificationMode: 'full'
 }
@@ -126,24 +126,24 @@ describe('plugins/elasticsearch', function () {

 describe('port', () => {
 it('uses the specified port', () => {
-const config1 = parseConfig(serverConfig);
-expect(config1.host.port).to.be('9200');
+const config1 = parseConfig(serverConfig).hosts[0];
+expect(config1.port).to.be('9200');

-serverConfig.url = 'https://localhost:555';
-const config2 = parseConfig(serverConfig);
-expect(config2.host.port).to.be('555');
+serverConfig.hosts = ['https://localhost:555'];
+const config2 = parseConfig(serverConfig).hosts[0];
+expect(config2.port).to.be('555');
 });

 it('uses port 80 if http and no specified port', () => {
-serverConfig.url = 'http://localhost';
-const config2 = parseConfig(serverConfig);
-expect(config2.host.port).to.be('80');
+serverConfig.hosts = ['http://localhost'];
+const config2 = parseConfig(serverConfig).hosts[0];
+expect(config2.port).to.be('80');
 });

 it ('uses port 443 if https and no specified port', () => {
-serverConfig.url = 'https://localhost';
-const config2 = parseConfig(serverConfig);
-expect(config2.host.port).to.be('443');
+serverConfig.hosts = ['https://localhost'];
+const config2 = parseConfig(serverConfig).hosts[0];
+expect(config2.port).to.be('443');
 });
 });
 });
@@ -61,7 +61,7 @@ export class Cluster {

 getRequestTimeout = () => getClonedProperty(this._config, 'requestTimeout');

-getUrl = () => getClonedProperty(this._config, 'url');
+getHosts = () => getClonedProperty(this._config, 'hosts');

 getSsl = () => getClonedProperty(this._config, 'ssl');

@@ -88,7 +88,7 @@ export class Cluster {

 _getClientConfig = () => {
 return getClonedProperties(this._config, [
-'url',
+'hosts',
 'ssl',
 'username',
 'password',
@@ -98,6 +98,9 @@ export class Cluster {
 'keepAlive',
 'pingTimeout',
 'requestTimeout',
+'sniffOnStart',
+'sniffInterval',
+'sniffOnConnectionFault',
 'log'
 ]);
 }
@@ -17,60 +17,48 @@
 * under the License.
 */

-import createAgent from './create_agent';
-import mapUri from './map_uri';
-import { assign } from 'lodash';
+import Joi from 'joi';

-export function createPath(prefix, path) {
-  path = path[0] === '/' ? path : `/${path}`;
-  prefix = prefix[0] === '/' ? prefix : `/${prefix}`;
-
-  return `${prefix}${path}`;
-}
-
-export function createProxy(server, method, path, config) {
-  const proxies = new Map([
-    ['/elasticsearch', server.plugins.elasticsearch.getCluster('data')],
-  ]);
-
-  const responseHandler = function (err, upstreamResponse) {
-    if (err) {
-      throw err;
-    }
-
-    if (upstreamResponse.headers.location) {
-      // TODO: Workaround for #8705 until hapi has been updated to >= 15.0.0
-      upstreamResponse.headers.location = encodeURI(upstreamResponse.headers.location);
-    }
-
-    return upstreamResponse;
-  };
-
-  for (const [proxyPrefix, cluster] of proxies) {
-    const options = {
-      method,
-      path: createPath(proxyPrefix, path),
-      config: {
-        timeout: {
-          socket: cluster.getRequestTimeout()
-        }
-      },
-      handler: {
-        proxy: {
-          mapUri: mapUri(cluster, proxyPrefix),
-          agent: createAgent({
-            url: cluster.getUrl(),
-            ssl: cluster.getSsl()
-          }),
-          xforward: true,
-          timeout: cluster.getRequestTimeout(),
-          onResponse: responseHandler
-        }
-      },
-    };
-
-    assign(options.config, config);
-
-    server.route(options);
-  }
+export function createProxy(server) {
+  const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
+
+  server.route({
+    method: 'POST',
+    path: '/elasticsearch/_msearch',
+    config: {
+      payload: {
+        parse: 'gunzip'
+      }
+    },
+    async handler(req, h) {
+      const { query, payload } = req;
+      return callWithRequest(req, 'transport.request', {
+        path: '/_msearch',
+        method: 'POST',
+        query,
+        body: payload.toString('utf8')
+      }).finally(r => h.response(r));
+    }
+  });
+
+  server.route({
+    method: 'POST',
+    path: '/elasticsearch/{index}/_search',
+    config: {
+      validate: {
+        params: Joi.object().keys({
+          index: Joi.string().required()
+        })
+      }
+    },
+    handler(req, h) {
+      const { query, payload: body } = req;
+      return callWithRequest(req, 'transport.request', {
+        path: `/${req.params.index}/_search`,
+        method: 'POST',
+        query,
+        body
+      }).finally(r => h.response(r));
+    }
+  });
 }
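Editor's note: with the hapi proxy handler gone, only these two POST routes remain under `/elasticsearch`. A sketch of exercising them from outside Kibana, with a hypothetical index name (like any Kibana POST endpoint, the request needs a `kbn-xsrf` header):

```js
const fetch = require('node-fetch');

async function searchThroughProxy() {
  const res = await fetch('http://localhost:5601/elasticsearch/logstash-*/_search?size=1', {
    method: 'POST',
    headers: { 'content-type': 'application/json', 'kbn-xsrf': 'true' },
    body: JSON.stringify({ query: { match_all: {} } })
  });
  console.log(res.status, await res.json());
}

searchThroughProxy().catch(console.error);
```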
@@ -17,7 +17,6 @@
 * under the License.
 */

-import url from 'url';
 import Promise from 'bluebird';
 import elasticsearch from 'elasticsearch';
 import kibanaVersion from './kibana_version';
@@ -31,14 +30,11 @@ export default function (plugin, server) {
 const REQUEST_DELAY = config.get('elasticsearch.healthCheck.delay');

 plugin.status.yellow('Waiting for Elasticsearch');
-function waitForPong(callWithInternalUser, elasticsearchUrl) {
+function waitForPong(callWithInternalUser) {
 return callWithInternalUser('ping').catch(function (err) {
 if (!(err instanceof NoConnections)) throw err;

-const displayUrl = url.format({ ...url.parse(elasticsearchUrl), auth: undefined });
-plugin.status.red(`Unable to connect to Elasticsearch at ${displayUrl}.`);
-
-return Promise.delay(REQUEST_DELAY).then(waitForPong.bind(null, callWithInternalUser, elasticsearchUrl));
+plugin.status.red(`Unable to connect to Elasticsearch.`);
+return Promise.delay(REQUEST_DELAY).then(waitForPong.bind(null, callWithInternalUser));
 });
 }

@@ -61,7 +57,7 @@ export default function (plugin, server) {

 function check() {
 const healthCheck =
-waitForPong(callAdminAsKibanaUser, config.get('elasticsearch.url'))
+waitForPong(callAdminAsKibanaUser)
 .then(waitForEsVersion);

 return healthCheck
@@ -38,7 +38,7 @@ export default function mapUri(cluster, proxyPrefix) {
 query: esUrlQuery
 } = parseUrl(cluster.getUrl(), true);

-// copy most url components directly from the elasticsearch.url
+// copy most url components directly from elasticsearch.hosts
 const mappedUrlComponents = {
 protocol: esUrlProtocol,
 slashes: esUrlHasSlashes,
@@ -30,26 +30,35 @@ export function parseConfig(serverConfig = {}, { ignoreCertAndKey = false } = {}
 keepAlive: true,
 ...pick(serverConfig, [
 'plugins', 'apiVersion', 'keepAlive', 'pingTimeout',
-'requestTimeout', 'log', 'logQueries'
+'requestTimeout', 'log', 'logQueries', 'sniffOnStart',
+'sniffInterval', 'sniffOnConnectionFault', 'hosts'
 ])
 };

-const uri = url.parse(serverConfig.url);
-const httpsURI = uri.protocol === 'https:';
-const httpURI = uri.protocol === 'http:';
-const protocolPort = httpsURI && '443' || httpURI && '80';
-config.host = {
-host: uri.hostname,
-port: uri.port || protocolPort,
-protocol: uri.protocol,
-path: uri.pathname,
-query: uri.query,
-headers: serverConfig.customHeaders
+const mapHost = nodeUrl => {
+const uri = url.parse(nodeUrl);
+const httpsURI = uri.protocol === 'https:';
+const httpURI = uri.protocol === 'http:';
+const protocolPort = httpsURI && '443' || httpURI && '80';
+return {
+host: uri.hostname,
+port: uri.port || protocolPort,
+protocol: uri.protocol,
+path: uri.pathname,
+query: uri.query,
+headers: serverConfig.customHeaders
+};
 };

+if (serverConfig.hosts) {
+config.hosts = serverConfig.hosts.map(mapHost);
+}
+
 // Auth
 if (serverConfig.auth !== false && serverConfig.username && serverConfig.password) {
-config.host.auth = util.format('%s:%s', serverConfig.username, serverConfig.password);
+config.hosts.forEach(host => {
+host.auth = util.format('%s:%s', serverConfig.username, serverConfig.password);
+});
 }

 // SSL
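Editor's note: `parseConfig` now turns every entry of `elasticsearch.hosts` into a host object instead of building a single `config.host`. A sketch of that per-node mapping with made-up URLs (mirrors the `mapHost` helper above):

```js
const url = require('url');

const mapHost = nodeUrl => {
  const uri = url.parse(nodeUrl);
  const protocolPort = uri.protocol === 'https:' ? '443' : '80';
  return {
    host: uri.hostname,
    port: uri.port || protocolPort,
    protocol: uri.protocol,
    path: uri.pathname,
    query: uri.query
  };
};

console.log(['https://es1.example.com', 'http://es2.example.com:9200'].map(mapHost));
// [ { host: 'es1.example.com', port: '443', ... }, { host: 'es2.example.com', port: '9200', ... } ]
```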
@@ -22,10 +22,7 @@ import { transformDeprecations } from './transform_deprecations';
 import { unset, formatListAsProse, getFlattenedObject } from '../../utils';
 import { getTransform } from '../../deprecation';

-
-const getFlattenedKeys = object => (
-Object.keys(getFlattenedObject(object))
-);
+const getFlattenedKeys = object => Object.keys(getFlattenedObject(object));

 async function getUnusedConfigKeys(
 coreHandledConfigPaths,
@@ -34,12 +31,19 @@ async function getUnusedConfigKeys(
 rawSettings,
 configValues
 ) {
-// transform deprecated settings
-const transforms = [
-transformDeprecations,
-...await Promise.all(plugins.map(({ spec }) => getTransform(spec)))
-];
-const settings = transforms.reduce((a, c) => c(a), rawSettings);
+// transform deprecated core settings
+const settings = transformDeprecations(rawSettings);
+
+// transform deprecated plugin settings
+for (let i = 0; i < plugins.length; i++) {
+const { spec } = plugins[i];
+const transform = await getTransform(spec);
+const prefix = spec.getConfigPrefix();
+const pluginSettings = settings[prefix];
+if (pluginSettings) {
+settings[prefix] = transform(pluginSettings);
+}
+}

 // remove config values from disabled plugins
 for (const spec of disabledPluginSpecs) {
@@ -58,14 +62,14 @@

 // Filter out keys that are marked as used in the core (e.g. by new core plugins).
 return difference(inputKeys, appliedKeys).filter(
-unusedConfigKey => !coreHandledConfigPaths.some(
-usedInCoreConfigKey => unusedConfigKey.startsWith(usedInCoreConfigKey)
-)
+unusedConfigKey =>
+!coreHandledConfigPaths.some(usedInCoreConfigKey =>
+unusedConfigKey.startsWith(usedInCoreConfigKey)
+)
 );
 }

 export default async function (kbnServer, server, config) {

 server.decorate('server', 'config', function () {
 return kbnServer.config;
 });
@@ -83,9 +87,7 @@ export default async function (kbnServer, server, config) {
 }

 const formattedUnusedKeys = unusedKeys.map(key => `"${key}"`);
-const desc = formattedUnusedKeys.length === 1
-? 'setting was'
-: 'settings were';
+const desc = formattedUnusedKeys.length === 1 ? 'setting was' : 'settings were';

 const error = new Error(
 `${formatListAsProse(formattedUnusedKeys)} ${desc} not applied. ` +
@@ -229,17 +229,22 @@ describe('server/config completeMixin()', function () {
 it('should transform deprecated plugin settings', async () => {
 const { callCompleteMixin } = setup({
 settings: {
-foo1: 'bar'
+foo: {
+foo1: 'bar'
+}
 },
 configValues: {
-foo2: 'bar'
+foo: {
+foo2: 'bar'
+}
 },
 plugins: [
 {
 spec: {
 getDeprecationsProvider() {
 return async ({ rename }) => [rename('foo1', 'foo2')];
-}
+},
+getConfigPrefix: () => 'foo'
 }
 }
 ],
@@ -46,7 +46,7 @@ const DEFAULTS_SETTINGS = {
 const DEFAULT_SETTINGS_WITH_CORE_PLUGINS = {
 plugins: { scanDirs: [resolve(__dirname, '../legacy/core_plugins')] },
 elasticsearch: {
-url: esTestConfig.getUrl(),
+hosts: [esTestConfig.getUrl()],
 username: kibanaServerTestUser.username,
 password: kibanaServerTestUser.password,
 },
@@ -38,65 +38,5 @@ export default function ({ getService }) {
 .send('{"index":"logstash-2015.04.21","ignore_unavailable":true}\n{"size":500,"sort":{"@timestamp":"desc"},"query":{"bool":{"must":[{"query_string":{"analyze_wildcard":true,"query":"*"}},{"bool":{"must":[{"range":{"@timestamp":{"gte":1429577068175,"lte":1429577968175}}}],"must_not":[]}}],"must_not":[]}},"highlight":{"pre_tags":["@kibana-highlighted-field@"],"post_tags":["@/kibana-highlighted-field@"],"fields":{"*":{}}},"aggs":{"2":{"date_histogram":{"field":"@timestamp","interval":"30s","min_doc_count":0,"extended_bounds":{"min":1429577068175,"max":1429577968175}}}},"stored_fields":["*"],"_source": true,"script_fields":{},"docvalue_fields":["timestamp_offset","@timestamp","utc_time"]}\n') // eslint-disable-line max-len
 .expect(200)
 ));
-
-it('rejects nodes API', async () => (
-await supertest
-.get('/elasticsearch/_nodes')
-.expect(404)
-));
-
-it('rejects requests to root', async () => (
-await supertest
-.get('/elasticsearch')
-.expect(404)
-));
-
-it('rejects POST to .kibana', async () => (
-await supertest
-.post('/elasticsearch/.kibana')
-.expect(404)
-));
-
-it('rejects PUT to .kibana', async () => (
-await supertest
-.put('/elasticsearch/.kibana')
-.expect(404)
-));
-
-it('rejects DELETE to .kibana', async () => (
-await supertest
-.delete('/elasticsearch/.kibana')
-.expect(404)
-));
-
-it('rejects GET to .kibana', async () => (
-await supertest
-.get('/elasticsearch/.kibana')
-.expect(404)
-));
-
-it('rejects bulk requests to .kibana', async () => (
-await supertest
-.post('/elasticsearch/.kibana/_bulk')
-.set('content-type', 'application/json')
-.send({})
-.expect(404)
-));
-
-it('rejects validate API', async () => (
-await supertest
-.post('/elasticsearch/.kibana/__kibanaQueryValidator/_validate/query?explain=true&ignore_unavailable=true')
-.set('content-type', 'application/json')
-.send({ query: { query_string: { analyze_wildcard: true, query: '*' } } })
-.expect(404)
-));
-
-it('rejects mget', async () => (
-await supertest
-.post('/elasticsearch/_mget')
-.set('content-type', 'application/json')
-.send({ docs: [{ _index: 'elasticsearch', _id: 'somedocid' }] })
-.expect(404)
-));
 });
 }
@@ -19,8 +19,10 @@

 import expect from 'expect.js';

+
 export default function ({ getService }) {
 const supertest = getService('supertest');
+const es = getService('es');

 const MILLISECOND_IN_WEEK = 1000 * 60 * 60 * 24 * 7;

@@ -57,12 +59,11 @@ export default function ({ getService }) {
 });

 it('should load elasticsearch index containing sample data with dates relative to current time', async () => {
-const resp = await supertest
-.post('/elasticsearch/kibana_sample_data_flights/_search')
-.set('kbn-xsrf', 'kibana')
-.expect(200);
+const resp = await es.search({
+index: 'kibana_sample_data_flights'
+});

-const doc = resp.body.hits.hits[0];
+const doc = resp.hits.hits[0];
 const docMilliseconds = Date.parse(doc._source.timestamp);
 const nowMilliseconds = Date.now();
 const delta = Math.abs(nowMilliseconds - docMilliseconds);
@@ -76,11 +77,11 @@ export default function ({ getService }) {
 .post(`/api/sample_data/flights?now=${nowString}`)
 .set('kbn-xsrf', 'kibana');

-const resp = await supertest
-.post('/elasticsearch/kibana_sample_data_flights/_search')
-.set('kbn-xsrf', 'kibana');
+const resp = await es.search({
+index: 'kibana_sample_data_flights'
+});

-const doc = resp.body.hits.hits[0];
+const doc = resp.hits.hits[0];
 const docMilliseconds = Date.parse(doc._source.timestamp);
 const nowMilliseconds = Date.parse(nowString);
 const delta = Math.abs(nowMilliseconds - docMilliseconds);
@@ -98,10 +99,10 @@ export default function ({ getService }) {
 });

 it('should remove elasticsearch index containing sample data', async () => {
-await supertest
-.post('/elasticsearch/kibana_sample_data_flights/_search')
-.set('kbn-xsrf', 'kibana')
-.expect(404);
+const resp = await es.indices.exists({
+index: 'kibana_sample_data_flights'
+});
+expect(resp).to.be(false);
 });
 });
 });
@@ -56,7 +56,7 @@ export default function () {
 '--optimize.watchPrebuild=true',
 '--status.allowAnonymous=true',
 '--optimize.enabled=true',
-`--elasticsearch.url=${formatUrl(servers.elasticsearch)}`,
+`--elasticsearch.hosts=${formatUrl(servers.elasticsearch)}`,
 `--elasticsearch.username=${servers.elasticsearch.username}`,
 `--elasticsearch.password=${servers.elasticsearch.password}`,
 ],
@@ -84,7 +84,6 @@ export const ml = (kibana) => {
 const config = server.config();
 return {
 kbnIndex: config.get('kibana.index'),
-esServerUrl: config.get('elasticsearch.url'),
 mlAnnotationsEnabled,
 };
 });
@@ -107,5 +107,5 @@ cluster.

 1. Start a Kibana instance connected to the Monitoring cluster (for running queries in Sense on Monitoring data):
 ```
-% ./bin/kibana --config config/kibana.dev.yml --elasticsearch.url http://localhost:9210 --server.name monitoring-kibana --server.port 5611
+% ./bin/kibana --config config/kibana.dev.yml --elasticsearch.hosts http://localhost:9210 --server.name monitoring-kibana --server.port 5611
 ```
@@ -65,7 +65,10 @@ export const config = (Joi) => {
 index_pattern: Joi.string().default('.monitoring-es-2-*,.monitoring-es-6-*'),
 logQueries: Joi.boolean().default(false),
 requestHeadersWhitelist: Joi.array().items().single().default(DEFAULT_REQUEST_HEADERS),
-url: Joi.string().uri({ scheme: ['http', 'https'] }), // if empty, use Kibana's connection config
+sniffOnStart: Joi.boolean().default(false),
+sniffInterval: Joi.number().allow(false).default(false),
+sniffOnConnectionFault: Joi.boolean().default(false),
+hosts: Joi.array().items(Joi.string().uri({ scheme: ['http', 'https'] })).single(), // if empty, use Kibana's connection config
 username: Joi.string(),
 password: Joi.string(),
 requestTimeout: Joi.number().default(30000),
@@ -4,7 +4,7 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-import { get, has, set } from 'lodash';
+import { get, has, set, unset } from 'lodash';
 import { CLUSTER_ALERTS_ADDRESS_CONFIG_KEY } from './common/constants';

 /**
@@ -45,5 +45,21 @@ export const deprecations = ({ rename }) => {
 log(`Config key "${CLUSTER_ALERTS_ADDRESS_CONFIG_KEY}" will be required for email notifications to work in 7.0."`);
 }
 },
+(settings, log) => {
+const deprecatedUrl = get(settings, 'url');
+const hosts = get(settings, 'hosts.length');
+if (!deprecatedUrl) {
+return;
+}
+if (hosts) {
+log('Deprecated config key "xpack.monitoring.elasticsearch.url" ' +
+'conflicts with "xpack.monitoring.elasticsearch.hosts". Ignoring "elasticsearch.url"');
+} else {
+set(settings, 'hosts', [deprecatedUrl]);
+log('Config key "xpack.monitoring.elasticsearch.url" is deprecated.' +
+'It has been replaced with "xpack.monitoring.elasticsearch.hosts"');
+}
+unset(settings, 'url');
+}
 ];
 };
@@ -14,7 +14,7 @@ function getMockServerFromConnectionUrl(monitoringClusterUrl) {
 xpack: {
 monitoring: {
 elasticsearch: {
-url: monitoringClusterUrl,
+hosts: monitoringClusterUrl ? [monitoringClusterUrl] : [],
 username: 'monitoring-user-internal-test',
 password: 'monitoring-p@ssw0rd!-internal-test',
 ssl: {},
@@ -25,7 +25,7 @@ function getMockServerFromConnectionUrl(monitoringClusterUrl) {
 }
 },
 elasticsearch: {
-url: 'http://localhost:9200',
+hosts: ['http://localhost:9200'],
 username: 'user-internal-test',
 password: 'p@ssw0rd!-internal-test',
 ssl: {},
@@ -70,7 +70,7 @@ describe('Instantiate Client', () => {

 expect(server.log.getCall(0).args).to.eql([
 [ 'monitoring-ui', 'es-client' ],
-'config sourced from: production cluster (http://localhost:9200)'
+'config sourced from: production cluster'
 ]);
 });

@@ -80,7 +80,7 @@ describe('Instantiate Client', () => {

 expect(server.log.getCall(0).args).to.eql([
 [ 'monitoring-ui', 'es-client' ],
-'config sourced from: monitoring cluster (monitoring-cluster.test:9200)'
+'config sourced from: monitoring cluster'
 ]);
 });
 });
@@ -128,7 +128,7 @@ describe('Instantiate Client', () => {

 sinon.assert.calledOnce(createCluster);
 expect(createClusterCall.args[0]).to.be('monitoring');
-expect(createClientOptions.url).to.eql('http://localhost:9200');
+expect(createClientOptions.hosts[0]).to.eql('http://localhost:9200');
 });
 });

@@ -143,7 +143,7 @@ describe('Instantiate Client', () => {

 sinon.assert.calledOnce(createCluster);
 expect(createClusterCall.args[0]).to.be('monitoring');
-expect(createClientOptions.url).to.eql('http://monitoring-cluster.test:9200');
+expect(createClientOptions.hosts[0]).to.eql('http://monitoring-cluster.test:9200');
 expect(createClientOptions.username).to.eql('monitoring-user-internal-test');
 expect(createClientOptions.password).to.eql('monitoring-p@ssw0rd!-internal-test');
 });
@@ -32,7 +32,7 @@ export function exposeClient(server) {
 };
 let configSource = 'monitoring';

-if (!Boolean(config.url)) {
+if (!Boolean(config.hosts && config.hosts.length)) {
 config = server.config().get('elasticsearch');
 configSource = 'production';
 }
@@ -44,7 +44,7 @@ export function exposeClient(server) {
 const cluster = esPlugin.createCluster('monitoring', config);
 server.events.on('stop', bindKey(cluster, 'close'));

-server.log([LOGGING_TAG, 'es-client'], `config sourced from: ${configSource} cluster (${config.url})`);
+server.log([LOGGING_TAG, 'es-client'], `config sourced from: ${configSource} cluster`);
 }
