Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)
Migrate Console to use Node http instead of Hapi to support GET requests with bodies (#46200) (#46953)
* Cleaned up use of the es.send API:
  - Converted the Elasticsearch proxy config to TS (now we can see the types with https.AgentOptions)
  - Wrapped the request in util.promisify and refactored
  - Used the 'url' lib for parsing URLs
  - Removed rejectUnauthorized from proxy_route.js (this is a TLS setting handled in agent setup)
* Retained the original proxying behavior
* Re-enabled support for setting rejectUnauthorized via the proxy config settings
* Updated tests
Parent: e5219d7044
Commit: 3171a9a0a2
11 changed files with 271 additions and 104 deletions
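For context (an illustrative sketch, not part of this commit): Elasticsearch APIs accept GET requests with bodies, which Hapi/Wreck-based proxying does not support, hence the migration to Node's http module. Assuming an Elasticsearch instance at localhost:9200, Node's http.request can send such a request directly:

// Illustrative sketch only; assumes Elasticsearch is reachable at localhost:9200.
import http from 'http';

const body = JSON.stringify({ query: { match_all: {} } });

const req = http.request(
  {
    method: 'GET', // Node's http module will send a request body even with GET
    host: 'localhost',
    port: 9200,
    path: '/_search?pretty=true',
    headers: { 'content-type': 'application/json' },
  },
  res => {
    let raw = '';
    res.on('data', chunk => (raw += chunk));
    res.on('end', () => console.log(res.statusCode, raw));
  }
);

req.on('error', err => console.error(err));
req.write(body); // the GET body that the previous Hapi/Wreck path could not forward
req.end();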
@@ -35,11 +35,6 @@ export function getContentType(body)
 export function send(method, path, data) {
   const wrappedDfd = $.Deferred(); // eslint-disable-line new-cap

-  const isGetRequest = /^get$/i.test(method);
-  if (data && isGetRequest) {
-    method = 'POST';
-  }
-
   const options = {
     url: '../api/console/proxy?' + formatQueryString({ path, method }),
     data,
@@ -50,7 +45,6 @@ export function send(method, path, data)
     dataType: 'text', // disable automatic guessing
   };

   $.ajax(options).then(
     function (data, textStatus, jqXHR) {
       wrappedDfd.resolveWith(this, [data, textStatus, jqXHR]);
@@ -267,7 +267,7 @@ function retrieveSettings(settingsKey, settingsToRetrieve) {

   // Fetch autocomplete info if setting is set to true, and if user has made changes.
   if (currentSettings[settingsKey] && settingsToRetrieve[settingsKey]) {
-    return es.send('GET', settingKeyToPathMap[settingsKey], null, null, true);
+    return es.send('GET', settingKeyToPathMap[settingsKey], null);
   } else {
     const settingsPromise = new $.Deferred();
     // If a user has saved settings, but a field remains checked and unchanged, no need to make changes
@@ -18,23 +18,21 @@
 */

 import sinon from 'sinon';
-import Wreck from '@hapi/wreck';
 import expect from '@kbn/expect';
 import { Server } from 'hapi';

+import { createResponseStub } from './stubs';
 import { createProxyRoute } from '../../';

-import { createWreckResponseStub } from './stubs';
+import * as requestModule from '../../request';

 describe('Console Proxy Route', () => {
   const sandbox = sinon.createSandbox();
   const teardowns = [];
   let request;

   beforeEach(() => {
     request = async (method, path, response) => {
-      sandbox.stub(Wreck, 'request').callsFake(createWreckResponseStub(response));
+      sandbox.stub(requestModule, 'sendRequest').callsFake(createResponseStub(response));
       const server = new Server();
       server.route(
         createProxyRoute({
@@ -20,13 +20,13 @@
 import { request } from 'http';

 import sinon from 'sinon';
-import Wreck from '@hapi/wreck';
 import expect from '@kbn/expect';
 import { Server } from 'hapi';
+import * as requestModule from '../../request';

 import { createProxyRoute } from '../../';

-import { createWreckResponseStub } from './stubs';
+import { createResponseStub } from './stubs';

 describe('Console Proxy Route', () => {
   const sandbox = sinon.createSandbox();
@@ -34,7 +34,7 @@ describe('Console Proxy Route', () => {
   let setup;

   beforeEach(() => {
-    sandbox.stub(Wreck, 'request').callsFake(createWreckResponseStub());
+    sandbox.stub(requestModule, 'sendRequest').callsFake(createResponseStub());

     setup = () => {
       const server = new Server();
@@ -77,8 +77,8 @@ describe('Console Proxy Route', () => {

       resp.destroy();

-      sinon.assert.calledOnce(Wreck.request);
-      const { headers } = Wreck.request.getCall(0).args[2];
+      sinon.assert.calledOnce(requestModule.sendRequest);
+      const { headers } = requestModule.sendRequest.getCall(0).args[0];
       expect(headers)
         .to.have.property('x-forwarded-for')
         .and.not.be('');
@@ -20,13 +20,13 @@
 import { Agent } from 'http';

 import sinon from 'sinon';
-import Wreck from '@hapi/wreck';
+import * as requestModule from '../../request';
 import expect from '@kbn/expect';
 import { Server } from 'hapi';

 import { createProxyRoute } from '../../';

-import { createWreckResponseStub } from './stubs';
+import { createResponseStub } from './stubs';

 describe('Console Proxy Route', () => {
   const sandbox = sinon.createSandbox();
@@ -34,7 +34,7 @@ describe('Console Proxy Route', () => {
   let setup;

   beforeEach(() => {
-    sandbox.stub(Wreck, 'request').callsFake(createWreckResponseStub());
+    sandbox.stub(requestModule, 'sendRequest').callsFake(createResponseStub());

     setup = () => {
       const server = new Server();
@@ -72,6 +72,7 @@ describe('Console Proxy Route', () => {
         const { server } = setup();
         server.route(
           createProxyRoute({
+            baseUrl: 'http://localhost:9200',
             pathFilters: [/^\/foo\//, /^\/bar\//],
           })
         );
@@ -82,7 +83,7 @@ describe('Console Proxy Route', () => {
         });

         expect(statusCode).to.be(200);
-        sinon.assert.calledOnce(Wreck.request);
+        sinon.assert.calledOnce(requestModule.sendRequest);
       });
     });
     describe('all match', () => {
@@ -90,6 +91,7 @@ describe('Console Proxy Route', () => {
         const { server } = setup();
         server.route(
           createProxyRoute({
+            baseUrl: 'http://localhost:9200',
             pathFilters: [/^\/foo\//, /^\/bar\//],
           })
         );
@@ -100,7 +102,7 @@ describe('Console Proxy Route', () => {
         });

         expect(statusCode).to.be(200);
-        sinon.assert.calledOnce(Wreck.request);
+        sinon.assert.calledOnce(requestModule.sendRequest);
       });
     });
   });
@@ -111,7 +113,7 @@ describe('Console Proxy Route', () => {

       const getConfigForReq = sinon.stub().returns({});

-      server.route(createProxyRoute({ getConfigForReq }));
+      server.route(createProxyRoute({ baseUrl: 'http://localhost:9200', getConfigForReq }));
       await server.inject({
         method: 'POST',
         url: '/api/console/proxy?method=HEAD&path=/index/type/id',
@@ -124,10 +126,10 @@ describe('Console Proxy Route', () => {
       expect(args[0])
         .to.have.property('query')
         .eql({ method: 'HEAD', path: '/index/type/id' });
-      expect(args[1]).to.be('/index/type/id?pretty');
+      expect(args[1]).to.be('http://localhost:9200/index/type/id?pretty=true');
     });

-    it('sends the returned timeout, rejectUnauthorized, agent, and base headers to Wreck', async () => {
+    it('sends the returned timeout, agent, and base headers to request', async () => {
      const { server } = setup();

      const timeout = Math.round(Math.random() * 10000);
@@ -140,11 +142,12 @@ describe('Console Proxy Route', () => {

      server.route(
        createProxyRoute({
+         baseUrl: 'http://localhost:9200',
          getConfigForReq: () => ({
            timeout,
            agent,
-           rejectUnauthorized,
            headers,
+           rejectUnauthorized,
          }),
        })
      );
@@ -154,8 +157,8 @@ describe('Console Proxy Route', () => {
        url: '/api/console/proxy?method=HEAD&path=/index/type/id',
      });

-     sinon.assert.calledOnce(Wreck.request);
-     const opts = Wreck.request.getCall(0).args[2];
+     sinon.assert.calledOnce(requestModule.sendRequest);
+     const opts = requestModule.sendRequest.getCall(0).args[0];
      expect(opts).to.have.property('timeout', timeout);
      expect(opts).to.have.property('agent', agent);
      expect(opts).to.have.property('rejectUnauthorized', rejectUnauthorized);
@@ -18,13 +18,13 @@
 */

 import sinon from 'sinon';
-import Wreck from '@hapi/wreck';
+import * as requestModule from '../../request';
 import expect from '@kbn/expect';
 import { Server } from 'hapi';

 import { createProxyRoute } from '../../';

-import { createWreckResponseStub } from './stubs';
+import { createResponseStub } from './stubs';

 describe('Console Proxy Route', () => {
   const sandbox = sinon.createSandbox();
@@ -32,7 +32,7 @@ describe('Console Proxy Route', () => {
   let request;

   beforeEach(() => {
-    sandbox.stub(Wreck, 'request').callsFake(createWreckResponseStub());
+    sandbox.stub(requestModule, 'sendRequest').callsFake(createResponseStub());

     request = async (method, path) => {
       const server = new Server();
@@ -64,40 +64,40 @@ describe('Console Proxy Route', () => {
     describe('contains full url', () => {
       it('treats the url as a path', async () => {
         await request('GET', 'http://evil.com/test');
-        sinon.assert.calledOnce(Wreck.request);
-        const args = Wreck.request.getCall(0).args;
-        expect(args[1]).to.be('http://localhost:9200/http://evil.com/test?pretty');
+        sinon.assert.calledOnce(requestModule.sendRequest);
+        const args = requestModule.sendRequest.getCall(0).args;
+        expect(args[0].uri.href).to.be('http://localhost:9200/http://evil.com/test?pretty=true');
       });
     });
     describe('is missing', () => {
       it('returns a 400 error', async () => {
         const { statusCode } = await request('GET', undefined);
         expect(statusCode).to.be(400);
-        sinon.assert.notCalled(Wreck.request);
+        sinon.assert.notCalled(requestModule.sendRequest);
       });
     });
     describe('is empty', () => {
       it('returns a 400 error', async () => {
         const { statusCode } = await request('GET', '');
         expect(statusCode).to.be(400);
-        sinon.assert.notCalled(Wreck.request);
+        sinon.assert.notCalled(requestModule.sendRequest);
       });
     });
     describe('starts with a slash', () => {
       it('combines well with the base url', async () => {
         await request('GET', '/index/type/id');
-        sinon.assert.calledOnce(Wreck.request);
-        expect(Wreck.request.getCall(0).args[1]).to.be(
-          'http://localhost:9200/index/type/id?pretty'
+        sinon.assert.calledOnce(requestModule.sendRequest);
+        expect(requestModule.sendRequest.getCall(0).args[0].uri.href).to.be(
+          'http://localhost:9200/index/type/id?pretty=true'
         );
       });
     });
     describe(`doesn't start with a slash`, () => {
       it('combines well with the base url', async () => {
         await request('GET', 'index/type/id');
-        sinon.assert.calledOnce(Wreck.request);
-        expect(Wreck.request.getCall(0).args[1]).to.be(
-          'http://localhost:9200/index/type/id?pretty'
+        sinon.assert.calledOnce(requestModule.sendRequest);
+        expect(requestModule.sendRequest.getCall(0).args[0].uri.href).to.be(
+          'http://localhost:9200/index/type/id?pretty=true'
         );
       });
     });
@@ -107,29 +107,29 @@ describe('Console Proxy Route', () => {
       it('returns a 400 error', async () => {
         const { statusCode } = await request(null, '/');
         expect(statusCode).to.be(400);
-        sinon.assert.notCalled(Wreck.request);
+        sinon.assert.notCalled(requestModule.sendRequest);
       });
     });
     describe('is empty', () => {
       it('returns a 400 error', async () => {
         const { statusCode } = await request('', '/');
         expect(statusCode).to.be(400);
-        sinon.assert.notCalled(Wreck.request);
+        sinon.assert.notCalled(requestModule.sendRequest);
       });
     });
     describe('is an invalid http method', () => {
       it('returns a 400 error', async () => {
         const { statusCode } = await request('foo', '/');
         expect(statusCode).to.be(400);
-        sinon.assert.notCalled(Wreck.request);
+        sinon.assert.notCalled(requestModule.sendRequest);
       });
     });
     describe('is mixed case', () => {
       it('sends a request with the exact method', async () => {
         const { statusCode } = await request('HeAd', '/');
         expect(statusCode).to.be(200);
-        sinon.assert.calledOnce(Wreck.request);
-        expect(Wreck.request.getCall(0).args[0]).to.be('HeAd');
+        sinon.assert.calledOnce(requestModule.sendRequest);
+        expect(requestModule.sendRequest.getCall(0).args[0].method).to.be('HeAd');
       });
     });
   });
@@ -19,7 +19,7 @@

 import { Readable } from 'stream';

-export function createWreckResponseStub(response) {
+export function createResponseStub(response) {
   return async () => {
     const resp = new Readable({
       read() {
@@ -23,13 +23,13 @@ import http from 'http';
 import https from 'https';
 import url from 'url';

-const readFile = (file) => readFileSync(file, 'utf8');
+const readFile = (file: string) => readFileSync(file, 'utf8');

-const createAgent = (legacyConfig) => {
+const createAgent = (legacyConfig: any) => {
   const target = url.parse(_.head(legacyConfig.hosts));
-  if (!/^https/.test(target.protocol)) return new http.Agent();
+  if (!/^https/.test(target.protocol || '')) return new http.Agent();

-  const agentOptions = {};
+  const agentOptions: https.AgentOptions = {};

   const verificationMode = legacyConfig.ssl && legacyConfig.ssl.verificationMode;
   switch (verificationMode) {
@@ -40,7 +40,7 @@ const createAgent = (legacyConfig) => {
       agentOptions.rejectUnauthorized = true;

       // by default, NodeJS is checking the server identify
-      agentOptions.checkServerIdentity = _.noop;
+      agentOptions.checkServerIdentity = _.noop as any;
       break;
     case 'full':
       agentOptions.rejectUnauthorized = true;
@@ -49,8 +49,11 @@ const createAgent = (legacyConfig) => {
       throw new Error(`Unknown ssl verificationMode: ${verificationMode}`);
   }

-  if (legacyConfig.ssl && Array.isArray(legacyConfig.ssl.certificateAuthorities)
-    && legacyConfig.ssl.certificateAuthorities.length > 0) {
+  if (
+    legacyConfig.ssl &&
+    Array.isArray(legacyConfig.ssl.certificateAuthorities) &&
+    legacyConfig.ssl.certificateAuthorities.length > 0
+  ) {
     agentOptions.ca = legacyConfig.ssl.certificateAuthorities.map(readFile);
   }

@@ -68,9 +71,9 @@ const createAgent = (legacyConfig) => {
   return new https.Agent(agentOptions);
 };

-export const getElasticsearchProxyConfig = (legacyConfig) => {
+export const getElasticsearchProxyConfig = (legacyConfig: any) => {
   return {
     timeout: legacyConfig.requestTimeout.asMilliseconds(),
-    agent: createAgent(legacyConfig)
+    agent: createAgent(legacyConfig),
   };
 };
@@ -19,22 +19,19 @@

 import Joi from 'joi';
 import Boom from 'boom';
-import Wreck from '@hapi/wreck';
 import { trimLeft, trimRight } from 'lodash';
+import { sendRequest } from './request';
+import * as url from 'url';

-function resolveUri(base, path) {
-  let pathToUse = `${trimRight(base, '/')}/${trimLeft(path, '/')}`;
-  const questionMarkIndex = pathToUse.indexOf('?');
-  // no query string in pathToUse, append '?pretty'
-  if (questionMarkIndex === -1) {
-    pathToUse = `${pathToUse}?pretty`;
-  } else {
-    // pathToUse has query string, append '&pretty'
-    pathToUse = `${pathToUse}&pretty`;
-  } // appending pretty here to have Elasticsearch do the JSON formatting, as doing
+function toURL(base, path) {
+  const urlResult = new url.URL(`${trimRight(base, '/')}/${trimLeft(path, '/')}`);
+  // Appending pretty here to have Elasticsearch do the JSON formatting, as doing
   // in JS can lead to data loss (7.0 will get munged into 7, thus losing indication of
   // measurement precision)
-  return pathToUse;
+  if (!urlResult.searchParams.get('pretty')) {
+    urlResult.searchParams.append('pretty', 'true');
+  }
+  return urlResult;
 }

 function getProxyHeaders(req) {
@@ -73,7 +70,6 @@ export const createProxyRoute = ({
       output: 'stream',
       parse: false,
     },

     validate: {
       query: Joi.object()
         .keys({
@@ -104,43 +100,46 @@ export const createProxyRoute = ({
     handler: async (req, h) => {
       const { payload, query } = req;
       const { path, method } = query;
-      const uri = resolveUri(baseUrl, path);
+      const uri = toURL(baseUrl, path);

-      const { timeout, rejectUnauthorized, agent, headers } = getConfigForReq(req, uri);
-      const makeRequest = async payloadToSend => {
-        const wreckOptions = {
-          payload: payloadToSend,
-          timeout,
-          rejectUnauthorized,
-          agent,
-          headers: {
-            ...headers,
-            ...getProxyHeaders(req),
-          },
-        };
+      // Because this can technically be provided by a settings-defined proxy config, we need to
+      // preserve these property names to maintain BWC.
+      const { timeout, agent, headers, rejectUnauthorized } = getConfigForReq(req, uri.toString());

-        const esResponse = await Wreck.request(method, uri, wreckOptions);
-
-        if (method.toUpperCase() !== 'HEAD') {
-          return h
-            .response(esResponse)
-            .code(esResponse.statusCode)
-            .header('warning', esResponse.headers.warning);
-        }
-
-        return h
-          .response(`${esResponse.statusCode} - ${esResponse.statusMessage}`)
-          .code(esResponse.statusCode)
-          .type('text/plain')
-          .header('warning', esResponse.headers.warning);
+      const requestHeaders = {
+        ...headers,
+        ...getProxyHeaders(req),
       };

-      // Wreck assumes that DELETE requests will not have a body, and thus it does not
-      // parse the payload to pass it along, so we have to do this manually here.
-      if (method.toUpperCase() === 'DELETE') {
-        const data = await Wreck.read(payload);
-        return await makeRequest(data);
+      const esIncomingMessage = await sendRequest({
+        method,
+        headers: requestHeaders,
+        uri,
+        timeout,
+        payload,
+        rejectUnauthorized,
+        agent,
+      });
+
+      const {
+        statusCode,
+        statusMessage,
+        headers: responseHeaders,
+      } = esIncomingMessage;
+
+      const { warning } = responseHeaders;
+
+      if (method.toUpperCase() !== 'HEAD') {
+        return h
+          .response(esIncomingMessage)
+          .code(statusCode)
+          .header('warning', warning);
       } else {
-        return await makeRequest(payload);
+        return h
+          .response(`${statusCode} - ${statusMessage}`)
+          .code(statusCode)
+          .type('text/plain')
+          .header('warning', warning);
       }
     },
   },
src/legacy/core_plugins/console/server/request.test.ts (new file, 63 lines)

@@ -0,0 +1,63 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import http from 'http';
import * as sinon from 'sinon';
import { sendRequest } from './request';
import { URL } from 'url';
import { fail } from 'assert';

describe(`Console's send request`, () => {
  let sandbox: sinon.SinonSandbox;
  let stub: sinon.SinonStub;
  let fakeRequest: http.ClientRequest;

  beforeEach(() => {
    sandbox = sinon.createSandbox();
    stub = sandbox.stub(http, 'request').callsFake(() => {
      fakeRequest = {
        abort: sinon.stub(),
        on() {},
        once() {},
      } as any;
      return fakeRequest;
    });
  });

  afterEach(() => {
    stub.restore();
    fakeRequest = null as any;
  });

  it('correctly implements timeout and abort mechanism', async () => {
    try {
      await sendRequest({
        agent: null as any,
        headers: {},
        method: 'get',
        payload: null as any,
        timeout: 0, // immediately timeout
        uri: new URL('http://noone.nowhere.com'),
      });
      fail('Should not reach here!');
    } catch (e) {
      expect(e.message).toEqual('Client request timeout');
      expect((fakeRequest.abort as sinon.SinonStub).calledOnce).toBe(true);
    }
  });
});
src/legacy/core_plugins/console/server/request.ts (new file, 107 lines)

@@ -0,0 +1,107 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import http from 'http';
import https from 'https';
import net from 'net';
import stream from 'stream';
import Boom from 'boom';
import { URL } from 'url';

interface Args {
  method: 'get' | 'post' | 'put' | 'delete' | 'patch' | 'head';
  agent: http.Agent;
  uri: URL;
  payload: stream.Stream;
  timeout: number;
  headers: http.OutgoingHttpHeaders;
  rejectUnauthorized?: boolean;
}

// We use a modified version of Hapi's Wreck because Hapi, Axios, and Superagent don't support GET requests
// with bodies, but ES APIs do. Similarly with DELETE requests with bodies. Another library, `request`
// diverged too much from current behaviour.
export const sendRequest = ({
  method,
  headers,
  agent,
  uri,
  timeout,
  payload,
  rejectUnauthorized,
}: Args) => {
  const { hostname, port, protocol, pathname, search } = uri;
  const client = uri.protocol === 'https:' ? https : http;
  let resolved = false;

  let resolve: any;
  let reject: any;
  const reqPromise = new Promise<http.ServerResponse>((res, rej) => {
    resolve = res;
    reject = rej;
  });

  const req = client.request({
    method: method.toUpperCase(),
    // We support overriding this on a per request basis to support legacy proxy config. See ./proxy_config.
    rejectUnauthorized: typeof rejectUnauthorized === 'boolean' ? rejectUnauthorized : undefined,
    host: hostname,
    port: port === '' ? undefined : Number(port),
    protocol,
    path: `${pathname}${search || ''}`,
    headers: {
      ...headers,
      'content-type': 'application/json',
      'transfer-encoding': 'chunked',
      host: hostname,
    },
    agent,
  });

  req.once('response', res => {
    resolved = true;
    resolve(res);
  });

  req.once('socket', (socket: net.Socket) => {
    if (!socket.connecting) {
      payload.pipe(req);
    } else {
      socket.once('connect', () => {
        payload.pipe(req);
      });
    }
  });

  const onError = () => reject();
  req.once('error', onError);

  const timeoutPromise = new Promise<any>((timeoutResolve, timeoutReject) => {
    setTimeout(() => {
      if (!req.aborted && !req.socket) req.abort();
      if (!resolved) {
        timeoutReject(Boom.gatewayTimeout('Client request timeout'));
      } else {
        timeoutResolve();
      }
    }, timeout);
  });

  return Promise.race<http.ServerResponse>([reqPromise, timeoutPromise]);
};
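A hypothetical standalone usage of sendRequest (not part of the commit; within this change its only caller is the proxy route handler above), assuming an Elasticsearch instance at localhost:9200:

// Hypothetical usage sketch; names and values here are illustrative, not from the commit.
import http from 'http';
import { Readable } from 'stream';
import { URL } from 'url';
import { sendRequest } from './request';

async function example() {
  // Stream a JSON body with a GET request, the case the old Wreck-based proxying could not handle.
  const payload = new Readable({
    read() {
      this.push(JSON.stringify({ query: { match_all: {} } }));
      this.push(null);
    },
  });

  const res = await sendRequest({
    method: 'get',
    uri: new URL('http://localhost:9200/_search?pretty=true'),
    payload,
    timeout: 30000,
    headers: { 'content-type': 'application/json' },
    agent: new http.Agent(),
  });

  console.log(res.statusCode);
}

example().catch(console.error);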