Mirror of https://github.com/elastic/kibana.git
[ML] Explain Log Rate Spikes: Improve streaming headers for certain proxy configs. (#139637)
Updates the streaming response headers (adds 'X-Accel-Buffering: no', 'Cache-Control: no-cache', 'Connection: keep-alive', and 'Transfer-Encoding: chunked') so that response streaming also works behind proxies such as Nginx that would otherwise buffer the response.
parent 6ad09d6684
commit 6a2fa9f4b4
2 changed files with 36 additions and 11 deletions
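For context before the diff: the commit adds the conventional header set for unbuffered, chunked streaming. A minimal, self-contained Node.js/TypeScript sketch (illustrative only, not part of this commit; the server, port, and payload are made up) shows the same headers on a plain HTTP response:

import { createServer } from 'http';

// Illustrative server: streams a few chunks with the same headers this
// commit adds to streamFactory's responseWithHeaders.
const server = createServer((_req, res) => {
  res.writeHead(200, {
    // 'X-Accel-Buffering: no' asks Nginx-style proxies not to buffer the body,
    // so each chunk is forwarded to the client as soon as it is written.
    'X-Accel-Buffering': 'no',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive',
    'Transfer-Encoding': 'chunked',
  });

  let count = 0;
  const timer = setInterval(() => {
    res.write(`chunk ${count}\n`); // each write goes out as its own chunk
    if (++count === 5) {
      clearInterval(timer);
      res.end();
    }
  }, 1000);
});

server.listen(3000);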
@@ -44,7 +44,12 @@ describe('streamFactory', () => {
       streamResult += chunk.toString('utf8');
     }
 
-    expect(responseWithHeaders.headers).toBe(undefined);
+    expect(responseWithHeaders.headers).toStrictEqual({
+      'Cache-Control': 'no-cache',
+      Connection: 'keep-alive',
+      'Transfer-Encoding': 'chunked',
+      'X-Accel-Buffering': 'no',
+    });
     expect(streamResult).toBe('push1push2');
   });
 
@@ -65,7 +70,12 @@ describe('streamFactory', () => {
 
     const parsedItems = streamItems.map((d) => JSON.parse(d));
 
-    expect(responseWithHeaders.headers).toBe(undefined);
+    expect(responseWithHeaders.headers).toStrictEqual({
+      'Cache-Control': 'no-cache',
+      Connection: 'keep-alive',
+      'Transfer-Encoding': 'chunked',
+      'X-Accel-Buffering': 'no',
+    });
     expect(parsedItems).toHaveLength(2);
     expect(parsedItems[0]).toStrictEqual(mockItem1);
     expect(parsedItems[1]).toStrictEqual(mockItem2);
@@ -105,7 +115,13 @@ describe('streamFactory', () => {
 
       const streamResult = decoded.toString('utf8');
 
-      expect(responseWithHeaders.headers).toStrictEqual({ 'content-encoding': 'gzip' });
+      expect(responseWithHeaders.headers).toStrictEqual({
+        'Cache-Control': 'no-cache',
+        Connection: 'keep-alive',
+        'content-encoding': 'gzip',
+        'Transfer-Encoding': 'chunked',
+        'X-Accel-Buffering': 'no',
+      });
       expect(streamResult).toBe('push1push2');
 
       done();
@@ -143,7 +159,13 @@ describe('streamFactory', () => {
 
      const parsedItems = streamItems.map((d) => JSON.parse(d));
 
-      expect(responseWithHeaders.headers).toStrictEqual({ 'content-encoding': 'gzip' });
+      expect(responseWithHeaders.headers).toStrictEqual({
+        'Cache-Control': 'no-cache',
+        Connection: 'keep-alive',
+        'content-encoding': 'gzip',
+        'Transfer-Encoding': 'chunked',
+        'X-Accel-Buffering': 'no',
+      });
       expect(parsedItems).toHaveLength(2);
       expect(parsedItems[0]).toStrictEqual(mockItem1);
       expect(parsedItems[1]).toStrictEqual(mockItem2);
@@ -106,13 +106,16 @@ export function streamFactory<T = unknown>(
 
   const responseWithHeaders: StreamFactoryReturnType['responseWithHeaders'] = {
     body: stream,
-    ...(isCompressed
-      ? {
-          headers: {
-            'content-encoding': 'gzip',
-          },
-        }
-      : {}),
+    headers: {
+      ...(isCompressed ? { 'content-encoding': 'gzip' } : {}),
+
+      // This disables response buffering on proxy servers (Nginx, uwsgi, fastcgi, etc.)
+      // Otherwise, those proxies buffer responses up to 4/8 KiB.
+      'X-Accel-Buffering': 'no',
+      'Cache-Control': 'no-cache',
+      Connection: 'keep-alive',
+      'Transfer-Encoding': 'chunked',
+    },
   };
 
   return { DELIMITER, end, push, responseWithHeaders };
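Usage note (an assumption, not shown in this diff): the new headers only reach the client if the route handler forwards the whole responseWithHeaders object to the HTTP response. A rough sketch of that pattern, where the route path, the streamFactory call signature, and the surrounding imports are assumed for illustration:

// Hypothetical route handler; `router` and `streamFactory` imports are omitted,
// and the streamFactory parameters are assumptions, not taken from this commit.
router.get(
  { path: '/internal/example_stream', validate: false },
  async (context, request, response) => {
    const { push, end, responseWithHeaders } = streamFactory<{ type: string }>(request.headers);

    // Push a couple of items asynchronously, then close the stream.
    setTimeout(() => {
      push({ type: 'ping' });
      push({ type: 'done' });
      end();
    }, 0);

    // responseWithHeaders is { body, headers }, so 'X-Accel-Buffering',
    // 'Cache-Control', 'Connection', and 'Transfer-Encoding' (plus
    // 'content-encoding' when gzip is used) are sent with the streamed body.
    return response.ok(responseWithHeaders);
  }
);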