[ML] Explain log rate spikes: Move API stream demos to Kibana examples. (#132590)
This creates a response_stream plugin in the Kibana /examples section. The plugin demonstrates API endpoints that can stream chunks of data within a single request, with gzip compression support; gzip streams are decompressed natively by the browser. Two use cases get you started: streaming a raw string, and a more complex example that streams Redux-like actions to the client, where they update React state via useReducer().
parent c9b1832654
commit c968e508f6
69 changed files with 1706 additions and 738 deletions
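The Redux-like streaming pattern described in the commit message can be sketched as follows. This is a minimal client-side illustration, not the plugin's actual code: the endpoint path, the kbn-xsrf header, and the update_progress action type are taken from the functional tests below, while the component, state shape, and reducer are hypothetical.

import React, { useEffect, useReducer } from 'react';

// Hypothetical action and state shapes, for illustration only.
interface StreamAction {
  type: string;
  payload?: unknown;
}
interface StreamState {
  progress: number;
}

function reducer(state: StreamState, action: StreamAction): StreamState {
  switch (action.type) {
    case 'update_progress':
      return { ...state, progress: action.payload as number };
    default:
      return state;
  }
}

export const ReducerStreamExample: React.FC = () => {
  const [state, dispatch] = useReducer(reducer, { progress: 0 });

  useEffect(() => {
    const abortCtrl = new AbortController();

    (async () => {
      const response = await fetch('/internal/response_stream/reducer_stream', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json', 'kbn-xsrf': 'stream' },
        body: JSON.stringify({ timeout: 1 }),
        signal: abortCtrl.signal,
      });
      if (response.body === null) return;

      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let partial = '';

      while (true) {
        const { value, done } = await reader.read();
        if (done) break;
        // Chunks are newline-delimited JSON and may end mid-line, so the
        // trailing fragment is carried over into the next chunk.
        const lines = (partial + decoder.decode(value, { stream: true })).split('\n');
        partial = lines.pop() ?? '';
        lines.filter((line) => line !== '').forEach((line) => dispatch(JSON.parse(line)));
      }
    })();

    return () => abortCtrl.abort();
  }, []);

  return <div>{`Progress: ${state.progress}%`}</div>;
};

Because the server flushes one serialized action per line over a single open HTTP response, each dispatched line triggers an incremental React re-render rather than waiting for the full payload.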
test/examples/response_stream/index.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { FtrProviderContext } from '../../functional/ftr_provider_context';

// eslint-disable-next-line import/no-default-export
export default function ({ getService, getPageObjects, loadTestFile }: FtrProviderContext) {
  describe('response stream', function () {
    loadTestFile(require.resolve('./reducer_stream'));
  });
}
test/examples/response_stream/parse_stream.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

export async function* parseStream(stream: NodeJS.ReadableStream) {
  let partial = '';

  try {
    for await (const value of stream) {
      const full = `${partial}${value}`;
      const parts = full.split('\n');
      const last = parts.pop();

      partial = last ?? '';

      const actions = parts.map((p) => JSON.parse(p));

      for (const action of actions) {
        yield action;
      }
    }
  } catch (error) {
    yield { type: 'error', payload: error.toString() };
  }
}
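A usage sketch for parseStream, mirroring what reducer_stream.ts below does: node-fetch exposes the response body as a NodeJS.ReadableStream, and the async generator yields one parsed action per ndjson line. The kibanaServerUrl parameter is assumed to point at a running Kibana server.

import fetch from 'node-fetch';

import { parseStream } from './parse_stream';

async function consume(kibanaServerUrl: string) {
  const response = await fetch(`${kibanaServerUrl}/internal/response_stream/reducer_stream`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'kbn-xsrf': 'stream' },
    body: JSON.stringify({ timeout: 1 }),
  });

  if (response.body !== null) {
    for await (const action of parseStream(response.body)) {
      // Each yielded value is one parsed ndjson line, e.g.
      // { type: 'update_progress', payload: 42 }.
      console.log(action.type, action.payload);
    }
  }
}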
test/examples/response_stream/reducer_stream.ts (new file, 89 lines)
@@ -0,0 +1,89 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import fetch from 'node-fetch';
import { format as formatUrl } from 'url';

import expect from '@kbn/expect';

import { FtrProviderContext } from '../../functional/ftr_provider_context';

import { parseStream } from './parse_stream';

// eslint-disable-next-line import/no-default-export
export default ({ getService }: FtrProviderContext) => {
  const supertest = getService('supertest');
  const config = getService('config');
  const kibanaServerUrl = formatUrl(config.get('servers.kibana'));

  describe('POST /internal/response_stream/reducer_stream', () => {
    it('should return full data without streaming', async () => {
      const resp = await supertest
        .post('/internal/response_stream/reducer_stream')
        .set('kbn-xsrf', 'kibana')
        .send({
          timeout: 1,
        })
        .expect(200);

      expect(Buffer.isBuffer(resp.body)).to.be(true);

      const chunks: string[] = resp.body.toString().split('\n');

      expect(chunks.length).to.be(201);

      const lastChunk = chunks.pop();
      expect(lastChunk).to.be('');

      let data: any[] = [];

      expect(() => {
        data = chunks.map((c) => JSON.parse(c));
      }).not.to.throwError();

      data.forEach((d) => {
        expect(typeof d.type).to.be('string');
      });

      const progressData = data.filter((d) => d.type === 'update_progress');
      expect(progressData.length).to.be(100);
      expect(progressData[0].payload).to.be(1);
      expect(progressData[progressData.length - 1].payload).to.be(100);
    });

    it('should return data in chunks with streaming', async () => {
      const response = await fetch(`${kibanaServerUrl}/internal/response_stream/reducer_stream`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'kbn-xsrf': 'stream',
        },
        body: JSON.stringify({ timeout: 1 }),
      });

      const stream = response.body;

      expect(stream).not.to.be(null);

      if (stream !== null) {
        const progressData: any[] = [];

        for await (const action of parseStream(stream)) {
          expect(action.type).not.to.be('error');
          if (action.type === 'update_progress') {
            progressData.push(action);
          }
        }

        expect(progressData.length).to.be(100);
        expect(progressData[0].payload).to.be(1);
        expect(progressData[progressData.length - 1].payload).to.be(100);
      }
    });
  });
};
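For reference, the wire format both tests assert on is newline-delimited JSON: 200 action lines, each terminated by '\n', so splitting the full body on '\n' yields 201 chunks whose last element is the empty string. Below is a minimal Node sketch of that shape, not the plugin's actual route handler; only the 100 update_progress actions are modeled, since the remaining action types do not appear in this diff.

import { PassThrough } from 'stream';

// Emit ndjson in the shape the tests expect: one JSON action per line,
// every line (including the last) terminated by '\n'. The real endpoint
// emits 200 action lines in total; only progress is modeled here.
function mockReducerStream(): NodeJS.ReadableStream {
  const stream = new PassThrough();
  for (let progress = 1; progress <= 100; progress++) {
    stream.write(`${JSON.stringify({ type: 'update_progress', payload: progress })}\n`);
  }
  stream.end();
  return stream;
}

Feeding mockReducerStream() to parseStream() yields the same update_progress sequence the streaming test consumes.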