[ML] AIOps: Use Kibana's http service instead of fetch, fix throttling. (#162335)

- Originally Kibana's `http` service did not support receiving streams,
which is why we used plain `fetch` for this. Since that was fixed in
#158678, this PR updates the streaming helpers to use Kibana's `http`
service from now on.
- The PR also breaks out the response stream code into its own package
and restructures it to separate client- and server-side code. This brings
down the `aiops` bundle size by `~300KB`! 🥳
- The approach to client-side throttling/buffering was also revamped:
throttling inside the generator function was problematic because it
always waited for the full timeout. The buffering is now removed from
`fetchStream`; instead, `useThrottle` from `react-use` is applied to the
reduced `data` in `useFetchStream` (see the sketch below). Loading log rate
analysis results got a lot snappier with this update!
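A minimal sketch of the revamped client-side throttling, assuming a reducer-driven stream state (the hook name and the interval below are illustrative, not the actual `@kbn/ml-response-stream` code):

```ts
import { useReducer } from 'react';
import useThrottle from 'react-use/lib/useThrottle';

// Hypothetical helper: throttle the reduced stream state for rendering
// instead of buffering inside the stream's generator function.
export function useThrottledStreamData<S, A>(
  reducer: (state: S, action: A) => S,
  initialState: S,
  intervalMs = 100
) {
  // `dataRaw` updates on every action dispatched while the stream is consumed.
  const [dataRaw, dispatch] = useReducer(reducer, initialState);
  // `data` changes at most once per `intervalMs`, so React re-renders stay
  // cheap even when many small chunks arrive in quick succession.
  const data = useThrottle(dataRaw, intervalMs);
  return { data, dataRaw, dispatch };
}
```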
Walter Rafelsberger 2023-07-27 08:57:10 +02:00 committed by GitHub
parent 682c772e09
commit 0ab24e566c
33 changed files with 385 additions and 453 deletions


@@ -6,7 +6,7 @@ To run Kibana with the described examples, use `yarn start --run-examples`.
The `response_stream` plugin demonstrates API endpoints that can stream data chunks within a single request, with gzip/compression support. gzip streams get decompressed natively by browsers. The plugin demonstrates two use cases to get started: streaming a raw string as well as a more complex example that streams Redux-like actions to the client, which update React state via `useReducer()`.
Code in `@kbn/aiops-utils` contains helpers to set up a stream on the server side (`streamFactory()`) and consume it on the client side via a custom hook (`useFetchStream()`). The utilities make use of TS generics in a way that allows for type safety for both request-related options and the returned data.
Code in `@kbn/ml-response-stream` contains helpers to set up a stream on the server side (`streamFactory()`) and consume it on the client side via a custom hook (`useFetchStream()`). The utilities make use of TS generics in a way that allows for type safety for both request-related options and the returned data.
No additional third-party libraries are used to make the helpers work. On the server, they integrate with `Hapi` and use Node's own `gzip`. On the client, the custom hook abstracts away the logic needed to consume the stream; internally it makes use of a generator function and `useReducer()` to update React state.
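To illustrate the client side, here is a rough sketch of how a stream can be consumed through Kibana's `http` service with a generator function; the `asResponse`/`rawResponse` options and the function name are assumptions for this example, not the exact `fetchStream` implementation:

```ts
import type { HttpSetup } from '@kbn/core/public';

// Hypothetical simplified stream consumer: yields decoded text chunks.
export async function* streamChunks(
  http: HttpSetup,
  endpoint: string,
  body: object,
  signal: AbortSignal
): AsyncGenerator<string> {
  // `asResponse`/`rawResponse` expose the underlying `Response`, whose body
  // the browser has already decompressed if the server sent gzip.
  const response = await http.post(endpoint, {
    signal,
    asResponse: true,
    rawResponse: true,
    body: JSON.stringify(body),
  });

  const reader = response.response?.body?.getReader();
  if (!reader) return;

  const decoder = new TextDecoder();
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    yield decoder.decode(value, { stream: true });
  }
}
```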
@@ -21,8 +21,12 @@ The request's headers get passed on to automatically identify if compression is
On the client, the custom hook is used like this:
```ts
const { errors, start, cancel, data, isRunning } = useFetchStream<
ApiSimpleStringStream, typeof basePath
>(`${basePath}/internal/response_stream/simple_string_stream`);
const {
errors,
start,
cancel,
data,
isRunning
} = useFetchStream('/internal/response_stream/simple_string_stream');
```
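A complementary server-side sketch of a route handler built on `streamFactory()` from `@kbn/ml-response-stream/server`; the route path is made up for this example and the exact `streamFactory()` signature may differ, so treat this as an assumption-laden outline rather than the canonical route code:

```ts
import type { IRouter, Logger } from '@kbn/core/server';
import { streamFactory } from '@kbn/ml-response-stream/server';

// Hypothetical route: streams a few raw string chunks to the client.
export const defineExampleStringStreamRoute = (router: IRouter, logger: Logger) => {
  router.versioned
    .post({
      path: '/internal/response_stream/example_string_stream',
      access: 'internal',
    })
    .addVersion({ version: '1', validate: false }, async (context, request, response) => {
      // The request headers let streamFactory decide whether gzip compression
      // can be used for the response stream.
      const { push, end, responseWithHeaders } = streamFactory<string>(
        request.headers,
        logger
      );

      // Push chunks asynchronously after returning the streaming response.
      setTimeout(() => {
        push('first chunk ');
        push('second chunk ');
        end();
      }, 10);

      return response.ok(responseWithHeaders);
    });
};
```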


@@ -6,31 +6,7 @@
* Side Public License, v 1.
*/
import type {
UseFetchStreamCustomReducerParams,
UseFetchStreamParamsDefault,
} from '@kbn/aiops-utils';
import {
reducerStreamReducer,
ReducerStreamRequestBodySchema,
ReducerStreamApiAction,
} from './reducer_stream';
import { SimpleStringStreamRequestBodySchema } from './simple_string_stream';
export const API_ENDPOINT = {
export const RESPONSE_STREAM_API_ENDPOINT = {
REDUCER_STREAM: '/internal/response_stream/reducer_stream',
SIMPLE_STRING_STREAM: '/internal/response_stream/simple_string_stream',
} as const;
export interface ApiReducerStream extends UseFetchStreamCustomReducerParams {
endpoint: typeof API_ENDPOINT.REDUCER_STREAM;
reducer: typeof reducerStreamReducer;
body: ReducerStreamRequestBodySchema;
actions: ReducerStreamApiAction;
}
export interface ApiSimpleStringStream extends UseFetchStreamParamsDefault {
endpoint: typeof API_ENDPOINT.SIMPLE_STRING_STREAM;
body: SimpleStringStreamRequestBodySchema;
}


@@ -21,14 +21,14 @@ import {
EuiText,
} from '@elastic/eui';
import { useFetchStream } from '@kbn/aiops-utils';
import { useFetchStream } from '@kbn/ml-response-stream/client';
import { ApiReducerStream } from '../../../../../common/api';
import {
initialState,
resetStream,
reducerStreamReducer,
} from '../../../../../common/api/reducer_stream/reducer';
import { RESPONSE_STREAM_API_ENDPOINT } from '../../../../../common/api';
import { Page } from '../../../../components/page';
@@ -41,16 +41,12 @@ export const PageReducerStream: FC = () => {
core: { http, notifications },
} = useDeps();
const basePath = http?.basePath.get() ?? '';
const [simulateErrors, setSimulateErrors] = useState(false);
const [compressResponse, setCompressResponse] = useState(true);
const { dispatch, start, cancel, data, errors, isCancelled, isRunning } = useFetchStream<
ApiReducerStream,
typeof basePath
>(
`${basePath}/internal/response_stream/reducer_stream`,
const { dispatch, start, cancel, data, errors, isCancelled, isRunning } = useFetchStream(
http,
RESPONSE_STREAM_API_ENDPOINT.REDUCER_STREAM,
'1',
{ compressResponse, simulateErrors },
{ reducer: reducerStreamReducer, initialState }


@@ -18,26 +18,27 @@ import {
EuiText,
} from '@elastic/eui';
import { useFetchStream } from '@kbn/aiops-utils';
import { useFetchStream } from '@kbn/ml-response-stream/client';
import { ApiSimpleStringStream } from '../../../../../common/api';
import { RESPONSE_STREAM_API_ENDPOINT } from '../../../../../common/api';
import { useDeps } from '../../../../hooks/use_deps';
import { Page } from '../../../../components/page';
export const PageSimpleStringStream: FC = () => {
const { core } = useDeps();
const basePath = core.http?.basePath.get() ?? '';
const [compressResponse, setCompressResponse] = useState(true);
const { dispatch, errors, start, cancel, data, isRunning } = useFetchStream<
ApiSimpleStringStream,
typeof basePath
>(`${basePath}/internal/response_stream/simple_string_stream`, '1', {
compressResponse,
timeout: 500,
});
const { dispatch, errors, start, cancel, data, isRunning } = useFetchStream(
core.http,
RESPONSE_STREAM_API_ENDPOINT.SIMPLE_STRING_STREAM,
'1',
{
compressResponse,
timeout: 500,
}
);
const onClickHandler = async () => {
if (isRunning) {


@@ -7,7 +7,7 @@
*/
import type { IRouter, Logger } from '@kbn/core/server';
import { streamFactory } from '@kbn/aiops-utils';
import { streamFactory } from '@kbn/ml-response-stream/server';
import {
errorAction,
@@ -17,12 +17,12 @@ import {
deleteEntityAction,
ReducerStreamApiAction,
} from '../../common/api/reducer_stream';
import { API_ENDPOINT } from '../../common/api';
import { RESPONSE_STREAM_API_ENDPOINT } from '../../common/api';
export const defineReducerStreamRoute = (router: IRouter, logger: Logger) => {
router.versioned
.post({
path: API_ENDPOINT.REDUCER_STREAM,
path: RESPONSE_STREAM_API_ENDPOINT.REDUCER_STREAM,
access: 'internal',
})
.addVersion(


@@ -7,10 +7,10 @@
*/
import type { IRouter, Logger } from '@kbn/core/server';
import { streamFactory } from '@kbn/aiops-utils';
import { streamFactory } from '@kbn/ml-response-stream/server';
import { simpleStringStreamRequestBodySchema } from '../../common/api/simple_string_stream';
import { API_ENDPOINT } from '../../common/api';
import { RESPONSE_STREAM_API_ENDPOINT } from '../../common/api';
function timeout(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
@@ -19,7 +19,7 @@ function timeout(ms: number) {
export const defineSimpleStringStreamRoute = (router: IRouter, logger: Logger) => {
router.versioned
.post({
path: API_ENDPOINT.SIMPLE_STRING_STREAM,
path: RESPONSE_STREAM_API_ENDPOINT.SIMPLE_STRING_STREAM,
access: 'internal',
})
.addVersion(


@@ -19,8 +19,8 @@
"@kbn/developer-examples-plugin",
"@kbn/data-plugin",
"@kbn/kibana-react-plugin",
"@kbn/aiops-utils",
"@kbn/config-schema",
"@kbn/shared-ux-router",
"@kbn/ml-response-stream",
]
}