[Search] [Playground] updating AI SDK to latest (#204659)

## Summary

Upgrades the AI SDK (`ai` package) from `^2.2.33` to `^4.0.18`. On the server, the Playground's conversational chain moves from the v2-era `experimental_StreamData` / `createStreamDataTransformer` APIs to v4's `createDataStream`, `DataStreamWriter`, and `LangChainAdapter`; on the client, `parseDataStream` gains a `handleFailure` callback so `error` stream parts surface in the chat UI instead of being dropped. A sketch of the new server-side pattern is below.
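
For reviewers, a minimal sketch of the v4 streaming pattern the server code adopts (the `ai` identifiers all appear in the diff below; `runChain` is a hypothetical stand-in for the chain's `.stream()` call):

```ts
import { createDataStream, LangChainAdapter } from 'ai';

// Hypothetical stand-in for conversationalRetrievalQAChain.stream(...) in the diff.
declare function runChain(): Promise<ReadableStream>;

const stream = createDataStream({
  execute: async (dataStream) => {
    // v2's data.appendMessageAnnotation(...) becomes dataStream.writeMessageAnnotation(...)
    dataStream.writeMessageAnnotation({ type: 'search_query', question: 'example' });
    // v2's pipeThrough(createStreamDataTransformer(true)) becomes an adapter merge
    LangChainAdapter.mergeIntoDataStream(await runChain(), { dataStream });
  },
  // Anything thrown inside execute() is serialised as an 'error' part on the stream
  onError: (error) => (error instanceof Error ? error.message : 'An error occurred'),
});
```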


### Checklist

Check that the PR satisfies the following conditions.

Reviewers should verify that this PR satisfies this list as well.

- [ ] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
- [ ] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [ ] If a plugin configuration key changed, check if it needs to be
allowlisted in the cloud and added to the [docker
list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker)
- [ ] This was checked for breaking HTTP API changes, and any breaking
changes have been approved by the breaking-change committee. The
`release_note:breaking` label should be applied in these situations.
- [ ] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
- [ ] The PR description includes the appropriate Release Notes section,
and the correct `release_note:*` label is applied per the
[guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)

### Identify risks

Does this PR introduce any risks? For example, consider risks like
hard-to-test bugs, performance regressions, or potential data loss.

Describe each identified risk, its severity, and its mitigation. Invite
stakeholders and evaluate how to proceed before merging.

- [ ] [See some risk
examples](https://github.com/elastic/kibana/blob/main/RISK_MATRIX.mdx)
- [ ] ...

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Commit bf4c7da5c3 (parent 8113ddfb30), authored by Joe McElroy on
2025-01-06 16:20:06 +00:00 and committed by GitHub.

15 changed files with 350 additions and 292 deletions

View file

@@ -1087,7 +1087,7 @@
"@xstate5/react": "npm:@xstate/react@^4.1.2",
"@xyflow/react": "^12.3.5",
"adm-zip": "^0.5.9",
"ai": "^2.2.33",
"ai": "^4.0.18",
"ajv": "^8.17.1",
"ansi-regex": "^6.1.0",
"antlr4": "^4.13.1-patch-1",

View file

@@ -107,7 +107,7 @@ module.exports = {
transformIgnorePatterns: [
// ignore all node_modules except monaco-editor, monaco-yaml which requires babel transforms to handle dynamic import()
// since ESM modules are not natively supported in Jest yet (https://github.com/facebook/jest/issues/4842)
'[/\\\\]node_modules(?![\\/\\\\](byte-size|monaco-editor|monaco-yaml|monaco-languageserver-types|monaco-marker-data-provider|monaco-worker-manager|vscode-languageserver-types|d3-interpolate|d3-color|langchain|langsmith|@cfworker|gpt-tokenizer|flat|@langchain))[/\\\\].+\\.js$',
'[/\\\\]node_modules(?![\\/\\\\](byte-size|monaco-editor|monaco-yaml|monaco-languageserver-types|monaco-marker-data-provider|monaco-worker-manager|vscode-languageserver-types|d3-interpolate|d3-color|langchain|langsmith|@cfworker|gpt-tokenizer|flat|@langchain|eventsource-parser))[/\\\\].+\\.js$',
'packages/kbn-pm/dist/index.js',
'[/\\\\]node_modules(?![\\/\\\\](langchain|langsmith|@langchain))/dist/[/\\\\].+\\.js$',
'[/\\\\]node_modules(?![\\/\\\\](langchain|langsmith|@langchain))/dist/util/[/\\\\].+\\.js$',

View file

@@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
module.exports = {
preset: '@kbn/test',
rootDir: '../../../../../..',
roots: ['<rootDir>/x-pack/solutions/search/plugins/search_playground/common'],
coverageDirectory:
'<rootDir>/target/kibana-coverage/jest/x-pack/solutions/search/plugins/search_playground/common',
coverageReporters: ['text', 'html'],
collectCoverageFrom: [
'<rootDir>/x-pack/solutions/search/plugins/search_playground/{common}/**/*.{ts,tsx}',
],
};

View file

@@ -59,7 +59,6 @@ const getStreamedResponse = async (
role: MessageRole.system,
createdAt: new Date(),
};
// concating the last question and error message with existing chat history
mutate([...previousMessages, chatRequest.messages.slice(-1)[0], systemErrorMessage], false);
},
onUpdate(merged) {

View file

@@ -7,12 +7,12 @@
module.exports = {
preset: '@kbn/test',
rootDir: '../../../../..',
roots: ['<rootDir>/x-pack/solutions/search/plugins/search_playground'],
rootDir: '../../../../../..',
roots: ['<rootDir>/x-pack/solutions/search/plugins/search_playground/public'],
coverageDirectory:
'<rootDir>/target/kibana-coverage/jest/x-pack/solutions/search/plugins/search_playground',
coverageReporters: ['text', 'html'],
collectCoverageFrom: [
'<rootDir>/x-pack/solutions/search/plugins/search_playground/{public,server}/**/*.{ts,tsx}',
'<rootDir>/x-pack/solutions/search/plugins/search_playground/{public}/**/*.{ts,tsx}',
],
};

View file

@@ -61,6 +61,7 @@ export async function fetchApi({
return await parseDataStream({
reader,
abortControllerRef: abortController != null ? { current: abortController() } : undefined,
handleFailure,
update: onUpdate,
});
}
@@ -81,6 +82,7 @@ export async function parseDataStream({
reader,
abortControllerRef,
update,
handleFailure,
generateId = uuidv4,
getCurrentDate = () => new Date(),
}: {
@@ -89,6 +91,7 @@
current: AbortController | null;
};
update: (mergedMessages: Message[]) => void;
handleFailure: (error: string) => void;
generateId?: () => string;
getCurrentDate?: () => Date;
}) {
@@ -113,6 +116,9 @@
createdAt,
};
}
} else if (type === 'error') {
handleFailure(value);
break;
}
let responseMessage = prefixMap.text;
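
Note for reviewers: in the AI SDK's data stream protocol each part is a `TYPE:JSON` line, and `error` parts (`3:`) previously went unnoticed on this path. A hedged sketch of how a caller wires the new `handleFailure` (the `reader` type and the `setMessages`/`notifyError` callbacks are hypothetical stand-ins):

```ts
// Sketch only; in the plugin, `reader` comes from the fetch response body.
declare const reader: ReadableStreamDefaultReader<string>;
declare function setMessages(mergedMessages: unknown[]): void; // hypothetical UI callback
declare function notifyError(message: string): void; // hypothetical toast helper

await parseDataStream({
  reader,
  abortControllerRef: { current: null },
  update: (mergedMessages) => setMessages(mergedMessages),
  handleFailure: (error) => notifyError(error), // new in this PR: fires on '3:' error parts
});
```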

View file

@@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
module.exports = {
preset: '@kbn/test/jest_node',
rootDir: '../../../../../..',
roots: ['<rootDir>/x-pack/solutions/search/plugins/search_playground/server'],
coverageDirectory:
'<rootDir>/target/kibana-coverage/jest/x-pack/solutions/search/plugins/search_playground/server',
coverageReporters: ['text', 'html'],
collectCoverageFrom: [
'<rootDir>/x-pack/solutions/search/plugins/search_playground/server/**/*.{ts,tsx}',
],
setupFilesAfterEnv: [
'<rootDir>/x-pack/solutions/search/plugins/search_playground/server/setup.ts',
],
};

View file

@@ -14,17 +14,21 @@ import { ConversationalChain, contextLimitCheck } from './conversational_chain';
import { ChatMessage, MessageRole } from '../types';
describe('conversational chain', () => {
beforeEach(() => {
jest.resetAllMocks();
});
const createTestChain = async ({
responses,
chat,
expectedFinalAnswer,
expectedDocs,
expectedTokens,
expectedErrorMessage,
expectedSearchRequest,
contentField = { index: 'field', website: 'body_content' },
isChatModel = true,
docs,
expectedHasClipped = false,
modelLimit,
}: {
responses: string[];
@@ -32,6 +36,7 @@ describe('conversational chain', () => {
expectedFinalAnswer?: string;
expectedDocs?: any;
expectedTokens?: any;
expectedErrorMessage?: string;
expectedSearchRequest?: any;
contentField?: Record<string, string>;
isChatModel?: boolean;
@@ -39,10 +44,6 @@
expectedHasClipped?: boolean;
modelLimit?: number;
}) => {
if (expectedHasClipped) {
expect.assertions(1);
}
const searchMock = jest.fn().mockImplementation(() => {
return {
hits: {
@@ -109,7 +110,6 @@
const streamToValue: string[] = await new Promise((resolve, reject) => {
const reader = stream.getReader();
const textDecoder = new TextDecoder();
const chunks: string[] = [];
const read = () => {
@@ -117,7 +117,7 @@
if (done) {
resolve(chunks);
} else {
chunks.push(textDecoder.decode(value));
chunks.push(value);
read();
}
}, reject);
@@ -125,10 +125,11 @@
read();
});
const textValue = streamToValue
.filter((v) => v[0] === '0')
.reduce((acc, v) => acc + v.replace(/0:"(.*)"\n/, '$1'), '');
expect(textValue).toEqual(expectedFinalAnswer);
const textValue =
streamToValue
.filter((v) => v[0] === '0')
.reduce((acc, v) => acc + v.replace(/0:"(.*)"\n/, '$1'), '') || '';
expect(textValue).toEqual(expectedFinalAnswer || '');
const annotations = streamToValue
.filter((v) => v[0] === '8')
@@ -136,19 +137,19 @@
.map((entry) => JSON.parse(entry))
.reduce((acc, v) => acc.concat(v), []);
const error =
streamToValue
.filter((v) => v[0] === '3')
.reduce((acc, v) => acc + v.replace(/3:"(.*)"\n/, '$1'), '') || '';
const docValues = annotations.filter((v: { type: string }) => v.type === 'retrieved_docs');
const tokens = annotations.filter((v: { type: string }) => v.type.endsWith('_token_count'));
const hasClipped = !!annotations.some((v: { type: string }) => v.type === 'context_clipped');
expect(docValues).toEqual(expectedDocs);
expect(tokens).toEqual(expectedTokens);
expect(hasClipped).toEqual(expectedHasClipped);
if (expectedErrorMessage) expect(error).toEqual(expectedErrorMessage);
expect(searchMock.mock.calls[0]).toEqual(expectedSearchRequest);
} catch (error) {
if (expectedHasClipped) {
expect(error).toMatchInlineSnapshot(`[ContextLimitError: Context exceeds the model limit]`);
} else {
throw error;
}
throw error;
}
};
@@ -444,7 +445,7 @@
});
}, 10000);
it('should clip the conversation', async () => {
it('should error the conversation when over model limit', async () => {
await createTestChain({
responses: ['rewrite "the" question', 'the final answer'],
chat: [
@@ -480,9 +481,30 @@
},
},
],
expectedDocs: [
{
documents: [
{ metadata: { _id: '1', _index: 'index' }, pageContent: '' },
{
metadata: { _id: '1', _index: 'website' },
pageContent: Array.from({ length: 1000 }, (_, i) => `${i}value\n `).join(' '),
},
],
type: 'retrieved_docs',
},
],
expectedSearchRequest: [
{
method: 'POST',
path: '/index,website/_search',
body: { query: { match: { field: 'rewrite "the" question' } }, size: 3 },
},
],
expectedTokens: [],
modelLimit: 100,
expectedHasClipped: true,
isChatModel: false,
expectedErrorMessage: 'Context exceeds the model limit',
});
}, 10000);
@@ -501,11 +523,9 @@
question: 'This is a test question.',
chat_history: 'This is a test chat history.',
};
jest.spyOn(prompt, 'format');
const result = await contextLimitCheck(undefined, prompt)(input);
expect(result).toBe(input);
expect(prompt.format).not.toHaveBeenCalled();
});
it('should return the input if within modelLimit', async () => {
@@ -514,10 +534,8 @@
question: 'This is a test question.',
chat_history: 'This is a test chat history.',
};
jest.spyOn(prompt, 'format');
const result = await contextLimitCheck(10000, prompt)(input);
expect(result).toEqual(input);
expect(prompt.format).toHaveBeenCalledWith(input);
});
it('should clip context if exceeds modelLimit', async () => {
@@ -527,7 +545,6 @@
question: 'This is a test question.',
chat_history: 'This is a test chat history.',
};
await expect(contextLimitCheck(33, prompt)(input)).rejects.toMatchInlineSnapshot(
`[ContextLimitError: Context exceeds the model limit]`
);
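
For context on the assertions above: every line on the stream is `TYPE:JSON`, where `0:` carries text deltas, `8:` message annotations, and `3:` errors, which is why the tests filter on `v[0]`. An illustrative (not captured) stream could look like:

```
0:"the final answer"
8:[{"type":"prompt_token_count","count":28}]
3:"Context exceeds the model limit"
```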

View file

@@ -13,8 +13,10 @@ import {
SystemMessagePromptTemplate,
} from '@langchain/core/prompts';
import { Runnable, RunnableLambda, RunnableSequence } from '@langchain/core/runnables';
import { BytesOutputParser, StringOutputParser } from '@langchain/core/output_parsers';
import { createStreamDataTransformer, experimental_StreamData } from 'ai';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { createDataStream, LangChainAdapter } from 'ai';
import type { DataStreamWriter } from 'ai';
import type { DataStreamString } from '@ai-sdk/ui-utils';
import { BaseLanguageModel } from '@langchain/core/language_models/base';
import { BaseMessage } from '@langchain/core/messages';
import { HumanMessage, AIMessage } from '@langchain/core/messages';
@@ -112,9 +114,9 @@
};
}
export function registerContextTokenCounts(data: experimental_StreamData) {
export function registerContextTokenCounts(data: DataStreamWriter) {
return (input: ContextInputs) => {
data.appendMessageAnnotation({
data.writeMessageAnnotation({
type: 'context_token_count',
count: getTokenEstimate(input.context),
});
@@ -130,154 +132,159 @@ class ConversationalChainFn {
this.options = options;
}
async stream(client: AssistClient, msgs: ChatMessage[]) {
const data = new experimental_StreamData();
async stream(
client: AssistClient,
msgs: ChatMessage[]
): Promise<ReadableStream<DataStreamString>> {
return createDataStream({
execute: async (dataStream) => {
const messages = msgs ?? [];
const lcMessages = getMessages(messages);
const previousMessages = lcMessages.slice(0, -1);
const question = lcMessages[lcMessages.length - 1]!.content;
const retrievedDocs: Document[] = [];
const messages = msgs ?? [];
const lcMessages = getMessages(messages);
const previousMessages = lcMessages.slice(0, -1);
const question = lcMessages[lcMessages.length - 1]!.content;
const retrievedDocs: Document[] = [];
let retrievalChain: Runnable = RunnableLambda.from(() => '');
const chatHistory = getSerialisedMessages(previousMessages);
let retrievalChain: Runnable = RunnableLambda.from(() => '');
const chatHistory = getSerialisedMessages(previousMessages);
if (this.options.rag) {
const retriever = new ElasticsearchRetriever({
retriever: this.options.rag.retriever,
index: this.options.rag.index,
client: client.getClient(),
content_field: this.options.rag.content_field,
hit_doc_mapper: this.options.rag.hit_doc_mapper ?? undefined,
k: this.options.rag.size ?? 3,
});
if (this.options.rag) {
const retriever = new ElasticsearchRetriever({
retriever: this.options.rag.retriever,
index: this.options.rag.index,
client: client.getClient(),
content_field: this.options.rag.content_field,
hit_doc_mapper: this.options.rag.hit_doc_mapper ?? undefined,
k: this.options.rag.size ?? 3,
});
retrievalChain = retriever.pipe(buildContext);
}
retrievalChain = retriever.pipe(buildContext);
}
let standaloneQuestionChain: Runnable = RunnableLambda.from((input) => {
return input.question;
});
if (lcMessages.length > 1) {
const questionRewritePromptTemplate = PromptTemplate.fromTemplate(
this.options.questionRewritePrompt
);
standaloneQuestionChain = RunnableSequence.from([
{
context: (input) => input.chat_history,
question: (input) => input.question,
},
questionRewritePromptTemplate,
this.options.model,
new StringOutputParser(),
]).withConfig({
metadata: {
type: 'standalone_question',
},
});
}
const prompt = ChatPromptTemplate.fromMessages([
SystemMessagePromptTemplate.fromTemplate(this.options.prompt),
...lcMessages,
]);
const answerChain = RunnableSequence.from([
{
context: RunnableSequence.from([(input) => input.question, retrievalChain]),
question: (input) => input.question,
},
RunnableLambda.from((inputs) => {
data.appendMessageAnnotation({
type: 'search_query',
question: inputs.question,
let standaloneQuestionChain: Runnable = RunnableLambda.from((input) => {
return input.question;
});
return inputs;
}),
RunnableLambda.from(contextLimitCheck(this.options?.rag?.inputTokensLimit, prompt)),
RunnableLambda.from(registerContextTokenCounts(data)),
prompt,
this.options.model.withConfig({ metadata: { type: 'question_answer_qa' } }),
]);
const conversationalRetrievalQAChain = RunnableSequence.from([
{
question: standaloneQuestionChain,
chat_history: (input) => input.chat_history,
},
answerChain,
new BytesOutputParser(),
]);
if (lcMessages.length > 1) {
const questionRewritePromptTemplate = PromptTemplate.fromTemplate(
this.options.questionRewritePrompt
);
standaloneQuestionChain = RunnableSequence.from([
{
context: (input) => input.chat_history,
question: (input) => input.question,
},
questionRewritePromptTemplate,
this.options.model,
new StringOutputParser(),
]).withConfig({
metadata: {
type: 'standalone_question',
},
});
}
const stream = await conversationalRetrievalQAChain.stream(
{
question,
chat_history: chatHistory,
},
{
callbacks: [
const prompt = ChatPromptTemplate.fromMessages([
SystemMessagePromptTemplate.fromTemplate(this.options.prompt),
...lcMessages,
]);
const answerChain = RunnableSequence.from([
{
// callback for chat based models (OpenAI)
handleChatModelStart(
llm,
msg: BaseMessage[][],
runId,
parentRunId,
extraParams,
tags,
metadata: Record<string, string>
) {
if (metadata?.type === 'question_answer_qa') {
data.appendMessageAnnotation({
type: 'prompt_token_count',
count: getTokenEstimateFromMessages(msg),
});
data.appendMessageAnnotation({
type: 'search_query',
question,
});
}
},
// callback for prompt based models (Bedrock uses ActionsClientLlm)
handleLLMStart(llm, input, runId, parentRunId, extraParams, tags, metadata) {
if (metadata?.type === 'question_answer_qa') {
data.appendMessageAnnotation({
type: 'prompt_token_count',
count: getTokenEstimate(input[0]),
});
}
},
handleRetrieverEnd(documents) {
retrievedDocs.push(...documents);
data.appendMessageAnnotation({
type: 'retrieved_docs',
documents: documents as any,
});
},
handleChainEnd(outputs, runId, parentRunId) {
if (outputs?.constructor?.name === 'AIMessageChunk') {
data.appendMessageAnnotation({
type: 'citations',
documents: getCitations(
outputs.content as string,
'inline',
retrievedDocs
) as any,
});
}
// check that main chain (without parent) is finished:
if (parentRunId == null) {
data.close().catch(() => {});
}
},
context: RunnableSequence.from([(input) => input.question, retrievalChain]),
question: (input) => input.question,
},
],
}
);
RunnableLambda.from((inputs) => {
dataStream.writeMessageAnnotation({
type: 'search_query',
question: inputs.question,
});
return inputs;
}),
RunnableLambda.from(contextLimitCheck(this.options?.rag?.inputTokensLimit, prompt)),
RunnableLambda.from(registerContextTokenCounts(dataStream)),
prompt,
this.options.model.withConfig({ metadata: { type: 'question_answer_qa' } }),
]);
return stream.pipeThrough(createStreamDataTransformer(true)).pipeThrough(data.stream);
const conversationalRetrievalQAChain = RunnableSequence.from([
{
question: standaloneQuestionChain,
chat_history: (input) => input.chat_history,
},
answerChain,
]);
const lcStream = await conversationalRetrievalQAChain.stream(
{
question,
chat_history: chatHistory,
},
{
callbacks: [
{
// callback for chat based models (OpenAI)
handleChatModelStart(
llm,
msg: BaseMessage[][],
runId,
parentRunId,
extraParams,
tags,
metadata: Record<string, string>
) {
if (metadata?.type === 'question_answer_qa') {
dataStream.writeMessageAnnotation({
type: 'prompt_token_count',
count: getTokenEstimateFromMessages(msg),
});
dataStream.writeMessageAnnotation({
type: 'search_query',
question,
});
}
},
// callback for prompt based models (Bedrock uses ActionsClientLlm)
handleLLMStart(llm, input, runId, parentRunId, extraParams, tags, metadata) {
if (metadata?.type === 'question_answer_qa') {
dataStream.writeMessageAnnotation({
type: 'prompt_token_count',
count: getTokenEstimate(input[0]),
});
}
},
handleRetrieverEnd(documents) {
retrievedDocs.push(...documents);
dataStream.writeMessageAnnotation({
type: 'retrieved_docs',
documents: documents as any,
});
},
handleChainEnd(outputs, runId, parentRunId) {
if (outputs?.constructor?.name === 'AIMessageChunk') {
dataStream.writeMessageAnnotation({
type: 'citations',
documents: getCitations(
outputs.content as string,
'inline',
retrievedDocs
) as any,
});
}
},
},
],
}
);
return LangChainAdapter.mergeIntoDataStream(lcStream, { dataStream });
},
onError: (error: unknown) => {
if (error instanceof Error) {
return error.message;
}
return 'An error occurred while processing the request';
},
});
}
}

View file

@@ -155,10 +155,8 @@ export function defineRoutes({
questionRewritePrompt,
});
let stream: ReadableStream<Uint8Array>;
try {
stream = await chain.stream(aiClient, messages);
const stream = await chain.stream(aiClient, messages);
analytics.reportEvent<SendMessageEventData>(sendMessageEvent.eventType, {
connectorType:

View file

@@ -0,0 +1,12 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
// webstream-polyfill doesn't fully support the whole spec. This is a workaround to rely on node native streaming support.
// see /kibana/packages/kbn-test/src/jest/setup/setup_test.js
const webStream = jest.requireActual('node:stream/web');
Object.assign(global, webStream);

View file

@@ -47,6 +47,10 @@ describe('handleStreamResponse', () => {
};
});
afterEach(() => {
jest.restoreAllMocks();
});
it('should handle stream correctly', async () => {
expect.assertions(3);
const data = ['Hello', 'World'];
@@ -67,32 +71,4 @@
expect(response.ok).toHaveBeenCalled();
expect(logger.error).not.toHaveBeenCalled();
});
it('should handle decoding errors', async () => {
expect.assertions(3);
// @ts-ignore
jest.spyOn(global, 'TextDecoder').mockImplementation(() => ({
decode: jest.fn(() => {
throw new Error('Test error');
}),
}));
const reader = {
read: jest
.fn()
.mockResolvedValueOnce({ done: false, value: null })
.mockResolvedValueOnce({ done: true }),
};
stream.getReader.mockReturnValue(reader);
// @ts-ignore
await handleStreamResponse({ stream, request, response, logger, maxTimeoutMs: 0 });
await new Promise((r) => setTimeout(r, 100));
expect(reader.read).toHaveBeenCalledTimes(2);
expect(logger.error).toHaveBeenCalled();
expect(response.ok).toHaveBeenCalled();
jest.restoreAllMocks();
});
});

View file

@@ -25,7 +25,6 @@ export const handleStreamResponse = async ({
}) => {
const { end, push, responseWithHeaders } = streamFactory(logger, isCloud);
const reader = stream.getReader();
const textDecoder = new TextDecoder();
const abortController = new AbortController();
@@ -38,21 +37,13 @@
async function pushStreamUpdate() {
try {
const { done, value }: { done: boolean; value?: Uint8Array } = await reader.read();
const { done, value }: { done: boolean; value?: string } = await reader.read();
if (done || abortController.signal.aborted) {
end();
return;
}
let decodedValue;
try {
decodedValue = textDecoder.decode(value);
} catch (e) {
decodedValue = '';
logger.error(`Could not decode the data: ${e.toString()}`);
}
push(decodedValue);
if (value) push(value);
void pushStreamUpdate();
} catch (error) {

View file

@@ -16,7 +16,7 @@ describe('streamFactory', () => {
debug: jest.fn(),
} as unknown as Logger;
const { DELIMITER, responseWithHeaders } = streamFactory(logger);
const { DELIMITER, responseWithHeaders, end } = streamFactory(logger);
expect(DELIMITER).toBe('\n');
expect(responseWithHeaders.headers).toEqual({
@@ -27,6 +27,7 @@
'Transfer-Encoding': 'chunked',
});
expect(responseWithHeaders.body).toBeInstanceOf(PassThrough);
end();
});
it('should push data to the stream correctly', () => {
@@ -35,7 +36,7 @@
debug: jest.fn(),
} as unknown as Logger;
const { push, responseWithHeaders } = streamFactory(logger);
const { push, responseWithHeaders, end } = streamFactory(logger);
const data = 'test data';
push(data);
@@ -47,6 +48,7 @@
responseWithHeaders.body.end(() => {
expect(output).toContain(data);
end();
});
});
@@ -56,7 +58,7 @@
debug: jest.fn(),
} as unknown as Logger;
const { push, responseWithHeaders } = streamFactory(logger);
const { push, responseWithHeaders, end } = streamFactory(logger);
const data = 'short';
push(data);
@@ -68,6 +70,7 @@
responseWithHeaders.body.end(() => {
expect(output).toContain(data);
end();
});
});
@@ -77,7 +80,7 @@
debug: jest.fn(),
} as unknown as Logger;
const { push, responseWithHeaders } = streamFactory(logger);
const { push, responseWithHeaders, end } = streamFactory(logger);
// Mock the write method to simulate backpressure
const originalWrite = responseWithHeaders.body.write.bind(responseWithHeaders.body);
@@ -103,6 +106,7 @@
responseWithHeaders.body.on('end', () => {
expect(output).toContain(data);
end();
done();
});

yarn.lock (157 changes)
View file

@@ -27,6 +27,42 @@
resolved "https://registry.yarnpkg.com/@adobe/css-tools/-/css-tools-4.4.0.tgz#728c484f4e10df03d5a3acd0d8adcbbebff8ad63"
integrity sha512-Ff9+ksdQQB3rMncgqDK78uLznstjyfIf2Arnh22pW8kBpLs6rpKDwgnZT46hin5Hl1WzazzK64DOrhSwYpS7bQ==
"@ai-sdk/provider-utils@2.0.4":
version "2.0.4"
resolved "https://registry.yarnpkg.com/@ai-sdk/provider-utils/-/provider-utils-2.0.4.tgz#e76ab8b592fb656013ada954cbbc81f41e3003a9"
integrity sha512-GMhcQCZbwM6RoZCri0MWeEWXRt/T+uCxsmHEsTwNvEH3GDjNzchfX25C8ftry2MeEOOn6KfqCLSKomcgK6RoOg==
dependencies:
"@ai-sdk/provider" "1.0.2"
eventsource-parser "^3.0.0"
nanoid "^3.3.8"
secure-json-parse "^2.7.0"
"@ai-sdk/provider@1.0.2":
version "1.0.2"
resolved "https://registry.yarnpkg.com/@ai-sdk/provider/-/provider-1.0.2.tgz#1d3fa62f4d242868523549eb6cdefc2452b9bfde"
integrity sha512-YYtP6xWQyaAf5LiWLJ+ycGTOeBLWrED7LUrvc+SQIWhGaneylqbaGsyQL7VouQUeQ4JZ1qKYZuhmi3W56HADPA==
dependencies:
json-schema "^0.4.0"
"@ai-sdk/react@1.0.6":
version "1.0.6"
resolved "https://registry.yarnpkg.com/@ai-sdk/react/-/react-1.0.6.tgz#8757e7bd21c6496e007d73f878bce103a483667a"
integrity sha512-8Hkserq0Ge6AEi7N4hlv2FkfglAGbkoAXEZ8YSp255c3PbnZz6+/5fppw+aROmZMOfNwallSRuy1i/iPa2rBpQ==
dependencies:
"@ai-sdk/provider-utils" "2.0.4"
"@ai-sdk/ui-utils" "1.0.5"
swr "^2.2.5"
throttleit "2.1.0"
"@ai-sdk/ui-utils@1.0.5":
version "1.0.5"
resolved "https://registry.yarnpkg.com/@ai-sdk/ui-utils/-/ui-utils-1.0.5.tgz#096db17d070131851e5dc27dfe2131d658e00906"
integrity sha512-DGJSbDf+vJyWmFNexSPUsS1AAy7gtsmFmoSyNbNbJjwl9hRIf2dknfA1V0ahx6pg3NNklNYFm53L8Nphjovfvg==
dependencies:
"@ai-sdk/provider" "1.0.2"
"@ai-sdk/provider-utils" "2.0.4"
zod-to-json-schema "^3.23.5"
"@ampproject/remapping@^2.2.0":
version "2.2.0"
resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d"
@@ -8843,7 +8879,7 @@
dependencies:
"@opentelemetry/api" "^1.0.0"
"@opentelemetry/api@1.x", "@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.1.0", "@opentelemetry/api@^1.4.1":
"@opentelemetry/api@1.9.0", "@opentelemetry/api@1.x", "@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.1.0", "@opentelemetry/api@^1.4.1":
version "1.9.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe"
integrity sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==
@@ -11477,6 +11513,11 @@
resolved "https://registry.yarnpkg.com/@types/deep-freeze-strict/-/deep-freeze-strict-1.1.0.tgz#447a6a2576191344aa42310131dd3df5c41492c4"
integrity sha1-RHpqJXYZE0SqQjEBMd099cQUksQ=
"@types/diff-match-patch@^1.0.36":
version "1.0.36"
resolved "https://registry.yarnpkg.com/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz#dcef10a69d357fe9d43ac4ff2eca6b85dbf466af"
integrity sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==
"@types/diff@^5.0.8":
version "5.0.8"
resolved "https://registry.yarnpkg.com/@types/diff/-/diff-5.0.8.tgz#28dc501cc3e7c62d4c5d096afe20755170acf276"
@@ -13316,18 +13357,18 @@ aggregate-error@^3.0.0, aggregate-error@^3.1.0:
clean-stack "^2.0.0"
indent-string "^4.0.0"
ai@^2.2.33:
version "2.2.37"
resolved "https://registry.yarnpkg.com/ai/-/ai-2.2.37.tgz#49bae60229937bde351d72d3ff721ccb8060edb2"
integrity sha512-JIYm5N1muGVqBqWnvkt29FmXhESoO5TcDxw74OE41SsM+uIou6NPDDs0XWb/ABcd1gmp6k5zym64KWMPM2xm0A==
ai@^4.0.18:
version "4.0.18"
resolved "https://registry.yarnpkg.com/ai/-/ai-4.0.18.tgz#94a270165133a9327d73a1327ffefdc7b5581832"
integrity sha512-BTWzalLNE1LQphEka5xzJXDs5v4xXy1Uzr7dAVk+C/CnO3WNpuMBgrCymwUv0VrWaWc8xMQuh+OqsT7P7JyekQ==
dependencies:
eventsource-parser "1.0.0"
nanoid "3.3.6"
solid-swr-store "0.10.7"
sswr "2.0.0"
swr "2.2.0"
swr-store "0.10.6"
swrv "1.0.4"
"@ai-sdk/provider" "1.0.2"
"@ai-sdk/provider-utils" "2.0.4"
"@ai-sdk/react" "1.0.6"
"@ai-sdk/ui-utils" "1.0.5"
"@opentelemetry/api" "1.9.0"
jsondiffpatch "0.6.0"
zod-to-json-schema "^3.23.5"
airbnb-js-shims@^2.2.1:
version "2.2.1"
@@ -15132,9 +15173,9 @@ chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2, chalk@~4.1
ansi-styles "^4.1.0"
supports-color "^7.1.0"
chalk@^5.1.2:
chalk@^5.1.2, chalk@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385"
resolved "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz"
integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==
chance@1.0.18:
@@ -18691,11 +18732,6 @@ events@^3.0.0, events@^3.2.0, events@^3.3.0:
resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
eventsource-parser@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/eventsource-parser/-/eventsource-parser-1.0.0.tgz#6332e37fd5512e3c8d9df05773b2bf9e152ccc04"
integrity sha512-9jgfSCa3dmEme2ES3mPByGXfgZ87VbP97tng1G2nWwWx6bV2nYxm2AWCrbQjXToSe+yYlqaZNtxffR9IeQr95g==
eventsource-parser@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/eventsource-parser/-/eventsource-parser-3.0.0.tgz#9303e303ef807d279ee210a17ce80f16300d9f57"
@@ -22896,9 +22932,9 @@ json-schema-typed@^8.0.1:
resolved "https://registry.yarnpkg.com/json-schema-typed/-/json-schema-typed-8.0.1.tgz#826ee39e3b6cef536f85412ff048d3ff6f19dfa0"
integrity sha512-XQmWYj2Sm4kn4WeTYvmpKEbyPsL7nBsb647c7pMe6l02/yx2+Jfc4dT6UZkEXnIUb5LhD55r2HPsJ1milQ4rDg==
json-schema@0.4.0:
json-schema@0.4.0, json-schema@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5"
resolved "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz"
integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==
json-stable-stringify-without-jsonify@^1.0.1:
@@ -22960,6 +22996,15 @@ jsondiffpatch@0.4.1:
chalk "^2.3.0"
diff-match-patch "^1.0.0"
jsondiffpatch@0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz#daa6a25bedf0830974c81545568d5f671c82551f"
integrity sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==
dependencies:
"@types/diff-match-patch" "^1.0.36"
chalk "^5.3.0"
diff-match-patch "^1.0.5"
jsonfile@^2.1.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-2.4.0.tgz#3736a2b428b87bbda0cc83b53fa3d633a35c2ae8"
@@ -24944,15 +24989,10 @@ nano-css@^5.2.1:
stacktrace-js "^2.0.0"
stylis "3.5.0"
nanoid@3.3.6:
version "3.3.6"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c"
integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==
nanoid@^3.3.1, nanoid@^3.3.6:
version "3.3.7"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
nanoid@^3.3.1, nanoid@^3.3.6, nanoid@^3.3.8:
version "3.3.8"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf"
integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==
nanomatch@^1.2.9:
version "1.2.9"
@@ -29316,10 +29356,10 @@ screenfull@^5.0.0:
resolved "https://registry.yarnpkg.com/screenfull/-/screenfull-5.0.0.tgz#5c2010c0e84fd4157bf852877698f90b8cbe96f6"
integrity sha512-yShzhaIoE9OtOhWVyBBffA6V98CDCoyHTsp8228blmqYy1Z5bddzE/4FPiJKlr8DVR4VBiiUyfPzIQPIYDkeMA==
secure-json-parse@^2.4.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/secure-json-parse/-/secure-json-parse-2.6.0.tgz#95d89f84adf32d76ff7800e68a673b129fe918b0"
integrity sha512-B9osKohb6L+EZ6Kve3wHKfsAClzOC/iISA2vSuCe5Jx5NAKiwitfxx8ZKYapHXr0sYRj7UZInT7pLb3rp2Yx6A==
secure-json-parse@^2.4.0, secure-json-parse@^2.7.0:
version "2.7.0"
resolved "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz"
integrity sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==
seedrandom@^3.0.5:
version "3.0.5"
@@ -29890,11 +29930,6 @@ socks@^2.7.1:
ip-address "^9.0.5"
smart-buffer "^4.2.0"
solid-swr-store@0.10.7:
version "0.10.7"
resolved "https://registry.yarnpkg.com/solid-swr-store/-/solid-swr-store-0.10.7.tgz#9511308f01250a1509efbfaad5b481be7517e436"
integrity sha512-A6d68aJmRP471aWqKKPE2tpgOiR5fH4qXQNfKIec+Vap+MGQm3tvXlT8n0I8UgJSlNAsSAUuw2VTviH2h3Vv5g==
sonic-boom@^1.0.2:
version "1.3.0"
resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-1.3.0.tgz#5c77c846ce6c395dddf2eb8e8e65f9cc576f2e76"
@@ -30220,13 +30255,6 @@ ssri@^8.0.1:
dependencies:
minipass "^3.1.1"
sswr@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/sswr/-/sswr-2.0.0.tgz#db5e1f7c44addb8316de8e7efe23b7ea2cba090d"
integrity sha512-mV0kkeBHcjcb0M5NqKtKVg/uTIYNlIIniyDfSGrSfxpEdM9C365jK0z55pl9K0xAkNTJi2OAOVFQpgMPUk+V0w==
dependencies:
swrev "^4.0.0"
stable@^0.1.8:
version "0.1.8"
resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf"
@@ -30934,20 +30962,6 @@ swagger2openapi@^7.0.8:
yaml "^1.10.0"
yargs "^17.0.1"
swr-store@0.10.6:
version "0.10.6"
resolved "https://registry.yarnpkg.com/swr-store/-/swr-store-0.10.6.tgz#1856bda886e87dbed40c8c9874c1b1624d2e502d"
integrity sha512-xPjB1hARSiRaNNlUQvWSVrG5SirCjk2TmaUyzzvk69SZQan9hCJqw/5rG9iL7xElHU784GxRPISClq4488/XVw==
dependencies:
dequal "^2.0.3"
swr@2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/swr/-/swr-2.2.0.tgz#575c6ac1bec087847f4c86a39ccbc0043c834d6a"
integrity sha512-AjqHOv2lAhkuUdIiBu9xbuettzAzWXmCEcLONNKJRba87WAefz8Ca9d6ds/SzrPc235n1IxWYdhJ2zF3MNUaoQ==
dependencies:
use-sync-external-store "^1.2.0"
swr@^2.2.5:
version "2.2.5"
resolved "https://registry.yarnpkg.com/swr/-/swr-2.2.5.tgz#063eea0e9939f947227d5ca760cc53696f46446b"
@@ -30956,16 +30970,6 @@ swr@^2.2.5:
client-only "^0.0.1"
use-sync-external-store "^1.2.0"
swrev@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/swrev/-/swrev-4.0.0.tgz#83da6983c7ef9d71ac984a9b169fc197cbf18ff8"
integrity sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==
swrv@1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/swrv/-/swrv-1.0.4.tgz#278b4811ed4acbb1ae46654972a482fd1847e480"
integrity sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==
symbol-observable@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804"
@@ -31280,6 +31284,11 @@ throttle-debounce@^3.0.1:
resolved "https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-3.0.1.tgz#32f94d84dfa894f786c9a1f290e7a645b6a19abb"
integrity sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg==
throttleit@2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/throttleit/-/throttleit-2.1.0.tgz#a7e4aa0bf4845a5bd10daa39ea0c783f631a07b4"
integrity sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==
throttleit@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/throttleit/-/throttleit-1.0.0.tgz#9e785836daf46743145a5984b6268d828528ac6c"
@@ -33941,10 +33950,10 @@ zip-stream@^6.0.1:
compress-commons "^6.0.2"
readable-stream "^4.0.0"
zod-to-json-schema@^3.22.3, zod-to-json-schema@^3.22.4, zod-to-json-schema@^3.22.5, zod-to-json-schema@^3.23.0:
version "3.23.2"
resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.23.2.tgz#bc7e379c8050462538383e382964c03d8fe008f9"
integrity sha512-uSt90Gzc/tUfyNqxnjlfBs8W6WSGpNBv0rVsNxP/BVSMHMKGdthPYff4xtCHYloJGM0CFxFsb3NbC0eqPhfImw==
zod-to-json-schema@^3.22.3, zod-to-json-schema@^3.22.4, zod-to-json-schema@^3.22.5, zod-to-json-schema@^3.23.0, zod-to-json-schema@^3.23.5:
version "3.24.1"
resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.24.1.tgz#f08c6725091aadabffa820ba8d50c7ab527f227a"
integrity sha512-3h08nf3Vw3Wl3PK+q3ow/lIil81IT2Oa7YpQyUUDsEWbXveMesdfK1xBd2RhCkynwZndAxixji/7SYJJowr62w==
zod@3.23.8, zod@^3.22.3, zod@^3.22.4, zod@^3.23.8:
version "3.23.8"