[Obs AI Assistant] Add Langtrace instrumentation (#182835)

Implements support for [Langtrace](https://langtrace.ai/) tracing, in
order for us to more easily inspect LLM requests/responses. Previously
this depended on Kibana trace logs which are verbose and somewhat
ephemeral.
This commit is contained in:
Dario Gieselaar 2024-06-03 15:11:09 +02:00 committed by GitHub
parent ee4b8eaa77
commit 9528a22276
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
17 changed files with 878 additions and 303 deletions

View file

@ -922,6 +922,8 @@
"@langchain/community": "^0.0.44",
"@langchain/core": "^0.1.53",
"@langchain/openai": "^0.0.25",
"@langtrase/trace-attributes": "^3.0.8",
"@langtrase/typescript-sdk": "^2.2.1",
"@loaders.gl/core": "^3.4.7",
"@loaders.gl/json": "^3.4.7",
"@loaders.gl/shapefile": "^3.4.7",
@ -936,6 +938,7 @@
"@opentelemetry/exporter-prometheus": "^0.31.0",
"@opentelemetry/resources": "^1.4.0",
"@opentelemetry/sdk-metrics-base": "^0.31.0",
"@opentelemetry/sdk-trace-base": "^1.24.0",
"@opentelemetry/semantic-conventions": "^1.4.0",
"@paralleldrive/cuid2": "^2.2.2",
"@reduxjs/toolkit": "1.9.7",

View file

@ -0,0 +1,13 @@
### Tracing
For investigating LLM requests and responses, you can use Langtrace, [an open-source Observability tool for LLMs](https://langtrace.ai/). It's based on OTel. It allows us to easily see what requests are being sent to the LLM and what responses are received.
Here's how to set up locally:
- check out [the Langtrace repository](https://github.com/Scale3-Labs/langtrace) locally
- run `docker-compose up` (for further instructions, check the repository)
- go to http://localhost:3000 (or where you've configured Langtrace to run)
- create a project, and generate an API key
- run Kibana with the following command: `LANGTRACE_API_KEY=<my-api-key> LANGTRACE_API_HOST=<langtrace-host> npm start`
If you use the AI Assistant, you should then start seeing traces and metrics in the project you've configured.

View file

@ -34,6 +34,7 @@ import {
import { addLensDocsToKb } from './service/knowledge_base_service/kb_docs/lens';
import { registerFunctions } from './functions';
import { recallRankingEvent } from './analytics/recall_ranking';
import { initLangtrace } from './service/client/instrumentation/init_langtrace';
import { aiAssistantCapabilities } from '../common/capabilities';
export class ObservabilityAIAssistantPlugin
@ -50,6 +51,7 @@ export class ObservabilityAIAssistantPlugin
constructor(context: PluginInitializerContext<ObservabilityAIAssistantConfig>) {
this.logger = context.logger.get();
initLangtrace();
}
public setup(
core: CoreSetup<

View file

@ -10,6 +10,7 @@ import * as t from 'io-ts';
import { Readable } from 'stream';
import type { PluginStartContract as ActionsPluginStart } from '@kbn/actions-plugin/server';
import { KibanaRequest } from '@kbn/core/server';
import { context as otelContext } from '@opentelemetry/api';
import { aiAssistantSimulatedFunctionCalling } from '../..';
import { flushBuffer } from '../../service/util/flush_buffer';
import { observableIntoOpenAIStream } from '../../service/util/observable_into_openai_stream';
@ -18,6 +19,7 @@ import { createObservabilityAIAssistantServerRoute } from '../create_observabili
import { screenContextRt, messageRt, functionRt } from '../runtime_types';
import { ObservabilityAIAssistantRouteHandlerResources } from '../types';
import { withAssistantSpan } from '../../service/util/with_assistant_span';
import { LangTracer } from '../../service/client/instrumentation/lang_tracer';
const chatCompleteBaseRt = t.type({
body: t.intersection([
@ -144,6 +146,7 @@ const chatRoute = createObservabilityAIAssistantServerRoute({
}
: {}),
simulateFunctionCalling,
tracer: new LangTracer(otelContext.active()),
});
return observableIntoStream(response$.pipe(flushBuffer(!!cloudStart?.isCloudEnabled)));

View file

@ -10,22 +10,23 @@ import type { ActionsClient } from '@kbn/actions-plugin/server';
import type { ElasticsearchClient, IUiSettingsClient } from '@kbn/core/server';
import type { Logger } from '@kbn/logging';
import type { PublicMethodsOf } from '@kbn/utility-types';
import { SpanKind, context } from '@opentelemetry/api';
import { merge, omit } from 'lodash';
import {
catchError,
combineLatest,
defer,
filter,
forkJoin,
from,
merge as mergeOperator,
map,
merge as mergeOperator,
Observable,
of,
shareReplay,
switchMap,
throwError,
combineLatest,
tap,
catchError,
defer,
throwError,
} from 'rxjs';
import { Readable } from 'stream';
import { v4 } from 'uuid';
@ -68,11 +69,16 @@ import { failOnNonExistingFunctionCall } from './adapters/fail_on_non_existing_f
import { createOpenAiAdapter } from './adapters/openai_adapter';
import { LlmApiAdapter } from './adapters/types';
import { getContextFunctionRequestIfNeeded } from './get_context_function_request_if_needed';
import { LangTracer } from './instrumentation/lang_tracer';
import { continueConversation } from './operators/continue_conversation';
import { extractMessages } from './operators/extract_messages';
import { extractTokenCount } from './operators/extract_token_count';
import { instrumentAndCountTokens } from './operators/instrument_and_count_tokens';
import { continueConversation } from './operators/continue_conversation';
import { getGeneratedTitle } from './operators/get_generated_title';
import { instrumentAndCountTokens } from './operators/instrument_and_count_tokens';
import {
LangtraceServiceProvider,
withLangtraceChatCompleteSpan,
} from './operators/with_langtrace_chat_complete_span';
const MAX_FUNCTION_CALLS = 8;
@ -167,272 +173,287 @@ export class ObservabilityAIAssistantClient {
simulateFunctionCalling?: boolean;
disableFunctions?: boolean;
}): Observable<Exclude<StreamingChatResponseEvent, ChatCompletionErrorEvent>> => {
const {
functionClient,
connectorId,
simulateFunctionCalling,
instructions: requestInstructions = [],
messages: initialMessages,
signal,
responseLanguage = 'English',
persist,
kibanaPublicUrl,
isPublic,
title: predefinedTitle,
conversationId: predefinedConversationId,
disableFunctions = false,
} = params;
return new LangTracer(context.active()).startActiveSpan(
'complete',
({ tracer: completeTracer }) => {
const {
functionClient,
connectorId,
simulateFunctionCalling,
instructions: requestInstructions = [],
messages: initialMessages,
signal,
responseLanguage = 'English',
persist,
kibanaPublicUrl,
isPublic,
title: predefinedTitle,
conversationId: predefinedConversationId,
disableFunctions = false,
} = params;
if (responseLanguage) {
requestInstructions.push(
`You MUST respond in the users preferred language which is: ${responseLanguage}.`
);
}
if (responseLanguage) {
requestInstructions.push(
`You MUST respond in the users preferred language which is: ${responseLanguage}.`
);
}
const isConversationUpdate = persist && !!predefinedConversationId;
const isConversationUpdate = persist && !!predefinedConversationId;
const conversationId = persist ? predefinedConversationId || v4() : '';
const conversationId = persist ? predefinedConversationId || v4() : '';
if (persist && !isConversationUpdate && kibanaPublicUrl) {
requestInstructions.push(
`This conversation will be persisted in Kibana and available at this url: ${
kibanaPublicUrl + `/app/observabilityAIAssistant/conversations/${conversationId}`
}.`
);
}
if (persist && !isConversationUpdate && kibanaPublicUrl) {
requestInstructions.push(
`This conversation will be persisted in Kibana and available at this url: ${
kibanaPublicUrl + `/app/observabilityAIAssistant/conversations/${conversationId}`
}.`
);
}
const kbInstructions$ = from(this.fetchKnowledgeBaseInstructions()).pipe(shareReplay());
const kbInstructions$ = from(this.fetchKnowledgeBaseInstructions()).pipe(shareReplay());
// from the initial messages, override any system message with
// the one that is based on the instructions (registered, request, kb)
const messagesWithUpdatedSystemMessage$ = kbInstructions$.pipe(
map((knowledgeBaseInstructions) => {
// this is what we eventually store in the conversation
const messagesWithUpdatedSystemMessage = replaceSystemMessage(
getSystemMessageFromInstructions({
registeredInstructions: functionClient.getInstructions(),
knowledgeBaseInstructions,
requestInstructions,
availableFunctionNames: functionClient.getFunctions().map((fn) => fn.definition.name),
// from the initial messages, override any system message with
// the one that is based on the instructions (registered, request, kb)
const messagesWithUpdatedSystemMessage$ = kbInstructions$.pipe(
map((knowledgeBaseInstructions) => {
// this is what we eventually store in the conversation
const messagesWithUpdatedSystemMessage = replaceSystemMessage(
getSystemMessageFromInstructions({
registeredInstructions: functionClient.getInstructions(),
knowledgeBaseInstructions,
requestInstructions,
availableFunctionNames: functionClient
.getFunctions()
.map((fn) => fn.definition.name),
}),
initialMessages
);
return messagesWithUpdatedSystemMessage;
}),
initialMessages
shareReplay()
);
return messagesWithUpdatedSystemMessage;
}),
shareReplay()
);
// if it is:
// - a new conversation
// - no predefined title is given
// - we need to store the conversation
// we generate a title
// if not, we complete with an empty string
const title$ =
predefinedTitle || isConversationUpdate || !persist
? of(predefinedTitle || '').pipe(shareReplay())
: messagesWithUpdatedSystemMessage$.pipe(
switchMap((messages) =>
getGeneratedTitle({
messages,
responseLanguage,
logger: this.dependencies.logger,
chat: (name, chatParams) => {
return this.chat(name, {
...chatParams,
simulateFunctionCalling,
connectorId,
signal,
});
},
tracer: completeTracer,
})
),
shareReplay()
);
// if it is:
// - a new conversation
// - no predefined title is given
// - we need to store the conversation
// we generate a title
// if not, we complete with an empty string
const title$ =
predefinedTitle || isConversationUpdate || !persist
? of(predefinedTitle || '').pipe(shareReplay())
: messagesWithUpdatedSystemMessage$.pipe(
switchMap((messages) =>
getGeneratedTitle({
messages,
responseLanguage,
logger: this.dependencies.logger,
// we continue the conversation here, after resolving both the materialized
// messages and the knowledge base instructions
const nextEvents$ = combineLatest([
messagesWithUpdatedSystemMessage$,
kbInstructions$,
]).pipe(
switchMap(([messagesWithUpdatedSystemMessage, knowledgeBaseInstructions]) => {
// if needed, inject a context function request here
const contextRequest = functionClient.hasFunction('context')
? getContextFunctionRequestIfNeeded(messagesWithUpdatedSystemMessage)
: undefined;
return mergeOperator(
// if we have added a context function request, also emit
// the messageAdd event for it, so we can notify the consumer
// and add it to the conversation
...(contextRequest ? [of(contextRequest)] : []),
continueConversation({
messages: [
...messagesWithUpdatedSystemMessage,
...(contextRequest ? [contextRequest.message] : []),
],
chat: (name, chatParams) => {
// inject a chat function with predefined parameters
return this.chat(name, {
...chatParams,
signal,
simulateFunctionCalling,
connectorId,
signal,
});
},
})
),
shareReplay()
);
// we continue the conversation here, after resolving both the materialized
// messages and the knowledge base instructions
const nextEvents$ = combineLatest([messagesWithUpdatedSystemMessage$, kbInstructions$]).pipe(
switchMap(([messagesWithUpdatedSystemMessage, knowledgeBaseInstructions]) => {
// if needed, inject a context function request here
const contextRequest = functionClient.hasFunction('context')
? getContextFunctionRequestIfNeeded(messagesWithUpdatedSystemMessage)
: undefined;
return mergeOperator(
// if we have added a context function request, also emit
// the messageAdd event for it, so we can notify the consumer
// and add it to the conversation
...(contextRequest ? [of(contextRequest)] : []),
continueConversation({
messages: [
...messagesWithUpdatedSystemMessage,
...(contextRequest ? [contextRequest.message] : []),
],
chat: (name, chatParams) => {
// inject a chat function with predefined parameters
return this.chat(name, {
...chatParams,
// start out with the max number of function calls
functionCallsLeft: MAX_FUNCTION_CALLS,
functionClient,
knowledgeBaseInstructions,
requestInstructions,
signal,
simulateFunctionCalling,
connectorId,
});
},
// start out with the max number of function calls
functionCallsLeft: MAX_FUNCTION_CALLS,
functionClient,
knowledgeBaseInstructions,
requestInstructions,
signal,
logger: this.dependencies.logger,
disableFunctions,
})
logger: this.dependencies.logger,
disableFunctions,
tracer: completeTracer,
})
);
}),
shareReplay()
);
}),
shareReplay()
);
const output$ = mergeOperator(
// get all the events from continuing the conversation
nextEvents$,
// wait until all dependencies have completed
forkJoin([
messagesWithUpdatedSystemMessage$,
// get just the new messages
nextEvents$.pipe(withoutTokenCountEvents(), extractMessages()),
// count all the token count events emitted during completion
mergeOperator(
const output$ = mergeOperator(
// get all the events from continuing the conversation
nextEvents$,
title$.pipe(filter((value): value is TokenCountEvent => typeof value !== 'string'))
).pipe(extractTokenCount()),
// get just the title, and drop the token count events
title$.pipe(filter((value): value is string => typeof value === 'string')),
]).pipe(
switchMap(([messagesWithUpdatedSystemMessage, addedMessages, tokenCountResult, title]) => {
const initialMessagesWithAddedMessages =
messagesWithUpdatedSystemMessage.concat(addedMessages);
// wait until all dependencies have completed
forkJoin([
messagesWithUpdatedSystemMessage$,
// get just the new messages
nextEvents$.pipe(withoutTokenCountEvents(), extractMessages()),
// count all the token count events emitted during completion
mergeOperator(
nextEvents$,
title$.pipe(filter((value): value is TokenCountEvent => typeof value !== 'string'))
).pipe(extractTokenCount()),
// get just the title, and drop the token count events
title$.pipe(filter((value): value is string => typeof value === 'string')),
]).pipe(
switchMap(
([messagesWithUpdatedSystemMessage, addedMessages, tokenCountResult, title]) => {
const initialMessagesWithAddedMessages =
messagesWithUpdatedSystemMessage.concat(addedMessages);
const lastMessage =
initialMessagesWithAddedMessages[initialMessagesWithAddedMessages.length - 1];
const lastMessage =
initialMessagesWithAddedMessages[initialMessagesWithAddedMessages.length - 1];
// if a function request is at the very end, close the stream to consumer
// without persisting or updating the conversation. we need to wait
// on the function response to have a valid conversation
const isFunctionRequest = lastMessage.message.function_call?.name;
// if a function request is at the very end, close the stream to consumer
// without persisting or updating the conversation. we need to wait
// on the function response to have a valid conversation
const isFunctionRequest = lastMessage.message.function_call?.name;
if (!persist || isFunctionRequest) {
return of();
}
if (!persist || isFunctionRequest) {
return of();
}
if (isConversationUpdate) {
return from(this.getConversationWithMetaFields(conversationId))
.pipe(
switchMap((conversation) => {
if (!conversation) {
return throwError(() => createConversationNotFoundError());
}
const persistedTokenCount = conversation._source?.conversation.token_count ?? {
prompt: 0,
completion: 0,
total: 0,
};
return from(
this.update(
conversationId,
merge(
{},
// base conversation without messages
omit(conversation._source, 'messages'),
// update messages
{ messages: initialMessagesWithAddedMessages },
// update token count
{
conversation: {
title: title || conversation._source?.conversation.title,
token_count: {
prompt: persistedTokenCount.prompt + tokenCountResult.prompt,
completion:
persistedTokenCount.completion + tokenCountResult.completion,
total: persistedTokenCount.total + tokenCountResult.total,
},
},
if (isConversationUpdate) {
return from(this.getConversationWithMetaFields(conversationId))
.pipe(
switchMap((conversation) => {
if (!conversation) {
return throwError(() => createConversationNotFoundError());
}
)
const persistedTokenCount = conversation._source?.conversation
.token_count ?? {
prompt: 0,
completion: 0,
total: 0,
};
return from(
this.update(
conversationId,
merge(
{},
// base conversation without messages
omit(conversation._source, 'messages'),
// update messages
{ messages: initialMessagesWithAddedMessages },
// update token count
{
conversation: {
title: title || conversation._source?.conversation.title,
token_count: {
prompt: persistedTokenCount.prompt + tokenCountResult.prompt,
completion:
persistedTokenCount.completion + tokenCountResult.completion,
total: persistedTokenCount.total + tokenCountResult.total,
},
},
}
)
)
);
})
)
.pipe(
map((conversation): ConversationUpdateEvent => {
return {
conversation: conversation.conversation,
type: StreamingChatResponseEventType.ConversationUpdate,
};
})
);
}
return from(
this.create({
'@timestamp': new Date().toISOString(),
conversation: {
title,
id: conversationId,
token_count: tokenCountResult,
},
public: !!isPublic,
labels: {},
numeric_labels: {},
messages: initialMessagesWithAddedMessages,
})
).pipe(
map((conversation): ConversationCreateEvent => {
return {
conversation: conversation.conversation,
type: StreamingChatResponseEventType.ConversationCreate,
};
})
);
}
)
)
);
return output$.pipe(
instrumentAndCountTokens('complete'),
withoutTokenCountEvents(),
catchError((error) => {
this.dependencies.logger.error(error);
return throwError(() => error);
}),
tap((event) => {
if (this.dependencies.logger.isLevelEnabled('debug')) {
switch (event.type) {
case StreamingChatResponseEventType.MessageAdd:
this.dependencies.logger.debug(`Added message: ${JSON.stringify(event.message)}`);
break;
case StreamingChatResponseEventType.ConversationCreate:
this.dependencies.logger.debug(
`Created conversation: ${JSON.stringify(event.conversation)}`
);
})
)
.pipe(
map((conversation): ConversationUpdateEvent => {
return {
conversation: conversation.conversation,
type: StreamingChatResponseEventType.ConversationUpdate,
};
})
);
}
break;
return from(
this.create({
'@timestamp': new Date().toISOString(),
conversation: {
title,
id: conversationId,
token_count: tokenCountResult,
},
public: !!isPublic,
labels: {},
numeric_labels: {},
messages: initialMessagesWithAddedMessages,
})
).pipe(
map((conversation): ConversationCreateEvent => {
return {
conversation: conversation.conversation,
type: StreamingChatResponseEventType.ConversationCreate,
};
})
);
})
)
);
return output$.pipe(
instrumentAndCountTokens('complete'),
withoutTokenCountEvents(),
catchError((error) => {
this.dependencies.logger.error(error);
return throwError(() => error);
}),
tap((event) => {
if (this.dependencies.logger.isLevelEnabled('debug')) {
switch (event.type) {
case StreamingChatResponseEventType.MessageAdd:
this.dependencies.logger.debug(`Added message: ${JSON.stringify(event.message)}`);
break;
case StreamingChatResponseEventType.ConversationCreate:
this.dependencies.logger.debug(
`Created conversation: ${JSON.stringify(event.conversation)}`
);
break;
case StreamingChatResponseEventType.ConversationUpdate:
this.dependencies.logger.debug(
`Updated conversation: ${JSON.stringify(event.conversation)}`
);
break;
}
}
}),
shareReplay()
case StreamingChatResponseEventType.ConversationUpdate:
this.dependencies.logger.debug(
`Updated conversation: ${JSON.stringify(event.conversation)}`
);
break;
}
}
}),
shareReplay()
);
}
);
};
@ -445,6 +466,7 @@ export class ObservabilityAIAssistantClient {
functionCall,
signal,
simulateFunctionCalling,
tracer,
}: {
messages: Message[];
connectorId: string;
@ -452,6 +474,7 @@ export class ObservabilityAIAssistantClient {
functionCall?: string;
signal: AbortSignal;
simulateFunctionCalling?: boolean;
tracer: LangTracer;
}
): Observable<ChatCompletionChunkEvent | TokenCountEvent> => {
return defer(() =>
@ -527,7 +550,26 @@ export class ObservabilityAIAssistantClient {
signal.addEventListener('abort', () => response.destroy());
return adapter.streamIntoObservable(response);
return tracer.startActiveSpan(
'/chat/completions',
{
kind: SpanKind.CLIENT,
},
({ span }) => {
return adapter.streamIntoObservable(response).pipe(
withLangtraceChatCompleteSpan({
span,
messages,
functions,
model: connector.name,
serviceProvider:
connector.actionTypeId === ObservabilityAIAssistantConnectorType.OpenAI
? LangtraceServiceProvider.OpenAI
: LangtraceServiceProvider.Anthropic,
})
);
}
);
})
);
}),

View file

@ -0,0 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
/**
 * Returns the fixed set of Langtrace-specific attributes that is attached to
 * every span created by the LangTracer, identifying the SDK and service type
 * to the Langtrace backend.
 */
export function getLangtraceSpanAttributes() {
  const attributes = {
    'langtrace.sdk.name': '@langtrase/typescript-sdk',
    'langtrace.service.type': 'llm',
    'langtrace.service.version': 'unknown',
    'langtrace.version': '2.1.0',
  };
  return attributes;
}

View file

@ -0,0 +1,12 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { trace } from '@opentelemetry/api';
/**
 * Returns the OpenTelemetry tracer used for all Langtrace instrumentation
 * spans, registered under a single shared tracer name.
 */
export function getLangtraceTracer() {
  const tracerName = 'langtrace';
  return trace.getTracer(tracerName);
}

View file

@ -0,0 +1,23 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { init } from '@langtrase/typescript-sdk';
/**
 * Initializes the Langtrace SDK when both LANGTRACE_API_KEY and
 * LANGTRACE_API_HOST are present in the environment; otherwise this is a
 * no-op, leaving tracing disabled.
 */
export function initLangtrace() {
  const apiKey = process.env.LANGTRACE_API_KEY;
  const apiHost = process.env.LANGTRACE_API_HOST;

  if (!apiKey || !apiHost) {
    // Tracing is opt-in: without both settings, skip SDK initialization.
    return;
  }

  init({
    api_host: apiHost,
    api_key: apiKey,
    write_to_langtrace_cloud: true,
    disable_instrumentations: {
      only: [],
    },
  });
}

View file

@ -0,0 +1,111 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
InMemorySpanExporter,
BasicTracerProvider,
SimpleSpanProcessor,
ReadableSpan,
} from '@opentelemetry/sdk-trace-base';
import { context } from '@opentelemetry/api';
import { LangTracer } from './lang_tracer';
import { lastValueFrom, of, throwError } from 'rxjs';
describe('langTracer', () => {
const provider = new BasicTracerProvider();
const memoryExporter = new InMemorySpanExporter();
provider.addSpanProcessor(new SimpleSpanProcessor(memoryExporter));
provider.register();
beforeEach(() => {
memoryExporter.reset();
});
describe('when creating a span against an observable', () => {
let tracer: LangTracer;
beforeEach(() => {
tracer = new LangTracer(context.active());
});
it('calls the callback with the created span', async () => {
const spanCallback = jest.fn().mockImplementation(() => of('my_value'));
await lastValueFrom(tracer.startActiveSpan('my_span', spanCallback));
const { span } = spanCallback.mock.calls[0][0] as {
span: ReadableSpan;
};
expect(span.name).toEqual('my_span');
expect(span.attributes).toEqual({
'langtrace.sdk.name': '@langtrase/typescript-sdk',
'langtrace.service.type': 'llm',
'langtrace.service.version': 'unknown',
'langtrace.version': '2.1.0',
});
// OK
expect(span.status.code).toBe(1);
});
it('returns the observable', async () => {
const spanCallback = jest.fn().mockImplementation(() => of('my_value'));
const value = await lastValueFrom(tracer.startActiveSpan('my_span', spanCallback));
expect(value).toEqual('my_value');
});
it('ends the span with an error status code when the observable', async () => {
const spanCallback = jest
.fn()
.mockImplementation(() => throwError(() => new Error('Unexpected error')));
const errorHandler = jest.fn();
await lastValueFrom(tracer.startActiveSpan('my_span', spanCallback)).catch(errorHandler);
const { span } = spanCallback.mock.calls[0][0] as {
span: ReadableSpan;
};
expect(span.status).toEqual({
// Error
code: 2,
message: 'Unexpected error',
});
});
});
describe('when creating a child span', () => {
it('sets the first span as the parent of the second span', async () => {
const tracer = new LangTracer(context.active());
const value = await lastValueFrom(
tracer.startActiveSpan('parent', ({ span, tracer: nextTracer }) => {
return nextTracer.startActiveSpan('child', () => of('my_value'));
})
);
expect(value).toEqual('my_value');
const mappedSpans = memoryExporter.getFinishedSpans().map((span) => ({
name: span.name,
id: span.spanContext().spanId,
parentId: span.parentSpanId,
}));
const parentSpan = mappedSpans.find((span) => span.name === 'parent');
const childSpan = mappedSpans.find((span) => span.name === 'child');
expect(parentSpan).not.toBeUndefined();
expect(childSpan).not.toBeUndefined();
expect(childSpan?.parentId).toEqual(parentSpan?.id);
});
});
});

View file

@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { LLMSpanAttributes } from '@langtrase/trace-attributes';
import { Context, Span, SpanKind, SpanStatusCode, trace } from '@opentelemetry/api';
import { finalize, Observable, tap } from 'rxjs';
import { getLangtraceSpanAttributes } from './get_langtrace_span_attributes';
import { getLangtraceTracer } from './get_langtrace_tracer';
type SpanCallback<T> = ({}: { span: Span; tracer: LangTracer }) => Observable<T>;
interface Options {
attributes?: Partial<LLMSpanAttributes>;
kind?: SpanKind;
}
/**
 * Thin wrapper around an OpenTelemetry tracer that ties span lifecycle to an
 * RxJS observable: the span is started when `startActiveSpan` is called, its
 * status is derived from how the observable terminates, and it is ended
 * exactly once when the subscription tears down.
 */
export class LangTracer {
  private tracer = getLangtraceTracer();

  constructor(private context: Context) {}

  startActiveSpan<T>(name: string, callback: SpanCallback<T>): Observable<T>;
  startActiveSpan<T>(name: string, options: Options, callback: SpanCallback<T>): Observable<T>;

  startActiveSpan<T>(
    name: string,
    ...rest: [Options, SpanCallback<T>] | [SpanCallback<T>]
  ): Observable<T> {
    // Normalize the two overloads: `options` may be omitted.
    let [options, callback] = rest;
    if (typeof options === 'function') {
      callback = options;
      options = {};
    }

    const span = this.tracer.startSpan(
      name,
      {
        ...options,
        attributes: {
          ...getLangtraceSpanAttributes(),
          ...(options.attributes || {}),
        },
      },
      this.context
    );

    // Hand the callback a tracer bound to the new span's context so that any
    // spans it creates become children of this one.
    const nextContext = trace.setSpan(this.context, span);
    const nextTracer = new LangTracer(nextContext);

    return callback!({ span, tracer: nextTracer }).pipe(
      tap({
        error: (error) => {
          span.recordException(error);
          span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
        },
        complete: () => {
          span.setStatus({ code: SpanStatusCode.OK });
        },
      }),
      finalize(() => {
        // finalize runs on complete, error, AND unsubscribe, so ending the
        // span only here guarantees span.end() is called exactly once per
        // span. (Ending it in the tap handlers as well would end it twice,
        // which OpenTelemetry flags with a duplicate-end warning.)
        span.end();
      })
    );
  }
}

View file

@ -37,6 +37,7 @@ import type { ChatFunctionWithoutConnector } from '../../types';
import { createServerSideFunctionResponseError } from '../../util/create_server_side_function_response_error';
import { getSystemMessageFromInstructions } from '../../util/get_system_message_from_instructions';
import { replaceSystemMessage } from '../../util/replace_system_message';
import { LangTracer } from '../instrumentation/lang_tracer';
import { catchFunctionNotFoundError } from './catch_function_not_found_error';
import { extractMessages } from './extract_messages';
import { hideTokenCountEvents } from './hide_token_count_events';
@ -51,6 +52,7 @@ function executeFunctionAndCatchError({
chat,
signal,
logger,
tracer,
}: {
name: string;
args: string | undefined;
@ -59,62 +61,69 @@ function executeFunctionAndCatchError({
chat: ChatFunctionWithoutConnector;
signal: AbortSignal;
logger: Logger;
tracer: LangTracer;
}): Observable<MessageOrChatEvent> {
// hide token count events from functions to prevent them from
// having to deal with it as well
return hideTokenCountEvents((hide) => {
const executeFunctionResponse$ = from(
functionClient.executeFunction({
name,
chat: (operationName, params) => {
return chat(operationName, params).pipe(hide());
},
args,
signal,
messages,
})
);
return executeFunctionResponse$.pipe(
catchError((error) => {
logger.error(`Encountered error running function ${name}: ${JSON.stringify(error)}`);
// We want to catch the error only when a promise occurs
// if it occurs in the Observable, we cannot easily recover
// from it because the function may have already emitted
// values which could lead to an invalid conversation state,
// so in that case we let the stream fail.
return of(createServerSideFunctionResponseError({ name, error }));
}),
switchMap((response) => {
if (isObservable(response)) {
return response;
}
return tracer.startActiveSpan(`execute_function ${name}`, ({ tracer: nextTracer }) =>
hideTokenCountEvents((hide) => {
const executeFunctionResponse$ = from(
functionClient.executeFunction({
name,
chat: (operationName, params) => {
return chat(operationName, {
...params,
tracer: nextTracer,
}).pipe(hide());
},
args,
signal,
messages,
})
);
// is messageAdd event
if ('type' in response) {
return of(response);
}
return executeFunctionResponse$.pipe(
catchError((error) => {
logger.error(`Encountered error running function ${name}: ${JSON.stringify(error)}`);
// We want to catch the error only when a promise occurs
// if it occurs in the Observable, we cannot easily recover
// from it because the function may have already emitted
// values which could lead to an invalid conversation state,
// so in that case we let the stream fail.
return of(createServerSideFunctionResponseError({ name, error }));
}),
switchMap((response) => {
if (isObservable(response)) {
return response;
}
const encoded = encode(JSON.stringify(response.content || {}));
// is messageAdd event
if ('type' in response) {
return of(response);
}
const exceededTokenLimit = encoded.length >= MAX_FUNCTION_RESPONSE_TOKEN_COUNT;
const encoded = encode(JSON.stringify(response.content || {}));
return of(
createFunctionResponseMessage({
name,
content: exceededTokenLimit
? {
message:
'Function response exceeded the maximum length allowed and was truncated',
truncated: decode(take(encoded, MAX_FUNCTION_RESPONSE_TOKEN_COUNT)),
}
: response.content,
data: response.data,
})
);
})
);
});
const exceededTokenLimit = encoded.length >= MAX_FUNCTION_RESPONSE_TOKEN_COUNT;
return of(
createFunctionResponseMessage({
name,
content: exceededTokenLimit
? {
message:
'Function response exceeded the maximum length allowed and was truncated',
truncated: decode(take(encoded, MAX_FUNCTION_RESPONSE_TOKEN_COUNT)),
}
: response.content,
data: response.data,
})
);
})
);
})
);
}
function getFunctionDefinitions({
@ -158,6 +167,7 @@ export function continueConversation({
knowledgeBaseInstructions,
logger,
disableFunctions,
tracer,
}: {
messages: Message[];
functionClient: ChatFunctionClient;
@ -168,6 +178,7 @@ export function continueConversation({
knowledgeBaseInstructions: UserInstruction[];
logger: Logger;
disableFunctions: boolean;
tracer: LangTracer;
}): Observable<MessageOrChatEvent> {
let nextFunctionCallsLeft = functionCallsLeft;
@ -206,6 +217,7 @@ export function continueConversation({
return chat(operationName, {
messages: messagesWithUpdatedSystemMessage,
functions: definitions,
tracer,
}).pipe(
emitWithConcatenatedMessage(),
functionLimitExceeded ? catchFunctionNotFoundError() : identity
@ -282,6 +294,7 @@ export function continueConversation({
messages: messagesWithUpdatedSystemMessage,
signal,
logger,
tracer,
});
}
@ -308,6 +321,7 @@ export function continueConversation({
requestInstructions,
logger,
disableFunctions,
tracer,
});
})
)

View file

@ -12,6 +12,7 @@ import {
StreamingChatResponseEventType,
} from '../../../../common';
import { ChatEvent } from '../../../../common/conversation_complete';
import { LangTracer } from '../instrumentation/lang_tracer';
import { getGeneratedTitle } from './get_generated_title';
describe('getGeneratedTitle', () => {
@ -52,6 +53,9 @@ describe('getGeneratedTitle', () => {
error: jest.fn(),
},
messages,
tracer: {
startActiveSpan: jest.fn(),
} as unknown as LangTracer,
...options,
});
@ -204,6 +208,9 @@ describe('getGeneratedTitle', () => {
chat: chatSpy,
logger,
messages,
tracer: {
startActiveSpan: jest.fn(),
} as unknown as LangTracer,
});
const title = await lastValueFrom(title$);

View file

@ -12,6 +12,7 @@ import { Message, MessageRole } from '../../../../common';
import { concatenateChatCompletionChunks } from '../../../../common/utils/concatenate_chat_completion_chunks';
import { hideTokenCountEvents } from './hide_token_count_events';
import { ChatEvent, TokenCountEvent } from '../../../../common/conversation_complete';
import { LangTracer } from '../instrumentation/lang_tracer';
type ChatFunctionWithoutConnectorAndTokenCount = (
name: string,
@ -26,11 +27,13 @@ export function getGeneratedTitle({
messages,
chat,
logger,
tracer,
}: {
responseLanguage?: string;
messages: Message[];
chat: ChatFunctionWithoutConnectorAndTokenCount;
logger: Pick<Logger, 'debug' | 'error'>;
tracer: LangTracer;
}): Observable<string | TokenCountEvent> {
return hideTokenCountEvents((hide) =>
chat('generate_title', {
@ -71,6 +74,7 @@ export function getGeneratedTitle({
},
],
functionCall: 'title_conversation',
tracer,
}).pipe(
hide(),
concatenateChatCompletionChunks(),

View file

@ -0,0 +1,117 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { Event, LLMSpanAttributes } from '@langtrase/trace-attributes';
import { Span } from '@opentelemetry/api';
import { FunctionDefinition } from 'openai/resources';
import { ignoreElements, last, merge, OperatorFunction, share, tap } from 'rxjs';
import { Message, StreamingChatResponseEventType } from '../../../../common';
import { ChatEvent } from '../../../../common/conversation_complete';
import { concatenateChatCompletionChunks } from '../../../../common/utils/concatenate_chat_completion_chunks';
import { withoutTokenCountEvents } from '../../../../common/utils/without_token_count_events';
import { getLangtraceSpanAttributes } from '../instrumentation/get_langtrace_span_attributes';
// LLM service providers recognized by Langtrace. The enum value is sent verbatim
// as the `langtrace.service.name` span attribute so the Langtrace UI can group
// traces per provider.
export enum LangtraceServiceProvider {
  OpenAI = 'OpenAI',
  Azure = 'Azure',
  Anthropic = 'Anthropic',
}
export function withLangtraceChatCompleteSpan<T extends ChatEvent>({
span,
model,
messages,
serviceProvider,
functions,
}: {
span: Span;
model: string;
messages: Message[];
serviceProvider: LangtraceServiceProvider;
functions?: Array<Pick<FunctionDefinition, 'name' | 'description' | 'parameters'>>;
}): OperatorFunction<T, T> {
const attributes: LLMSpanAttributes = {
...getLangtraceSpanAttributes(),
'langtrace.service.name': serviceProvider,
'llm.api': '/chat/completions',
'http.max.retries': 0,
// dummy URL
'url.full': 'http://localhost:3000/chat/completions',
'http.timeout': 120 * 1000,
'llm.prompts': JSON.stringify(
messages.map((message) => ({
role: message.message.role,
content: [
message.message.content,
message.message.function_call ? JSON.stringify(message.message.function_call) : '',
]
.filter(Boolean)
.join('\n\n'),
}))
),
'llm.model': model,
'llm.stream': true,
...(functions
? {
'llm.tools': JSON.stringify(
functions.map((fn) => ({
function: fn,
type: 'function',
}))
),
}
: {}),
};
span.setAttributes(attributes);
return (source$) => {
const shared$ = source$.pipe(share());
span.addEvent(Event.STREAM_START);
const passThrough$ = shared$.pipe(
tap((value) => {
if (value.type === StreamingChatResponseEventType.ChatCompletionChunk) {
span.addEvent(Event.STREAM_OUTPUT, {
response: value.message.content,
});
return;
}
span.setAttributes({
'llm.token.counts': JSON.stringify({
input_tokens: value.tokens.prompt,
output_tokens: value.tokens.completion,
total_tokens: value.tokens.total,
}),
});
})
);
return merge(
passThrough$,
shared$.pipe(
withoutTokenCountEvents(),
concatenateChatCompletionChunks(),
last(),
tap((message) => {
span.setAttribute(
'llm.responses',
JSON.stringify([
{
role: 'assistant',
content: message.message.content,
},
])
);
}),
ignoreElements()
)
);
};
}

View file

@ -369,8 +369,8 @@ export class KnowledgeBaseService {
return customSearchConnectorIndex.split(',');
}
const response = (await responsePromise) as { results: Array<{ index_name: string }> };
const connectorIndices = response.results.map((result) => result.index_name);
const response = (await responsePromise) as { results?: Array<{ index_name: string }> };
const connectorIndices = response.results?.map((result) => result.index_name);
// preserve backwards compatibility with 8.14 (may not be needed in the future)
if (isEmpty(connectorIndices)) {

View file

@ -44,7 +44,7 @@ export type FunctionCallChatFunction = (
name: string,
params: Omit<
Parameters<ObservabilityAIAssistantClient['chat']>[1],
'connectorId' | 'simulateFunctionCalling'
'connectorId' | 'simulateFunctionCalling' | 'tracer'
>
) => Observable<ChatCompletionChunkEvent>;

151
yarn.lock
View file

@ -6922,6 +6922,27 @@
zod "^3.22.4"
zod-to-json-schema "^3.22.3"
"@langtrase/trace-attributes@^3.0.8":
version "3.0.8"
resolved "https://registry.yarnpkg.com/@langtrase/trace-attributes/-/trace-attributes-3.0.8.tgz#ff6ae44cfc048a9da10a7949664b2060a71b6304"
integrity sha512-GXUH+a0EiO8YgrZR2fkqM0B/xrf3Db1OHDlJUOGVuwacC+LJp89MbJQBlmeu3BBJLMMHG0+q4ERYEt8enCHjHw==
dependencies:
ncp "^2.0.0"
"@langtrase/typescript-sdk@^2.2.1":
version "2.2.1"
resolved "https://registry.yarnpkg.com/@langtrase/typescript-sdk/-/typescript-sdk-2.2.1.tgz#8ec60290b9882e161bc42bd51cf0a72396438633"
integrity sha512-CRzhOcGMLv68ICvENDBQ/PWIHyBNlc6lNBaqF9EM/s+HUusSPnBmvQ+nGObqpPR+uo7m9hDRRUmUXOlAooj0PQ==
dependencies:
"@langtrase/trace-attributes" "^3.0.8"
"@opentelemetry/api" "^1.7.0"
"@opentelemetry/instrumentation" "^0.49.1"
"@opentelemetry/sdk-trace-base" "^1.22.0"
"@opentelemetry/sdk-trace-node" "^1.22.0"
axios "^1.6.7"
node-loader "^2.0.0"
tiktoken "^1.0.13"
"@leichtgewicht/ip-codec@^2.0.1":
version "2.0.4"
resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b"
@ -7549,6 +7570,13 @@
dependencies:
"@octokit/openapi-types" "^18.0.0"
"@opentelemetry/api-logs@0.49.1":
version "0.49.1"
resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.49.1.tgz#51a66ed5eb5eeeafffbd36c1713aa91cbfdd5259"
integrity sha512-kaNl/T7WzyMUQHQlVq7q0oV4Kev6+0xFwqzofryC66jgGMacd0QH5TwfpbUwSTby+SdAdprAe5UKMvBw4tKS5Q==
dependencies:
"@opentelemetry/api" "^1.0.0"
"@opentelemetry/api-metrics@0.31.0", "@opentelemetry/api-metrics@^0.31.0":
version "0.31.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/api-metrics/-/api-metrics-0.31.0.tgz#0ed4cf4d7c731f968721c2b303eaf5e9fd42f736"
@ -7556,12 +7584,17 @@
dependencies:
"@opentelemetry/api" "^1.0.0"
"@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.1.0", "@opentelemetry/api@^1.4.1":
version "1.4.1"
resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.4.1.tgz#ff22eb2e5d476fbc2450a196e40dd243cc20c28f"
integrity sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==
"@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.1.0", "@opentelemetry/api@^1.4.1", "@opentelemetry/api@^1.7.0":
version "1.8.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.8.0.tgz#5aa7abb48f23f693068ed2999ae627d2f7d902ec"
integrity sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==
"@opentelemetry/core@1.15.0", "@opentelemetry/core@^1.11.0":
"@opentelemetry/context-async-hooks@1.24.0":
version "1.24.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/context-async-hooks/-/context-async-hooks-1.24.0.tgz#f5f8cc15038d293a8e9b570543c1f36aa4ee17ec"
integrity sha512-s7xaQ9ifDpJvwbWRLkZD/J5hY35w+MECm4TQUkg6szRcny9lf6oVhWij4w3JJFQgvHQMXU7oXOpX8Z05HxV/8g==
"@opentelemetry/core@1.15.0":
version "1.15.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.15.0.tgz#2ba928df0443732825a72a766c2edae9a7f9863f"
integrity sha512-GGTS6BytfaN8OgbCUOnxg/a9WVsVUj0484zXHZuBzvIXx7V4Tmkb0IHnnhS7Q0cBLNLgjNuvrCpQaP8fIvO4bg==
@ -7569,6 +7602,13 @@
"@opentelemetry/semantic-conventions" "1.15.0"
tslib "^2.3.1"
"@opentelemetry/core@1.24.0", "@opentelemetry/core@^1.11.0":
version "1.24.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.24.0.tgz#5568b6c1328a6b9c94a77f9b2c7f872b852bba40"
integrity sha512-FP2oN7mVPqcdxJDTTnKExj4mi91EH+DNuArKfHTjPuJWe2K1JfMIVXNfahw1h3onJxQnxS8K0stKkogX05s+Aw==
dependencies:
"@opentelemetry/semantic-conventions" "1.24.0"
"@opentelemetry/core@1.5.0":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.5.0.tgz#717bceee15d4c69d4c7321c1fe0f5a562b60eb81"
@ -7616,6 +7656,18 @@
"@opentelemetry/core" "1.5.0"
"@opentelemetry/sdk-metrics-base" "0.31.0"
"@opentelemetry/instrumentation@^0.49.1":
version "0.49.1"
resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation/-/instrumentation-0.49.1.tgz#1b95e5f9448a96e7af97e03846772829439a9a91"
integrity sha512-0DLtWtaIppuNNRRllSD4bjU8ZIiLp1cDXvJEbp752/Zf+y3gaLNaoGRGIlX4UHhcsrmtL+P2qxi3Hodi8VuKiQ==
dependencies:
"@opentelemetry/api-logs" "0.49.1"
"@types/shimmer" "^1.0.2"
import-in-the-middle "1.7.1"
require-in-the-middle "^7.1.1"
semver "^7.5.2"
shimmer "^1.2.1"
"@opentelemetry/otlp-exporter-base@0.34.0":
version "0.34.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.34.0.tgz#c6020b63590d4b8ac3833eda345a6f582fa014b1"
@ -7643,7 +7695,21 @@
"@opentelemetry/sdk-metrics" "1.8.0"
"@opentelemetry/sdk-trace-base" "1.8.0"
"@opentelemetry/resources@1.15.0", "@opentelemetry/resources@^1.4.0":
"@opentelemetry/propagator-b3@1.24.0":
version "1.24.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/propagator-b3/-/propagator-b3-1.24.0.tgz#88a2ffede42ac6df7c409bacec0c9f9cc181bc13"
integrity sha512-7TMIDE4+NO5vnkor+zned42wqca+hmhW5gWKhmYjUHC5B5uojo1PvtmBrd7kigFu96XvL4ZUWVzibWRWIQ/++Q==
dependencies:
"@opentelemetry/core" "1.24.0"
"@opentelemetry/propagator-jaeger@1.24.0":
version "1.24.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/propagator-jaeger/-/propagator-jaeger-1.24.0.tgz#e67fe6f8f2f1d74335909a7f7352d0761039ab79"
integrity sha512-r3MX3AmJiUeiWTXSDOdwBeaO+ahvWcFCpuKxmhhsH8Q8LqDnjhNd3krqBh4Qsq9wa0WhWtiQaDs/NOCWoMOlOw==
dependencies:
"@opentelemetry/core" "1.24.0"
"@opentelemetry/resources@1.15.0":
version "1.15.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.15.0.tgz#748a6ae9017636b8b30f5dee1fff3e166e51f63d"
integrity sha512-Sb8A6ZXHXDlgHv32UNRE3y8McWE3vkb5dsSttYArYa5ZpwjiF5ge0vnnKUUnG7bY0AgF9VBIOORZE8gsrnD2WA==
@ -7652,6 +7718,14 @@
"@opentelemetry/semantic-conventions" "1.15.0"
tslib "^2.3.1"
"@opentelemetry/resources@1.24.0", "@opentelemetry/resources@^1.4.0":
version "1.24.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.24.0.tgz#f27911af0917986da5716775021eae0a872ba98e"
integrity sha512-mxC7E7ocUS1tLzepnA7O9/G8G6ZTdjCH2pXme1DDDuCuk6n2/53GADX+GWBuyX0dfIxeMInIbJAdjlfN9GNr6A==
dependencies:
"@opentelemetry/core" "1.24.0"
"@opentelemetry/semantic-conventions" "1.24.0"
"@opentelemetry/resources@1.5.0":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.5.0.tgz#ce7fbdaec3494e41bc279ddbed3c478ee2570b03"
@ -7697,6 +7771,15 @@
lodash.merge "^4.6.2"
tslib "^2.3.1"
"@opentelemetry/sdk-trace-base@1.24.0", "@opentelemetry/sdk-trace-base@^1.22.0", "@opentelemetry/sdk-trace-base@^1.24.0":
version "1.24.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.24.0.tgz#e2de869e33fd224f6d9f39bafa4172074d1086c8"
integrity sha512-H9sLETZ4jw9UJ3totV8oM5R0m4CW0ZIOLfp4NV3g0CM8HD5zGZcaW88xqzWDgiYRpctFxd+WmHtGX/Upoa2vRg==
dependencies:
"@opentelemetry/core" "1.24.0"
"@opentelemetry/resources" "1.24.0"
"@opentelemetry/semantic-conventions" "1.24.0"
"@opentelemetry/sdk-trace-base@1.8.0":
version "1.8.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.8.0.tgz#70713aab90978a16dea188c8335209f857be7384"
@ -7706,13 +7789,30 @@
"@opentelemetry/resources" "1.8.0"
"@opentelemetry/semantic-conventions" "1.8.0"
"@opentelemetry/semantic-conventions@1.15.0", "@opentelemetry/semantic-conventions@^1.4.0":
"@opentelemetry/sdk-trace-node@^1.22.0":
version "1.24.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-node/-/sdk-trace-node-1.24.0.tgz#34c56f092f98a16e0e045152c9a4baf50ed8dcee"
integrity sha512-QgByHmM9uloTpcYEEyW9YJEIMKHFSIM677RH9pJPWWwtM2NQFbEp/8HIJw80Ymtaz6cAxg1Kay1ByqIVzq3t5g==
dependencies:
"@opentelemetry/context-async-hooks" "1.24.0"
"@opentelemetry/core" "1.24.0"
"@opentelemetry/propagator-b3" "1.24.0"
"@opentelemetry/propagator-jaeger" "1.24.0"
"@opentelemetry/sdk-trace-base" "1.24.0"
semver "^7.5.2"
"@opentelemetry/semantic-conventions@1.15.0":
version "1.15.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.15.0.tgz#e6173daa5fd61f353b02c858001388bf26e9d059"
integrity sha512-f3wwFrFyCpGrFBrFs7lCUJSCSCGyeKG52c+EKeobs3Dd29M75yO6GYkt6PkYPfDawxSlV5p+4yJPPk8tPObzTQ==
dependencies:
tslib "^2.3.1"
"@opentelemetry/semantic-conventions@1.24.0", "@opentelemetry/semantic-conventions@^1.4.0":
version "1.24.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.24.0.tgz#f074db930a7feb4d64103a9a576c5fbad046fcac"
integrity sha512-yL0jI6Ltuz8R+Opj7jClGrul6pOoYrdfVmzQS4SITXRPH7I5IRZbrwe/6/v8v4WYMa6MYZG480S1+uc/IGfqsA==
"@opentelemetry/semantic-conventions@1.5.0":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.5.0.tgz#cea9792bfcf556c87ded17c6ac729348697bb632"
@ -10797,6 +10897,11 @@
resolved "https://registry.yarnpkg.com/@types/set-value/-/set-value-2.0.0.tgz#63d386b103926dcf49b50e16e0f6dd49983046be"
integrity sha512-k8dCJEC80F/mbsIOZ5Hj3YSzTVVVBwMdtP/M9Rtc2TM4F5etVd+2UG8QUiAUfbXm4fABedL2tBZnrBheY7UwpA==
"@types/shimmer@^1.0.2":
version "1.0.5"
resolved "https://registry.yarnpkg.com/@types/shimmer/-/shimmer-1.0.5.tgz#491d8984d4510e550bfeb02d518791d7f59d2b88"
integrity sha512-9Hp0ObzwwO57DpLFF0InUjUm/II8GmKAvzbefxQTihCb7KI6yc9yzf0nLc4mVdby5N4DRCgQM2wCup9KTieeww==
"@types/sinon@^7.0.13":
version "7.0.13"
resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-7.0.13.tgz#ca039c23a9e27ebea53e0901ef928ea2a1a6d313"
@ -19408,6 +19513,16 @@ import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.3.0:
parent-module "^1.0.0"
resolve-from "^4.0.0"
import-in-the-middle@1.7.1:
version "1.7.1"
resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.7.1.tgz#3e111ff79c639d0bde459bd7ba29dd9fdf357364"
integrity sha512-1LrZPDtW+atAxH42S6288qyDFNQ2YCty+2mxEPRtfazH6Z5QwkaBSTS2ods7hnVJioF6rkRfNoA6A/MstpFXLg==
dependencies:
acorn "^8.8.2"
acorn-import-assertions "^1.9.0"
cjs-module-lexer "^1.2.2"
module-details-from-path "^1.0.3"
import-in-the-middle@1.7.4:
version "1.7.4"
resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.7.4.tgz#508da6e91cfa84f210dcdb6c0a91ab0c9e8b3ebc"
@ -23310,6 +23425,11 @@ natural-compare@^1.4.0:
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
ncp@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ncp/-/ncp-2.0.0.tgz#195a21d6c46e361d2fb1281ba38b91e9df7bdbb3"
integrity sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==
nearley@^2.7.10:
version "2.16.0"
resolved "https://registry.yarnpkg.com/nearley/-/nearley-2.16.0.tgz#77c297d041941d268290ec84b739d0ee297e83a7"
@ -23551,6 +23671,13 @@ node-libs-browser@^2.2.1:
util "^0.11.0"
vm-browserify "^1.0.1"
node-loader@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/node-loader/-/node-loader-2.0.0.tgz#9109a6d828703fd3e0aa03c1baec12a798071562"
integrity sha512-I5VN34NO4/5UYJaUBtkrODPWxbobrE4hgDqPrjB25yPkonFhCmZ146vTH+Zg417E9Iwoh1l/MbRs1apc5J295Q==
dependencies:
loader-utils "^2.0.0"
node-preload@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/node-preload/-/node-preload-0.2.1.tgz#c03043bb327f417a18fee7ab7ee57b408a144301"
@ -28035,6 +28162,11 @@ shelljs@^0.8.5:
interpret "^1.0.0"
rechoir "^0.6.2"
shimmer@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/shimmer/-/shimmer-1.2.1.tgz#610859f7de327b587efebf501fb43117f9aff337"
integrity sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==
should-equal@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/should-equal/-/should-equal-2.0.0.tgz#6072cf83047360867e68e98b09d71143d04ee0c3"
@ -29735,6 +29867,11 @@ thunky@^1.0.2:
resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.0.2.tgz#a862e018e3fb1ea2ec3fce5d55605cf57f247371"
integrity sha1-qGLgGOP7HqLsP85dVWBc9X8kc3E=
tiktoken@^1.0.13:
version "1.0.14"
resolved "https://registry.yarnpkg.com/tiktoken/-/tiktoken-1.0.14.tgz#1263821f4ba0a4ec71604db8608a3accd43001c9"
integrity sha512-g5zd5r/DoH8Kw0fiYbYpVhb6WO8BHO1unXqmBBWKwoT17HwSounnDtMDFUKm2Pko8U47sjQarOe+9aUrnqmmTg==
time-stamp@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-1.1.0.tgz#764a5a11af50561921b133f3b44e618687e0f5c3"