Mirror of https://github.com/elastic/kibana.git
[Inference] Instrument inference with OpenTelemetry (#218694)
Instrument the inference `chatComplete` API with OpenTelemetry, and export helper functions to create spans with the right semantic-convention attributes. Additionally, optionally export to Langfuse or Phoenix.

## Centralizes OpenTelemetry setup

As this is the first instance of OpenTelemetry-based _tracing_ (we already have metrics in the MonitoringCollection plugin), some bootstrapping code is necessary to centrally configure OpenTelemetry. To this end, I've added the following config settings:

- `telemetry.tracing.enabled`: whether OpenTelemetry tracing is enabled (defaults to undefined; if undefined, falls back to `telemetry.enabled`)
- `telemetry.tracing.sample_rate` (defaults to 1)

The naming of these configuration settings is mostly in line with [the Elasticsearch tracing settings](https://github.com/elastic/elasticsearch/blob/main/TRACING.md). A config sketch follows this message.

The following packages (containing bootstrapping logic, utility functions, types, and config schemas) were added:

- `@kbn/telemetry`
- `@kbn/telemetry-config`
- `@kbn/tracing`

The OpenTelemetry bootstrapping depends on @kbn/apm-config-loader, as it has the same constraints: it needs to run before any other code, and it needs to read the raw config. Additionally, a root `telemetry` logger was added that captures OpenTelemetry logs. Note that there is no default exporter for spans, which means that although spans are being recorded, they do not get exported.

## Instrument chatComplete calls

Calls to `chatComplete` now create OpenTelemetry spans, roughly following semantic conventions (which for GenAI are very much in flux). Some helper functions were added to create other inference spans. These helper functions use baggage to determine whether the created inference span is the "root" of an inference trace. This allows us to export these spans as if they were root spans, something that is needed to be able to easily visualize them in other tools.

Leveraging these inference spans, two exporters are added: one for [Phoenix](https://github.com/Arize-ai/phoenix) and one for [Langfuse](https://github.com/langfuse/langfuse/tree/main), two open-source LLM observability suites. This allows engineers who use the Inference plugin to inspect and improve their LLM-based workflows with much less effort.

For both Phoenix and Langfuse, service scripts were added. Run `node scripts/phoenix` or `node scripts/langfuse` to get started. Both scripts work with zero config: they will log generated Kibana config to stdout.

---------

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
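As a reference for the settings above, a minimal `kibana.yml` sketch; the values shown are just the defaults described in the message:

```yaml
# Hedged sketch: enable OpenTelemetry tracing.
telemetry.enabled: true
telemetry.tracing.enabled: true
# Sample every trace; lower this (between 0 and 1) to sample a fraction.
telemetry.tracing.sample_rate: 1
```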
This commit is contained in: parent 89c91350cc, commit 2387e3b88c

96 changed files with 3095 additions and 605 deletions
.github/CODEOWNERS (vendored, 3 changes)

```diff
@@ -534,12 +534,15 @@ src/platform/packages/shared/kbn-sse-utils-server @elastic/obs-knowledge-team
 src/platform/packages/shared/kbn-std @elastic/kibana-core
 src/platform/packages/shared/kbn-storage-adapter @elastic/observability-ui
 src/platform/packages/shared/kbn-storybook @elastic/kibana-operations
+src/platform/packages/shared/kbn-telemetry @elastic/kibana-core @elastic/obs-ai-assistant
+src/platform/packages/shared/kbn-telemetry-config @elastic/kibana-core
 src/platform/packages/shared/kbn-test @elastic/kibana-operations @elastic/appex-qa
 src/platform/packages/shared/kbn-test-jest-helpers @elastic/kibana-operations @elastic/appex-qa
 src/platform/packages/shared/kbn-test-subj-selector @elastic/kibana-operations @elastic/appex-qa
 src/platform/packages/shared/kbn-timerange @elastic/obs-ux-logs-team
 src/platform/packages/shared/kbn-tooling-log @elastic/kibana-operations
 src/platform/packages/shared/kbn-traced-es-client @elastic/observability-ui
+src/platform/packages/shared/kbn-tracing @elastic/kibana-core @elastic/obs-ai-assistant
 src/platform/packages/shared/kbn-triggers-actions-ui-types @elastic/response-ops
 src/platform/packages/shared/kbn-try-in-console @elastic/search-kibana
 src/platform/packages/shared/kbn-typed-react-router-config @elastic/obs-knowledge-team @elastic/obs-ux-infra_services-team
```
package.json (20 changes)

```diff
@@ -108,6 +108,7 @@
   "dependencies": {
     "@apidevtools/json-schema-ref-parser": "^11.9.1",
     "@appland/sql-parser": "^1.5.1",
+    "@arizeai/openinference-semantic-conventions": "^1.1.0",
     "@aws-crypto/sha256-js": "^5.2.0",
     "@aws-crypto/util": "^5.2.0",
     "@aws-sdk/client-bedrock-runtime": "^3.744.0",
@@ -960,8 +961,10 @@
     "@kbn/task-manager-fixture-plugin": "link:x-pack/test/alerting_api_integration/common/plugins/task_manager_fixture",
     "@kbn/task-manager-performance-plugin": "link:x-pack/test/plugin_api_perf/plugins/task_manager_performance",
     "@kbn/task-manager-plugin": "link:x-pack/platform/plugins/shared/task_manager",
+    "@kbn/telemetry": "link:src/platform/packages/shared/kbn-telemetry",
     "@kbn/telemetry-collection-manager-plugin": "link:src/platform/plugins/shared/telemetry_collection_manager",
     "@kbn/telemetry-collection-xpack-plugin": "link:x-pack/platform/plugins/private/telemetry_collection_xpack",
+    "@kbn/telemetry-config": "link:src/platform/packages/shared/kbn-telemetry-config",
     "@kbn/telemetry-management-section-plugin": "link:src/platform/plugins/shared/telemetry_management_section",
     "@kbn/telemetry-plugin": "link:src/platform/plugins/shared/telemetry",
     "@kbn/telemetry-test-plugin": "link:src/platform/test/plugin_functional/plugins/telemetry",
@@ -975,6 +978,7 @@
     "@kbn/timerange": "link:src/platform/packages/shared/kbn-timerange",
     "@kbn/tinymath": "link:src/platform/packages/private/kbn-tinymath",
     "@kbn/traced-es-client": "link:src/platform/packages/shared/kbn-traced-es-client",
+    "@kbn/tracing": "link:src/platform/packages/shared/kbn-tracing",
    "@kbn/transform-plugin": "link:x-pack/platform/plugins/private/transform",
    "@kbn/translations-plugin": "link:x-pack/platform/plugins/private/translations",
    "@kbn/transpose-utils": "link:src/platform/packages/private/kbn-transpose-utils",
@@ -1080,14 +1084,22 @@
     "@openfeature/launchdarkly-client-provider": "^0.3.2",
     "@openfeature/server-sdk": "^1.18.0",
     "@openfeature/web-sdk": "^1.5.0",
-    "@opentelemetry/api": "^1.1.0",
+    "@opentelemetry/api": "^1.9.0",
     "@opentelemetry/api-metrics": "^0.31.0",
+    "@opentelemetry/context-async-hooks": "^2.0.0",
+    "@opentelemetry/core": "^2.0.0",
     "@opentelemetry/exporter-metrics-otlp-grpc": "^0.34.0",
     "@opentelemetry/exporter-prometheus": "^0.31.0",
-    "@opentelemetry/resources": "^1.4.0",
+    "@opentelemetry/exporter-trace-otlp-grpc": "^0.200.0",
+    "@opentelemetry/exporter-trace-otlp-http": "^0.200.0",
+    "@opentelemetry/exporter-trace-otlp-proto": "^0.200.0",
+    "@opentelemetry/otlp-exporter-base": "^0.200.0",
+    "@opentelemetry/resources": "^2.0.0",
     "@opentelemetry/sdk-metrics-base": "^0.31.0",
-    "@opentelemetry/sdk-trace-base": "^1.24.0",
-    "@opentelemetry/semantic-conventions": "^1.4.0",
+    "@opentelemetry/sdk-node": "^0.200.0",
+    "@opentelemetry/sdk-trace-base": "^2.0.0",
+    "@opentelemetry/sdk-trace-node": "^2.0.0",
+    "@opentelemetry/semantic-conventions": "^1.32.0",
     "@paralleldrive/cuid2": "^2.2.2",
     "@reduxjs/toolkit": "1.9.7",
     "@slack/webhook": "^7.0.1",
```
(unnamed file; header not captured in the mirror)

```diff
@@ -4294,14 +4294,24 @@
       "@grpc/grpc-js",
       "@opentelemetry/api",
       "@opentelemetry/api-metrics",
       "@opentelemetry/core",
       "@opentelemetry/exporter-metrics-otlp-grpc",
       "@opentelemetry/exporter-prometheus",
       "@opentelemetry/resources",
       "@opentelemetry/sdk-metrics-base",
-      "@opentelemetry/semantic-conventions"
+      "@opentelemetry/semantic-conventions",
+      "@arizeai/openinference-semantic-conventions",
+      "@opentelemetry/context-async-hooks",
+      "@opentelemetry/exporter-trace-otlp-grpc",
+      "@opentelemetry/exporter-trace-otlp-http",
+      "@opentelemetry/exporter-trace-otlp-proto",
+      "@opentelemetry/otlp-exporter-base",
+      "@opentelemetry/sdk-node",
+      "@opentelemetry/sdk-trace-node"
     ],
     "reviewers": [
-      "team:stack-monitoring"
+      "team:stack-monitoring",
+      "team:kibana-core"
     ],
     "matchBaseBranches": [
       "main"
```
scripts/langfuse.js (new file, 11 lines)

```js
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

require('../src/setup_node_env');
require('@kbn/inference-cli/scripts/langfuse');
```
scripts/phoenix.js (new file, 11 lines)

```js
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

require('../src/setup_node_env');
require('@kbn/inference-cli/scripts/phoenix');
```
```diff
@@ -10,10 +10,17 @@
 const { join } = require('path');
 const { name, build } = require('../../package.json');
 const { initApm } = require('@kbn/apm-config-loader');
+const { once } = require('lodash');
+const { initTelemetry } = require('@kbn/telemetry');

 const rootDir = join(__dirname, '../..');
 const isKibanaDistributable = Boolean(build && build.distributable === true);

 module.exports = function (serviceName = name) {
   initApm(process.argv, rootDir, isKibanaDistributable, serviceName);
+  const shutdown = once(initTelemetry(process.argv, rootDir, isKibanaDistributable, serviceName));
+
+  process.on('SIGTERM', shutdown);
+  process.on('SIGINT', shutdown);
+  process.on('beforeExit', shutdown);
 };
```
```diff
@@ -19,6 +19,7 @@
     "@kbn/dev-utils",
     "@kbn/apm-config-loader",
     "@kbn/projects-solutions-groups",
+    "@kbn/telemetry",
   ],
   "exclude": [
     "target/**/*",
```
```diff
@@ -17,6 +17,7 @@ import { isEqual } from 'lodash';
 import type { ElasticConfigType } from './elastic_config';
 import { Server } from '../server';
 import { MIGRATION_EXCEPTION_CODE } from '../constants';
+import { setDiagLogger } from './set_diag_logger';

 /**
  * Top-level entry point to kick off the app and start the Kibana server.
@@ -45,6 +46,7 @@ export class Root {
     try {
       this.server.setupCoreConfig();
       this.setupApmLabelSync();

       await this.setupLogging();

       this.log.debug('prebooting root');
@@ -135,6 +137,10 @@ export class Root {
     const update$ = configService.getConfig$().pipe(
       // always read the logging config when the underlying config object is re-read
       switchMap(() => configService.atPath<LoggingConfigType>('logging')),
+      tap((config) => {
+        const telemetry = config.loggers?.find((loggerConfig) => loggerConfig.name === 'telemetry');
+        setDiagLogger(this.loggingSystem.get('telemetry'), telemetry?.level);
+      }),
       concatMap((config) => this.loggingSystem.upgrade(config)),
       // This specifically console.logs because we were not able to configure the logger.
       // eslint-disable-next-line no-console
```
set_diag_logger.ts (new file, 65 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { DiagLogLevel, DiagLogger, diag } from '@opentelemetry/api';
import { LogLevelId, Logger } from '@kbn/logging';
import { format } from 'util';

export function setDiagLogger(logger: Logger, logLevel?: LogLevelId) {
  const diagLogger: DiagLogger = {
    debug: (message, ...args) => {
      return logger.debug(() => format(message, ...args));
    },
    error: (message, ...args) => {
      return logger.error(() => format(message, ...args));
    },
    info: (message, ...args) => {
      return logger.info(() => format(message, ...args));
    },
    verbose: (message, ...args) => {
      return logger.trace(() => format(message, ...args));
    },
    warn: (message, ...args) => {
      return logger.warn(() => format(message, ...args));
    },
  };

  let level: DiagLogLevel;
  switch (logLevel) {
    case 'off':
      level = DiagLogLevel.NONE;
      break;
    case 'fatal':
    case 'error':
      level = DiagLogLevel.ERROR;
      break;
    case 'warn':
      level = DiagLogLevel.WARN;
      break;

    default:
    case 'info':
      level = DiagLogLevel.INFO;
      break;
    case 'debug':
      level = DiagLogLevel.DEBUG;
      break;
    case 'trace':
      level = DiagLogLevel.VERBOSE;
      break;
    case 'all':
      level = DiagLogLevel.ALL;
      break;
  }

  diag.setLogger(diagLogger, {
    suppressOverrideMessage: true,
    logLevel: level,
  });
}
```
```diff
@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */

-export { getConfiguration } from './src/config_loader';
+export { getConfiguration, loadConfiguration } from './src/config_loader';
 export { initApm } from './src/init_apm';
 export { shouldInstrumentClient } from './src/rum_agent_configuration';
 export type { ApmConfiguration } from './src/config';
```
```diff
@@ -15,6 +15,7 @@ import { readFileSync } from 'fs';
 import type { AgentConfigOptions } from 'elastic-apm-node';
 import type { AgentConfigOptions as RUMAgentConfigOptions } from '@elastic/apm-rum';
 import { getFlattenedObject } from '@kbn/std';
+import type { TelemetryConfig } from '@kbn/telemetry-config';
 import type { ApmConfigSchema } from './apm_config';

 // https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html
@@ -52,6 +53,7 @@ const CENTRALIZED_SERVICE_DIST_CONFIG: AgentConfigOptions = {
 };

 interface KibanaRawConfig {
+  telemetry?: TelemetryConfig;
   elastic?: {
     apm?: ApmConfigSchema;
   };
@@ -96,6 +98,10 @@ export class ApmConfiguration {
     return baseConfig;
   }

+  public getTelemetryConfig(): TelemetryConfig | undefined {
+    return this.rawKibanaConfig.telemetry;
+  }
+
   public isUsersRedactionEnabled(): boolean {
     const { redactUsers = true } = this.getConfigFromKibanaConfig();
     return redactUsers;
```
```diff
@@ -17,15 +17,16 @@ export const initApm = (
 ) => {
   const apmConfigLoader = loadConfiguration(argv, rootDir, isDistributable);
   const apmConfig = apmConfigLoader.getConfig(serviceName);

   const shouldRedactUsers = apmConfigLoader.isUsersRedactionEnabled();

   // we want to only load the module when effectively used
   // eslint-disable-next-line @typescript-eslint/no-var-requires
-  const apm = require('elastic-apm-node');
+  const apm = require('elastic-apm-node') as typeof import('elastic-apm-node');

   // Filter out all user PII
   if (shouldRedactUsers) {
-    apm.addFilter((payload: Record<string, any>) => {
+    apm.addFilter((payload) => {
       try {
         if (payload.context?.user && typeof payload.context.user === 'object') {
           Object.keys(payload.context.user).forEach((key) => {
```
```diff
@@ -15,6 +15,7 @@
     "@kbn/utils",
     "@kbn/config-schema",
     "@kbn/std",
+    "@kbn/telemetry-config",
   ],
   "exclude": [
     "target/**/*",
```
```diff
@@ -32,3 +32,6 @@ export {
 export { ensureDeepObject, ensureValidObjectPath } from './src/ensure_deep_object';
 export { Semaphore } from './src/semaphore';
 export { stripVersionQualifier } from './src/strip_version_qualifier';
+
+export { safeJsonParse } from './src/safe_json_parse';
+export { safeJsonStringify } from './src/safe_json_stringify';
```
src/platform/packages/shared/kbn-std/src/safe_json_parse.ts (new file, 35 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

const noop = <T = unknown>(): T => {
  return undefined as T;
};

/**
 * Safely parses a JSON string. If the string cannot be parsed, for instance
 * if it is not valid JSON, it will return `undefined`. If `handleError` is
 * defined, it will be called with the error, and the response from the callback
 * will be returned. This allows consumers to wrap the JSON.parse error.
 *
 * @param value The JSON string to parse.
 * @param handleError Optional callback that is called when an error occurs
 *                    during parsing. Its return value is returned from the
 *                    function.
 * @returns The parsed object, or `undefined` if an error occurs.
 */
export function safeJsonParse<T = unknown>(
  value: string,
  handleError: (error: Error) => T = noop
): T {
  try {
    return JSON.parse(value);
  } catch (error) {
    return handleError(error);
  }
}
```
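A short usage sketch of the new helper (the inputs are illustrative):

```ts
import { safeJsonParse } from '@kbn/std';

// Valid JSON parses as usual.
const parsed = safeJsonParse<{ foo: string }>('{"foo":"bar"}'); // { foo: 'bar' }

// Invalid JSON returns undefined instead of throwing.
const missing = safeJsonParse('not json'); // undefined

// Or wrap the JSON.parse error through the optional callback.
const wrapped = safeJsonParse('not json', (error) => {
  throw new Error('payload was not valid JSON', { cause: error });
});
```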
src/platform/packages/shared/kbn-std/src/safe_json_stringify.ts (new file, 36 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

const noop = (): string | undefined => {
  return undefined;
};

/**
 * Safely stringifies a value to JSON. If the value cannot be stringified,
 * for instance if it contains circular references, it will return `undefined`.
 * If `handleError` is defined, it will be called with the error, and the
 * response will be returned. This allows consumers to wrap the JSON.stringify
 * error.
 *
 * @param value The value to stringify.
 * @param handleError Optional callback that is called when an error occurs during
 *                    stringifying.
 * @returns The JSON string representation of the value, or `undefined`
 *          if an error occurs.
 */
export function safeJsonStringify(
  value: unknown,
  handleError: (error: Error) => string | undefined = noop
): string | undefined {
  try {
    return JSON.stringify(value);
  } catch (error) {
    return handleError(error);
  }
}
```
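And the mirror-image helper, where circular references are the typical failure mode (example values are illustrative):

```ts
import { safeJsonStringify } from '@kbn/std';

// Circular structures make JSON.stringify throw; this returns undefined instead.
const circular: Record<string, unknown> = {};
circular.self = circular;

safeJsonStringify(circular); // undefined
safeJsonStringify({ ok: true }); // '{"ok":true}'
```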
src/platform/packages/shared/kbn-telemetry-config/README.md (new file, 3 lines)

```md
# @kbn/telemetry-config

Contains the configuration schema and types for Telemetry (as in, OpenTelemetry).
```
src/platform/packages/shared/kbn-telemetry-config/index.ts (new file, 11 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

export { telemetryTracingSchema } from './src/config_schema';
export type { TelemetryConfig, TracingConfig } from './src/types';
```
src/platform/packages/shared/kbn-telemetry-config/jest.config.js (new file, 14 lines)

```js
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

module.exports = {
  preset: '@kbn/test/jest_node',
  rootDir: '../../../../..',
  roots: ['<rootDir>/src/platform/packages/shared/kbn-telemetry-config'],
};
```
src/platform/packages/shared/kbn-telemetry-config/kibana.jsonc (new file, 7 lines)

```jsonc
{
  "type": "shared-common",
  "id": "@kbn/telemetry-config",
  "owner": "@elastic/kibana-core",
  "group": "platform",
  "visibility": "shared"
}
```
src/platform/packages/shared/kbn-telemetry-config/package.json (new file, 6 lines)

```json
{
  "name": "@kbn/telemetry-config",
  "private": true,
  "version": "1.0.0",
  "license": "Elastic License 2.0 OR AGPL-3.0-only OR SSPL-1.0"
}
```
src/platform/packages/shared/kbn-telemetry-config/src/config_schema.ts (new file, 18 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */
import { Type, schema } from '@kbn/config-schema';
import { TracingConfig } from './types';

/**
 * The tracing config schema that is exposed by the Telemetry plugin.
 */
export const telemetryTracingSchema: Type<TracingConfig> = schema.object({
  enabled: schema.maybe(schema.boolean()),
  sample_rate: schema.number({ defaultValue: 1, min: 0, max: 1 }),
});
```
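A quick sketch of how the schema behaves, assuming the usual `validate` API of `@kbn/config-schema` types:

```ts
import { telemetryTracingSchema } from '@kbn/telemetry-config';

// sample_rate defaults to 1 when omitted; enabled stays undefined so callers
// can fall back to `telemetry.enabled`.
const tracing = telemetryTracingSchema.validate({}); // { sample_rate: 1 }

// Values outside [0, 1] are rejected.
telemetryTracingSchema.validate({ sample_rate: 2 }); // throws a validation error
```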
src/platform/packages/shared/kbn-telemetry-config/src/types.ts (new file, 37 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

/**
 * Configuration for OpenTelemetry
 */
export interface TelemetryConfig {
  /**
   * Tracing config. See {@link TracingConfig}.
   */
  tracing?: TracingConfig;
  /**
   * Whether telemetry collection is enabled.
   */
  enabled?: boolean;
}

/**
 * Configuration for OpenTelemetry tracing
 */
export interface TracingConfig {
  /**
   * Whether OpenTelemetry tracing is enabled.
   */
  enabled?: boolean;
  /**
   * At which rate spans get sampled if a sampling decision
   * needs to be made. Should be between 0-1.
   */
  sample_rate: number;
}
```
src/platform/packages/shared/kbn-telemetry-config/tsconfig.json (new file, 19 lines)

```jsonc
{
  "extends": "../../../../../tsconfig.base.json",
  "compilerOptions": {
    "outDir": "target/types",
    "types": [
      "jest",
      "node"
    ]
  },
  "include": [
    "**/*.ts",
  ],
  "exclude": [
    "target/**/*"
  ],
  "kbn_references": [
    "@kbn/config-schema",
  ]
}
```
src/platform/packages/shared/kbn-telemetry/README.md (new file, 3 lines)

```md
# @kbn/telemetry

Contains initialization functions and utilities for Telemetry (as in, OpenTelemetry).
```
src/platform/packages/shared/kbn-telemetry/index.ts (new file, 9 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */
export { initTelemetry } from './src/init_telemetry';
```
src/platform/packages/shared/kbn-telemetry/jest.config.js (new file, 14 lines)

```js
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

module.exports = {
  preset: '@kbn/test/jest_node',
  rootDir: '../../../../..',
  roots: ['<rootDir>/src/platform/packages/shared/kbn-telemetry'],
};
```
src/platform/packages/shared/kbn-telemetry/kibana.jsonc (new file, 7 lines)

```jsonc
{
  "type": "shared-server",
  "id": "@kbn/telemetry",
  "owner": ["@elastic/kibana-core", "@elastic/obs-ai-assistant"],
  "group": "platform",
  "visibility": "shared"
}
```
src/platform/packages/shared/kbn-telemetry/package.json (new file, 6 lines)

```json
{
  "name": "@kbn/telemetry",
  "private": true,
  "version": "1.0.0",
  "license": "Elastic License 2.0 OR AGPL-3.0-only OR SSPL-1.0"
}
```
src/platform/packages/shared/kbn-telemetry/src/init_telemetry.ts (new file, 49 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */
import { loadConfiguration } from '@kbn/apm-config-loader';
import { initTracing } from '@kbn/tracing';

/**
 * Initializes OpenTelemetry (currently only tracing)
 *
 * @param argv Process arguments
 * @param rootDir Root dir of Kibana repo
 * @param isDistributable Whether this is a distributable build
 * @param serviceName The service name used in resource attributes
 * @returns A function that can be called on shutdown and allows exporters to flush their queue.
 */
export const initTelemetry = (
  argv: string[],
  rootDir: string,
  isDistributable: boolean,
  serviceName: string
) => {
  const apmConfigLoader = loadConfiguration(argv, rootDir, isDistributable);

  const apmConfig = apmConfigLoader.getConfig(serviceName);

  const telemetryConfig = apmConfigLoader.getTelemetryConfig();

  // explicitly check for enabled == false, as the default in the schema
  // is true, but it's not parsed through @kbn/config-schema, so the
  // default value is not returned
  const telemetryEnabled = telemetryConfig?.enabled !== false;

  // tracing is enabled only when telemetry is enabled and tracing is not disabled
  const tracingEnabled = telemetryEnabled && telemetryConfig?.tracing?.enabled;

  if (!tracingEnabled) {
    return async () => {};
  }

  return initTracing({
    tracingConfig: telemetryConfig.tracing,
    apmConfig,
  });
};
```
src/platform/packages/shared/kbn-telemetry/tsconfig.json (new file, 20 lines)

```jsonc
{
  "extends": "../../../../../tsconfig.base.json",
  "compilerOptions": {
    "outDir": "target/types",
    "types": [
      "jest",
      "node"
    ]
  },
  "include": [
    "**/*.ts",
  ],
  "exclude": [
    "target/**/*"
  ],
  "kbn_references": [
    "@kbn/apm-config-loader",
    "@kbn/tracing",
  ]
}
```
src/platform/packages/shared/kbn-tracing/README.md (new file, 3 lines)

```md
# @kbn/tracing

Contains OpenTelemetry tracing init functions and utilities.
```
src/platform/packages/shared/kbn-tracing/index.ts (new file, 11 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

export { LateBindingSpanProcessor } from './src/late_binding_span_processor';
export { initTracing } from './src/init_tracing';
```
src/platform/packages/shared/kbn-tracing/jest.config.js (new file, 14 lines)

```js
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

module.exports = {
  preset: '@kbn/test/jest_node',
  rootDir: '../../../../..',
  roots: ['<rootDir>/src/platform/packages/shared/kbn-tracing'],
};
```
src/platform/packages/shared/kbn-tracing/kibana.jsonc (new file, 7 lines)

```jsonc
{
  "type": "shared-server",
  "id": "@kbn/tracing",
  "owner": ["@elastic/kibana-core", "@elastic/obs-ai-assistant"],
  "group": "platform",
  "visibility": "shared"
}
```
src/platform/packages/shared/kbn-tracing/package.json (new file, 6 lines)

```json
{
  "name": "@kbn/tracing",
  "private": true,
  "version": "1.0.0",
  "license": "Elastic License 2.0 OR AGPL-3.0-only OR SSPL-1.0"
}
```
src/platform/packages/shared/kbn-tracing/src/init_tracing.ts (new file, 55 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */
import { context, trace } from '@opentelemetry/api';
import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks';
import { resourceFromAttributes } from '@opentelemetry/resources';
import {
  NodeTracerProvider,
  ParentBasedSampler,
  TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-node';
import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from '@opentelemetry/semantic-conventions';
import { TracingConfig } from '@kbn/telemetry-config';
import { AgentConfigOptions } from 'elastic-apm-node';
import { LateBindingSpanProcessor } from '..';

export function initTracing({
  tracingConfig,
  apmConfig,
}: {
  tracingConfig?: TracingConfig;
  apmConfig: AgentConfigOptions;
}) {
  const contextManager = new AsyncLocalStorageContextManager();
  context.setGlobalContextManager(contextManager);
  contextManager.enable();

  // this is used for late-binding of span processors
  const processor = LateBindingSpanProcessor.get();

  const nodeTracerProvider = new NodeTracerProvider({
    // by default, base sampling on parent context,
    // or for root spans, based on the configured sample rate
    sampler: new ParentBasedSampler({
      root: new TraceIdRatioBasedSampler(tracingConfig?.sample_rate),
    }),
    spanProcessors: [processor],
    resource: resourceFromAttributes({
      [ATTR_SERVICE_NAME]: apmConfig.serviceName,
      [ATTR_SERVICE_VERSION]: apmConfig.serviceVersion,
    }),
  });

  trace.setGlobalTracerProvider(nodeTracerProvider);

  return async () => {
    // allow for programmatic shutdown
    await processor.shutdown();
  };
}
```
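Once `initTracing` has installed the global provider and context manager, spans can be created through the standard OpenTelemetry API. A hedged sketch (the tracer and span names are illustrative):

```ts
import { trace } from '@opentelemetry/api';

const tracer = trace.getTracer('kibana.inference');

tracer.startActiveSpan('chatComplete', (span) => {
  try {
    // ... instrumented work happens here ...
  } finally {
    // Ending the span hands it to the registered span processors.
    span.end();
  }
});
```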
src/platform/packages/shared/kbn-tracing/src/late_binding_span_processor.ts (new file, 62 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

import { Context } from '@opentelemetry/api';
import { ReadableSpan, Span, SpanProcessor } from '@opentelemetry/sdk-trace-node';
import { pull } from 'lodash';

const noop = async () => {};

/**
 * This processor allows consumers to register Span processors after startup,
 * which is useful if processors should be conditionally applied based on config
 * or runtime logic.
 */
export class LateBindingSpanProcessor implements SpanProcessor {
  static #instance?: LateBindingSpanProcessor;

  #processors: SpanProcessor[] = [];

  private constructor() {}

  onStart(span: Span, parentContext: Context): void {
    this.#processors.forEach((processor) => processor.onStart(span, parentContext));
  }

  onEnd(span: ReadableSpan): void {
    this.#processors.forEach((processor) => processor.onEnd(span));
  }

  async forceFlush(): Promise<void> {
    await Promise.all(this.#processors.map((processor) => processor.forceFlush()));
  }
  async shutdown(): Promise<void> {
    await Promise.all(this.#processors.map((processor) => processor.shutdown()));
  }

  register(processor: SpanProcessor) {
    this.#processors.push(processor);

    return async () => {
      pull(this.#processors, processor);
      await processor.shutdown();
    };
  }

  static register(processor: SpanProcessor): () => Promise<void> {
    return this.#instance?.register(processor) ?? noop;
  }

  static get() {
    if (!this.#instance) {
      this.#instance = new LateBindingSpanProcessor();
    }
    return this.#instance;
  }
}
```
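A sketch of late-binding an exporter once config is known. The OTLP endpoint is an assumption, not something this PR configures by default; recall that no default span exporter is set up:

```ts
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-node';
import { LateBindingSpanProcessor } from '@kbn/tracing';

// Attach a processor after startup; spans recorded from now on flow through it.
const unregister = LateBindingSpanProcessor.register(
  new BatchSpanProcessor(new OTLPTraceExporter({ url: 'http://localhost:4318/v1/traces' }))
);

// Later, e.g. on config change: detach and shut the processor down.
await unregister();
```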
src/platform/packages/shared/kbn-tracing/tsconfig.json (new file, 19 lines)

```jsonc
{
  "extends": "../../../../../tsconfig.base.json",
  "compilerOptions": {
    "outDir": "target/types",
    "types": [
      "jest",
      "node"
    ]
  },
  "include": [
    "**/*.ts",
  ],
  "exclude": [
    "target/**/*"
  ],
  "kbn_references": [
    "@kbn/telemetry-config",
  ]
}
```
```diff
@@ -10,6 +10,7 @@
 import { schema, TypeOf, Type, offeringBasedSchema } from '@kbn/config-schema';
 import { getConfigPath } from '@kbn/utils';
 import { PluginConfigDescriptor } from '@kbn/core/server';
+import { telemetryTracingSchema } from '@kbn/telemetry-config';
 import { labelsSchema } from './telemetry_labels';

 const clusterEnvSchema: [Type<'prod'>, Type<'staging'>] = [
@@ -49,6 +50,7 @@ const configSchema = schema.object({
     schema.literal(false),
     { defaultValue: false }
   ),
+  tracing: schema.maybe(telemetryTracingSchema),
 });

 export type TelemetryConfigType = TypeOf<typeof configSchema>;
@@ -76,6 +78,7 @@ export const config: PluginConfigDescriptor<TelemetryConfigType> = {
   set: [
     { path: 'telemetry.optIn', value: false },
     { path: 'telemetry.allowChangingOptInStatus', value: false },
+    { path: 'telemetry.tracing.enabled', value: false },
   ],
   unset: [{ path: 'telemetry.enabled' }],
 };
```
```diff
@@ -40,6 +40,7 @@
     "@kbn/core-elasticsearch-server",
     "@kbn/logging",
     "@kbn/core-security-server",
+    "@kbn/telemetry-config",
   ],
   "exclude": [
     "target/**/*",
```
```diff
@@ -1948,10 +1948,14 @@
     "@kbn/task-manager-performance-plugin/*": ["x-pack/test/plugin_api_perf/plugins/task_manager_performance/*"],
     "@kbn/task-manager-plugin": ["x-pack/platform/plugins/shared/task_manager"],
     "@kbn/task-manager-plugin/*": ["x-pack/platform/plugins/shared/task_manager/*"],
+    "@kbn/telemetry": ["src/platform/packages/shared/kbn-telemetry"],
+    "@kbn/telemetry/*": ["src/platform/packages/shared/kbn-telemetry/*"],
     "@kbn/telemetry-collection-manager-plugin": ["src/platform/plugins/shared/telemetry_collection_manager"],
     "@kbn/telemetry-collection-manager-plugin/*": ["src/platform/plugins/shared/telemetry_collection_manager/*"],
     "@kbn/telemetry-collection-xpack-plugin": ["x-pack/platform/plugins/private/telemetry_collection_xpack"],
     "@kbn/telemetry-collection-xpack-plugin/*": ["x-pack/platform/plugins/private/telemetry_collection_xpack/*"],
+    "@kbn/telemetry-config": ["src/platform/packages/shared/kbn-telemetry-config"],
+    "@kbn/telemetry-config/*": ["src/platform/packages/shared/kbn-telemetry-config/*"],
     "@kbn/telemetry-management-section-plugin": ["src/platform/plugins/shared/telemetry_management_section"],
     "@kbn/telemetry-management-section-plugin/*": ["src/platform/plugins/shared/telemetry_management_section/*"],
     "@kbn/telemetry-plugin": ["src/platform/plugins/shared/telemetry"],
@@ -2002,6 +2006,8 @@
     "@kbn/tooling-log/*": ["src/platform/packages/shared/kbn-tooling-log/*"],
     "@kbn/traced-es-client": ["src/platform/packages/shared/kbn-traced-es-client"],
     "@kbn/traced-es-client/*": ["src/platform/packages/shared/kbn-traced-es-client/*"],
+    "@kbn/tracing": ["src/platform/packages/shared/kbn-tracing"],
+    "@kbn/tracing/*": ["src/platform/packages/shared/kbn-tracing/*"],
     "@kbn/transform-plugin": ["x-pack/platform/plugins/private/transform"],
     "@kbn/transform-plugin/*": ["x-pack/platform/plugins/private/transform/*"],
     "@kbn/translations-plugin": ["x-pack/platform/plugins/private/translations"],
```
```diff
@@ -111,6 +111,7 @@ export {
   isSupportedConnectorType,
   isSupportedConnector,
   getConnectorDefaultModel,
+  getConnectorModel,
   getConnectorProvider,
   connectorToInference,
   type InferenceConnector,
@@ -121,4 +122,10 @@ export {
   elasticModelIds,
 } from './src/inference_endpoints';

+export type {
+  InferenceTracingExportConfig,
+  InferenceTracingLangfuseExportConfig,
+  InferenceTracingPhoenixExportConfig,
+} from './src/tracing';
+
 export { Tokenizer } from './src/utils/tokenizer';
```
get_connector_model.ts (new file, 50 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { getConnectorDefaultModel } from './connector_config';
import { InferenceConnector, InferenceConnectorType } from './connectors';

/**
 * Guesses the model based on the connector type and configuration.
 *
 * Inferred from the type for "legacy" connectors,
 * and from the provider config field for inference connectors.
 */
export const getConnectorModel = (connector: InferenceConnector): string | undefined => {
  const defaultModel = getConnectorDefaultModel(connector);

  if (defaultModel) {
    return defaultModel;
  }

  if (connector.type === InferenceConnectorType.OpenAI && connector.config?.apiUrl) {
    return getOpenAiModelFromUrl(connector.config?.apiUrl);
  }
};

const OPENAI_MODEL_NAMES = [
  'gpt-4.1-mini',
  'gpt-4.1-nano',
  'gpt-4.1',
  'gpt-4o-mini',
  'gpt-4o',
  'gpt-4',
  'gpt-35-turbo',
  'o3-mini',
  'o1-mini',
  'o1',
];

function getOpenAiModelFromUrl(apiUrl: string) {
  const url = new URL(apiUrl);
  if (url.hostname.endsWith('azure.com')) {
    return OPENAI_MODEL_NAMES.find((modelName) => {
      return url.pathname.includes(modelName);
    });
  }
  return undefined;
}
```
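A hedged sketch of the Azure heuristic above; the connector literal is illustrative and may omit fields a real `InferenceConnector` carries:

```ts
import { getConnectorModel, InferenceConnectorType } from '@kbn/inference-common';
import type { InferenceConnector } from '@kbn/inference-common';

// Hypothetical OpenAI connector pointing at an Azure deployment of gpt-4o.
const connector = {
  type: InferenceConnectorType.OpenAI,
  config: {
    apiUrl: 'https://my-resource.openai.azure.com/openai/deployments/gpt-4o/chat/completions',
  },
} as unknown as InferenceConnector;

getConnectorModel(connector); // => 'gpt-4o', matched from the deployment path
```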
```diff
@@ -8,4 +8,5 @@
 export { isSupportedConnectorType, isSupportedConnector } from './is_supported_connector';
 export { connectorToInference } from './connector_to_inference';
 export { getConnectorDefaultModel, getConnectorProvider } from './connector_config';
+export { getConnectorModel } from './get_connector_model';
 export { InferenceConnectorType, type InferenceConnector } from './connectors';
```
@kbn/inference-common src/tracing (new file, 74 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

/**
 * Configuration schema for the Langfuse exporter.
 *
 * @internal
 */
export interface InferenceTracingLangfuseExportConfig {
  /**
   * The URL for Langfuse server and Langfuse UI.
   */
  base_url: string;
  /**
   * The public key for API requests to Langfuse server.
   */
  public_key: string;
  /**
   * The secret key for API requests to Langfuse server.
   */
  secret_key: string;
  /**
   * The delay in milliseconds before the exporter sends another
   * batch of spans.
   */
  scheduled_delay: number;
}
/**
 * Configuration schema for the Phoenix exporter.
 *
 * @internal
 */
export interface InferenceTracingPhoenixExportConfig {
  /**
   * The URL for Phoenix server.
   */
  base_url: string;
  /**
   * The URL for Phoenix UI.
   */
  public_url?: string;
  /**
   * The project in which traces are stored. Used for
   * generating links to Phoenix UI.
   */
  project_name?: string;
  /**
   * The API key for API requests to Phoenix server.
   */
  api_key?: string;
  /**
   * The delay in milliseconds before the exporter sends another
   * batch of spans.
   */
  scheduled_delay: number;
}

/**
 * Configuration schema for inference tracing exporters.
 *
 * @internal
 */
export interface InferenceTracingExportConfig {
  /**
   * Defines to which system inference spans will be exported.
   * Should be one of {@link InferenceTracingLangfuseExportConfig}
   * or {@link InferenceTracingPhoenixExportConfig}
   */
  exporter?: InferenceTracingLangfuseExportConfig | InferenceTracingPhoenixExportConfig;
}
```
@kbn/inference-cli scripts/langfuse (new file, 23 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { run } from '@kbn/dev-cli-runner';
import { ensureLangfuse } from '../src/langfuse/ensure_langfuse';

run(({ log, addCleanupTask }) => {
  const controller = new AbortController();

  addCleanupTask(() => {
    controller.abort();
  });

  return ensureLangfuse({
    log,
    signal: controller.signal,
  }).catch((error) => {
    throw new Error('Failed to start Langfuse', { cause: error });
  });
});
```
@kbn/inference-cli scripts/phoenix (new file, 23 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { run } from '@kbn/dev-cli-runner';
import { ensurePhoenix } from '../src/phoenix/ensure_phoenix';

run(({ log, addCleanupTask }) => {
  const controller = new AbortController();

  addCleanupTask(() => {
    controller.abort();
  });

  return ensurePhoenix({
    log,
    signal: controller.signal,
  }).catch((error) => {
    throw new Error('Failed to start Phoenix', { cause: error });
  });
});
```
```diff
@@ -9,13 +9,13 @@ import { ToolingLog } from '@kbn/tooling-log';
 import execa from 'execa';
 import Path from 'path';
 import chalk from 'chalk';
-import { assertDockerAvailable } from './assert_docker_available';
+import { assertDockerAvailable } from '../util/assert_docker_available';
 import { getDockerComposeYaml } from './get_docker_compose_yaml';
 import { getEisGatewayConfig } from './get_eis_gateway_config';
-import { DATA_DIR, writeFile } from './file_utils';
+import { DATA_DIR, writeFile } from '../util/file_utils';
 import { getNginxConf } from './get_nginx_conf';
-import { untilGatewayReady } from './until_gateway_ready';
 import { getEisCredentials } from './get_eis_credentials';
+import { untilContainerReady } from '../util/until_container_ready';

 const DOCKER_COMPOSE_FILE_PATH = Path.join(DATA_DIR, 'docker-compose.yaml');
 const NGINX_CONF_FILE_PATH = Path.join(DATA_DIR, 'nginx.conf');
@@ -79,7 +79,13 @@ export async function ensureEis({ log, signal }: { log: ToolingLog; signal: AbortSignal })
   log.debug(`Wrote docker-compose file to ${DOCKER_COMPOSE_FILE_PATH}`);

-  untilGatewayReady({ dockerComposeFilePath: DOCKER_COMPOSE_FILE_PATH })
+  untilContainerReady({
+    containerName: 'gateway-proxy',
+    signal,
+    log,
+    dockerComposeFilePath: DOCKER_COMPOSE_FILE_PATH,
+    condition: ['.State.Health.Status', 'healthy'],
+  })
     .then(() => {
       log.write('');
```
```diff
@@ -9,7 +9,7 @@ import execa from 'execa';
 import Path from 'path';
 import { promises as Fs } from 'fs';
 import { ToolingLog } from '@kbn/tooling-log';
-import { DATA_DIR, createDirIfNotExists, fileExists } from './file_utils';
+import { DATA_DIR, createDirIfNotExists, fileExists } from '../util/file_utils';

 const CERTS_DIR = Path.join(DATA_DIR, 'certs');
```
```diff
@@ -6,7 +6,7 @@
  */
 import { ToolingLog } from '@kbn/tooling-log';
 import { dump } from 'js-yaml';
-import { writeTempfile } from './file_utils';
+import { writeTempfile } from '../util/file_utils';
 import { generateCertificates } from './generate_certificate';
 import { getServiceConfigurationFromYaml } from './get_service_configuration';
 import { EisCredentials } from './get_eis_credentials';
```
until_gateway_ready.ts (deleted file, 35 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { backOff } from 'exponential-backoff';
import execa from 'execa';

export async function untilGatewayReady({
  dockerComposeFilePath,
}: {
  dockerComposeFilePath: string;
}) {
  async function isGatewayReady() {
    const { stdout: gatewayProxyContainerName } = await execa.command(
      `docker compose -f ${dockerComposeFilePath} ps -q gateway-proxy`
    );

    const { stdout } = await execa.command(
      `docker inspect --format='{{.State.Health.Status}}' ${gatewayProxyContainerName}`
    );

    if (stdout !== "'healthy'") {
      throw new Error(`gateway-proxy not healthy: ${stdout}`);
    }
  }

  return await backOff(isGatewayReady, {
    delayFirstAttempt: true,
    startingDelay: 500,
    jitter: 'full',
    numOfAttempts: 20,
  });
}
```
@kbn/inference-cli src/langfuse/ensure_langfuse.ts (new file, 114 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { ToolingLog } from '@kbn/tooling-log';
import execa from 'execa';
import Path from 'path';
import chalk from 'chalk';
import { mapValues } from 'lodash';
import { assertDockerAvailable } from '../util/assert_docker_available';
import { sparseCheckout } from '../util/sparse_checkout';
import { untilContainerReady } from '../util/until_container_ready';

const USER_EMAIL = 'kimchy@elasticsearch.com';
const USER_NAME = 'elastic';
const USER_PASSWORD = 'changeme';

const LOCAL_PUBLIC_KEY = 'langfuse-dev-public-key';
const LOCAL_SECRET_KEY = 'langfuse-dev-secret-key';

async function down(dockerComposeFilePath: string, cleanup: boolean = true) {
  await execa
    .command(`docker compose -f ${dockerComposeFilePath} down`, { cleanup })
    .catch(() => {});
}

export async function ensureLangfuse({ log, signal }: { log: ToolingLog; signal: AbortSignal }) {
  log.info(`Ensuring Langfuse is available`);

  await assertDockerAvailable();

  const repoDir = await sparseCheckout({
    repository: {
      user: 'langfuse',
      name: 'langfuse',
    },
    files: ['docker-compose.yml'],
  });

  const dockerComposeFilePath = Path.join(repoDir, 'docker-compose.yml');

  log.info(`Stopping existing containers`);

  await down(dockerComposeFilePath);

  log.debug(`Retrieved docker-compose file at ${dockerComposeFilePath}`);

  log.info(`Waiting until Langfuse is ready`);

  const env = mapValues(
    {
      LANGFUSE_INIT_USER_EMAIL: USER_EMAIL,
      LANGFUSE_INIT_USER_NAME: USER_NAME,
      LANGFUSE_INIT_USER_PASSWORD: USER_PASSWORD,
      LANGFUSE_INIT_PROJECT_PUBLIC_KEY: LOCAL_PUBLIC_KEY,
      LANGFUSE_INIT_PROJECT_SECRET_KEY: LOCAL_SECRET_KEY,
      LANGFUSE_BASE_URL: `http://localhost:3000`,
      LANGFUSE_INIT_ORG_ID: 'elastic',
      LANGFUSE_INIT_ORG_NAME: 'Elastic',
      LANGFUSE_INIT_PROJECT_ID: 'Elastic',
      LANGFUSE_INIT_PROJECT_NAME: 'Elastic',
    },
    (value, key) => {
      return process.env[key] || value;
    }
  );

  untilContainerReady({
    containerName: 'langfuse-web',
    dockerComposeFilePath,
    signal,
    log,
    condition: ['.State.Status', 'running'],
  })
    .then(async () => {
      log.write('');

      log.write(
        `${chalk.green(`✔`)} Langfuse started. Log in with ${env.LANGFUSE_INIT_USER_EMAIL}:${
          env.LANGFUSE_INIT_USER_PASSWORD
        } at ${
          env.LANGFUSE_BASE_URL
        }. Paste the following config in kibana.(dev.).yml if you don't already have Langfuse configured:`
      );

      const lines = [
        `telemetry.enabled: true`,
        `telemetry.tracing.enabled: true`,
        `xpack.inference.tracing.exporter.langfuse.base_url: "${env.LANGFUSE_BASE_URL}"`,
        `xpack.inference.tracing.exporter.langfuse.public_key: "${env.LANGFUSE_INIT_PROJECT_PUBLIC_KEY}"`,
        `xpack.inference.tracing.exporter.langfuse.secret_key: "${env.LANGFUSE_INIT_PROJECT_SECRET_KEY}"`,
      ];

      log.write('');

      lines.forEach((line) => {
        if (line) {
          log.write(line);
        }
      });
    })
    .catch((error) => {
      log.error(error);
    });

  await execa.command(`docker compose -f ${dockerComposeFilePath} up`, {
    stdio: 'inherit',
    cleanup: true,
    env,
  });
}
```
@kbn/inference-cli src/phoenix/ensure_phoenix.ts (new file, 122 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { ToolingLog } from '@kbn/tooling-log';
import chalk from 'chalk';
import execa from 'execa';
import { mapValues } from 'lodash';
import Os from 'os';
import Path from 'path';
import { assertDockerAvailable } from '../util/assert_docker_available';
import { createDirIfNotExists, writeFile } from '../util/file_utils';
import { untilContainerReady } from '../util/until_container_ready';
import { getDockerComposeYaml } from './get_docker_compose_yaml';

const PHOENIX_PORT = '6006';
const PHOENIX_HOST = '0.0.0.0';
const PHOENIX_ENABLE_AUTH = false;
const PHOENIX_SECRET = '';
const PHOENIX_LOGGING_MODE = 'default';
const PHOENIX_LOGGING_LEVEL = 'info';
const PHOENIX_DB_LOGGING_LEVEL = 'info';

async function down(dockerComposeFilePath: string, cleanup: boolean = true) {
  await execa
    .command(`docker compose -f ${dockerComposeFilePath} down`, { cleanup })
    .catch(() => {});
}

export async function ensurePhoenix({ log, signal }: { log: ToolingLog; signal: AbortSignal }) {
  log.info(`Ensuring Phoenix is available`);

  await assertDockerAvailable();

  const tmpDir = Path.join(Os.tmpdir(), 'kibana-inference', 'phoenix');

  await createDirIfNotExists(tmpDir);

  const dockerComposeFilePath = Path.join(tmpDir, 'docker-compose.yml');

  const env = mapValues(
    {
      PHOENIX_PORT,
      PHOENIX_HOST,
      PHOENIX_ENABLE_AUTH,
      PHOENIX_SECRET,
      PHOENIX_LOGGING_LEVEL,
      PHOENIX_DB_LOGGING_LEVEL,
      PHOENIX_LOGGING_MODE,
    },
    (value, key) => {
      return String(process.env[key] || value);
    }
  );

  await writeFile(
    dockerComposeFilePath,
    await getDockerComposeYaml({
      ports: {
        phoenix: Number(env.PHOENIX_PORT),
      },
      env,
    })
  );

  log.debug(`Wrote to ${dockerComposeFilePath}`);

  log.info(`Stopping existing containers`);

  await down(dockerComposeFilePath);

  log.debug(`Retrieved docker-compose file at ${dockerComposeFilePath}`);

  log.info(`Waiting until Phoenix is ready`);

  untilContainerReady({
    containerName: 'phoenix',
    dockerComposeFilePath,
    signal,
    log,
    condition: ['.State.Status', 'running'],
  })
    .then(async () => {
      log.write('');

      log.write(
        `${chalk.green(
          `✔`
        )} Phoenix started. Visit at ${`http://${env.PHOENIX_HOST}:${env.PHOENIX_PORT}`}. Paste the following config in kibana.(dev.).yml if you don't already have Phoenix configured:`
      );

      const lines = [
        `telemetry.enabled: true`,
        `telemetry.tracing.enabled: true`,
        `xpack.inference.tracing.exporter.phoenix.base_url: "http://${env.PHOENIX_HOST}:${env.PHOENIX_PORT}"`,
        `xpack.inference.tracing.exporter.phoenix.public_url: "http://${env.PHOENIX_HOST}:${env.PHOENIX_PORT}"`,
        ...(env.PHOENIX_SECRET
          ? [`xpack.inference.tracing.exporter.phoenix.secret: "${env.PHOENIX_SECRET}"`]
          : []),
      ];

      log.write('');

      lines.forEach((line) => {
        if (line) {
          log.write(line);
        }
      });
    })
    .catch((error) => {
      log.error(error);
    });

  await execa.command(`docker compose -f ${dockerComposeFilePath} up`, {
    stdio: 'inherit',
    cleanup: true,
    env,
  });
}
```
@@ -0,0 +1,37 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export function getDockerComposeYaml({
  ports,
  env,
}: {
  ports: { phoenix: number };
  env: Record<string, any>;
}) {
  const { phoenix } = ports;

  return `
services:
  phoenix:
    image: arizephoenix/phoenix:latest # requires version 4.0 or greater
    ports:
      - ${phoenix}:6006 # PHOENIX_PORT
    environment:
      - PHOENIX_WORKING_DIR=/mnt/data
      - PHOENIX_HOST=${env.PHOENIX_HOST}
      ${env.PHOENIX_SECRET ? `- PHOENIX_SECRET=${env.PHOENIX_SECRET}` : ``}
      - PHOENIX_ENABLE_AUTH=${env.PHOENIX_ENABLE_AUTH}
      - PHOENIX_LOGGING_LEVEL=${env.PHOENIX_LOGGING_LEVEL}
      - PHOENIX_DB_LOGGING_LEVEL=${env.PHOENIX_DB_LOGGING_LEVEL}
      - PHOENIX_LOGGING_MODE=${env.PHOENIX_LOGGING_MODE}
    volumes:
      - phoenix_data:/mnt/data
volumes:
  phoenix_data:
    driver: local
`;
}
@@ -0,0 +1,69 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { promises as Fs } from 'fs';
import Path from 'path';
import os from 'os';
import { noop } from 'lodash';
import simpleGit, { ResetMode } from 'simple-git';
import { createDirIfNotExists } from './file_utils';

class GitCheckoutError extends Error {
  constructor(cause: Error) {
    super(`Failed to checkout repository. Make sure you've authenticated to Git`, { cause });
  }
}

export async function sparseCheckout({
  repository,
  files,
}: {
  repository: {
    user: string;
    name: string;
  };
  files: string[];
}): Promise<string> {
  // Create a temporary directory
  const tmpDir = Path.join(os.tmpdir(), 'kibana-inference', repository.name);

  await createDirIfNotExists(tmpDir);

  const git = simpleGit(tmpDir);

  // Initialize an empty repository and enable sparse checkout
  await git.init();
  await git.raw(['config', 'core.sparseCheckout', 'true']);

  const sparseCheckoutPath = Path.join(tmpDir, '.git', 'info', 'sparse-checkout');
  await Fs.writeFile(sparseCheckoutPath, files.join('\n'), 'utf-8');

  async function pull() {
    await git.fetch('origin', ['--depth', '1']);
    await git.reset(ResetMode.HARD, ['origin/main']).catch(noop);
  }

  const remotes = (await git.getRemotes()).map((remote) => remote.name);

  if (!remotes.includes('origin')) {
    await git.addRemote('origin', `git@github.com:${repository.user}/${repository.name}.git`);
  }

  await pull()
    .catch(async () => {
      // SSH fetch failed; fall back to HTTPS and retry
      await git.remote([
        'set-url',
        'origin',
        `https://github.com/${repository.user}/${repository.name}.git`,
      ]);
      await pull();
    })
    .catch((error) => {
      throw new GitCheckoutError(error);
    });

  return tmpDir;
}
@@ -0,0 +1,55 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { backOff } from 'exponential-backoff';
import execa from 'execa';
import { ToolingLog } from '@kbn/tooling-log';

export async function untilContainerReady({
  containerName,
  dockerComposeFilePath,
  signal,
  log,
  condition,
}: {
  containerName: string;
  dockerComposeFilePath: string;
  signal: AbortSignal;
  log: ToolingLog;
  condition: [string, string];
}) {
  async function isContainerReady() {
    log.debug(`Checking container is ready`);
    const { stdout: globalScopeContainerName } = await execa.command(
      `docker compose -f ${dockerComposeFilePath} ps -q ${containerName}`
    );

    const [field, value] = condition;

    const { stdout } = await execa
      .command(`docker inspect --format='{{${field}}}' ${globalScopeContainerName}`)
      .catch((error) => {
        log.debug(`Error retrieving container status: ${error.stderr.split('\n')[0]}`);
        throw error;
      });

    log.debug(`Container status: ${stdout}`);

    // execa does not run through a shell, so the single quotes from
    // --format='...' are preserved in the output
    if (stdout !== `'${value}'`) {
      throw new Error(`${containerName} not ${value}: ${stdout}`);
    }
  }

  return await backOff(isContainerReady, {
    delayFirstAttempt: true,
    startingDelay: 500,
    jitter: 'full',
    numOfAttempts: 20,
    retry: () => {
      return !signal.aborted;
    },
  });
}
@@ -0,0 +1,24 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import Path from 'path';
import { promises as Fs } from 'fs';
import { load, dump } from 'js-yaml';
import { REPO_ROOT } from '@kbn/repo-info';
import { writeFile } from './file_utils';

export async function writeKibanaConfig(
  cb: (config: Record<string, any>) => Promise<Record<string, any>>
): Promise<void> {
  const configFilePath = Path.join(REPO_ROOT, 'config/kibana.dev.yml');

  // js-yaml's load() parses YAML source, not a file path, so read the file first
  const config = (load(await Fs.readFile(configFilePath, 'utf-8')) ?? {}) as Record<string, any>;

  const result = await cb(config);

  const fileContent = dump(result);

  await writeFile(configFilePath, fileContent);
}
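For context, a minimal usage sketch of writeKibanaConfig (the module path and the exact config keys are illustrative assumptions; the keys mirror what the service scripts print):

import { writeKibanaConfig } from './write_kibana_config';

async function enableTracing() {
  // merge tracing settings into config/kibana.dev.yml, preserving existing entries
  await writeKibanaConfig(async (config) => ({
    ...config,
    'telemetry.enabled': true,
    'telemetry.tracing.enabled': true,
  }));
}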
@@ -17,7 +17,7 @@ import { MakeSchemaFrom } from '@kbn/usage-collection-plugin/server';
 import { metrics } from '@opentelemetry/api-metrics';
 import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-grpc';
 import { MeterProvider, PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics-base';
-import { Resource } from '@opentelemetry/resources';
+import { resourceFromAttributes } from '@opentelemetry/resources';
 import { diag, DiagLogger, DiagLogLevel } from '@opentelemetry/api';
 import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
 import * as grpc from '@grpc/grpc-js';

@@ -127,7 +127,7 @@ export class MonitoringCollectionPlugin implements Plugin<MonitoringCollectionSe
     serviceVersion?: string
   ) {
     const meterProvider = new MeterProvider({
-      resource: new Resource({
+      resource: resourceFromAttributes({
         [SemanticResourceAttributes.SERVICE_NAME]: serviceName,
         [SemanticResourceAttributes.SERVICE_INSTANCE_ID]: serviceInstanceId,
         [SemanticResourceAttributes.SERVICE_VERSION]: serviceVersion,
@@ -15,6 +15,8 @@ import {
   createInferenceRequestError,
   type ToolOptions,
   ChatCompleteOptions,
+  getConnectorModel,
+  getConnectorProvider,
 } from '@kbn/inference-common';
 import type { PluginStartContract as ActionsPluginStart } from '@kbn/actions-plugin/server';
 import { getInferenceAdapter } from './adapters';
@@ -26,6 +28,7 @@ import {
   retryWithExponentialBackoff,
   getRetryFilter,
 } from './utils';
+import { withChatCompleteSpan } from '../tracing/with_chat_complete_span';

 interface CreateChatCompleteApiOptions {
   request: KibanaRequest;
@@ -57,7 +60,8 @@ export function createChatCompleteApi({ request, actions, logger }: CreateChatCo
       return await getInferenceExecutor({ connectorId, request, actions });
     }).pipe(
       switchMap((executor) => {
-        const connectorType = executor.getConnector().type;
+        const connector = executor.getConnector();
+        const connectorType = connector.type;
         const inferenceAdapter = getInferenceAdapter(connectorType);

         if (!inferenceAdapter) {
@@ -81,23 +85,36 @@
         })
       );

-      return inferenceAdapter.chatComplete({
-        system,
-        executor,
-        messages,
-        toolChoice,
-        tools,
-        temperature,
-        logger,
-        functionCalling,
-        modelName,
-        abortSignal,
-        metadata,
-      });
-    }),
-    chunksIntoMessage({
-      toolOptions: { toolChoice, tools },
-      logger,
+      return withChatCompleteSpan(
+        {
+          system,
+          messages,
+          model: getConnectorModel(connector),
+          provider: getConnectorProvider(connector),
+        },
+        () => {
+          return inferenceAdapter
+            .chatComplete({
+              system,
+              executor,
+              messages,
+              toolChoice,
+              tools,
+              temperature,
+              logger,
+              functionCalling,
+              modelName,
+              abortSignal,
+              metadata,
+            })
+            .pipe(
+              chunksIntoMessage({
+                toolOptions: { toolChoice, tools },
+                logger,
+              })
+            );
+        }
+      );
     }),
     retryWithExponentialBackoff({
       maxRetry: maxRetries,
@@ -7,8 +7,40 @@

 import { schema, type TypeOf } from '@kbn/config-schema';

-export const config = schema.object({
+const scheduledDelay = schema.conditional(
+  schema.contextRef('dev'),
+  true,
+  schema.number({ defaultValue: 1000 }),
+  schema.number({ defaultValue: 5000 })
+);
+
+export const configSchema = schema.object({
   enabled: schema.boolean({ defaultValue: true }),
+  tracing: schema.maybe(
+    schema.object({
+      exporter: schema.maybe(
+        schema.oneOf([
+          schema.object({
+            langfuse: schema.object({
+              base_url: schema.uri(),
+              public_key: schema.string(),
+              secret_key: schema.string(),
+              scheduled_delay: scheduledDelay,
+            }),
+          }),
+          schema.object({
+            phoenix: schema.object({
+              base_url: schema.string(),
+              public_url: schema.maybe(schema.uri()),
+              project_name: schema.maybe(schema.string()),
+              api_key: schema.maybe(schema.string()),
+              scheduled_delay: scheduledDelay,
+            }),
+          }),
+        ])
+      ),
+    })
+  ),
 });

-export type InferenceConfig = TypeOf<typeof config>;
+export type InferenceConfig = TypeOf<typeof configSchema>;
@@ -5,8 +5,12 @@
 * 2.0.
 */

-import type { PluginInitializer, PluginInitializerContext } from '@kbn/core/server';
-import type { InferenceConfig } from './config';
+import type {
+  PluginConfigDescriptor,
+  PluginInitializer,
+  PluginInitializerContext,
+} from '@kbn/core/server';
+import { InferenceConfig, configSchema } from './config';
 import type {
   InferenceServerSetup,
   InferenceServerStart,
@@ -18,6 +22,10 @@ import { InferencePlugin } from './plugin';
 export type { InferenceClient, BoundInferenceClient } from './inference_client';
 export type { InferenceServerSetup, InferenceServerStart };

+export { withChatCompleteSpan } from './tracing/with_chat_complete_span';
+export { withInferenceSpan } from './tracing/with_inference_span';
+export { withExecuteToolSpan } from './tracing/with_execute_tool_span';
+
 export { naturalLanguageToEsql } from './tasks/nl_to_esql';

 export const plugin: PluginInitializer<
@@ -27,3 +35,7 @@ export const plugin: PluginInitializer<
   InferenceStartDependencies
 > = async (pluginInitializerContext: PluginInitializerContext<InferenceConfig>) =>
   new InferencePlugin(pluginInitializerContext);
+
+export const config: PluginConfigDescriptor<InferenceConfig> = {
+  schema: configSchema,
+};
@@ -23,6 +23,8 @@ import {
   InferenceSetupDependencies,
   InferenceStartDependencies,
 } from './types';
+import { initLangfuseProcessor } from './tracing/langfuse/init_langfuse_processor';
+import { initPhoenixProcessor } from './tracing/phoenix/init_phoenix_processor';

 export class InferencePlugin
   implements
@@ -35,8 +37,27 @@ export class InferencePlugin
 {
   private logger: Logger;

+  private config: InferenceConfig;
+
+  private shutdownProcessor?: () => Promise<void>;
+
   constructor(context: PluginInitializerContext<InferenceConfig>) {
     this.logger = context.logger.get();
+    this.config = context.config.get();
+
+    const exporter = this.config.tracing?.exporter;
+
+    if (exporter && 'langfuse' in exporter) {
+      this.shutdownProcessor = initLangfuseProcessor({
+        logger: this.logger,
+        config: exporter.langfuse,
+      });
+    } else if (exporter && 'phoenix' in exporter) {
+      this.shutdownProcessor = initPhoenixProcessor({
+        logger: this.logger,
+        config: exporter.phoenix,
+      });
+    }
   }
   setup(
     coreSetup: CoreSetup<InferenceStartDependencies, InferenceServerStart>,
@@ -74,4 +95,8 @@
       },
     };
   }
+
+  async stop() {
+    await this.shutdownProcessor?.();
+  }
 }
@@ -5,8 +5,5 @@
 * 2.0.
 */

-import { trace } from '@opentelemetry/api';
-
-export function getLangtraceTracer() {
-  return trace.getTracer('langtrace');
-}
+export const BAGGAGE_TRACKING_BEACON_KEY = 'kibana.inference.tracing';
+export const BAGGAGE_TRACKING_BEACON_VALUE = '1';
@@ -0,0 +1,68 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Context } from '@opentelemetry/api';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
import {
  ReadableSpan,
  SpanProcessor,
  Span,
  BatchSpanProcessor,
} from '@opentelemetry/sdk-trace-node';
import { isInInferenceContext } from './is_in_inference_context';
import { IS_ROOT_INFERENCE_SPAN_ATTRIBUTE_NAME } from './root_inference_span';

export abstract class BaseInferenceSpanProcessor implements SpanProcessor {
  private delegate: SpanProcessor;

  constructor(exporter: OTLPTraceExporter, scheduledDelayMillis: number) {
    this.delegate = new BatchSpanProcessor(exporter, {
      scheduledDelayMillis,
    });
  }

  abstract processInferenceSpan(span: ReadableSpan): ReadableSpan;

  onStart(span: Span, parentContext: Context): void {
    const shouldTrack =
      isInInferenceContext(parentContext) || span.instrumentationScope.name === 'inference';

    if (shouldTrack) {
      span.setAttribute('_should_track', true);
      this.delegate.onStart(span, parentContext);
    }
  }

  onEnd(span: ReadableSpan): void {
    if (span.attributes._should_track) {
      delete span.attributes._should_track;

      // if this is the "root" inference span but it still has a parent,
      // drop the parent context, as Langfuse only shows root spans
      if (span.attributes[IS_ROOT_INFERENCE_SPAN_ATTRIBUTE_NAME] && span.parentSpanContext) {
        span = {
          ...span,
          spanContext: span.spanContext.bind(span),
          parentSpanContext: undefined,
        };
      }

      delete span.attributes[IS_ROOT_INFERENCE_SPAN_ATTRIBUTE_NAME];

      span = this.processInferenceSpan(span);
      this.delegate.onEnd(span);
    }
  }

  forceFlush(): Promise<void> {
    return this.delegate.forceFlush();
  }

  shutdown(): Promise<void> {
    return this.delegate.shutdown();
  }
}
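For orientation, a minimal sketch of a concrete subclass (the class name and exporter URL are hypothetical; the Langfuse and Phoenix processors below are the real implementations):

import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
import { ReadableSpan } from '@opentelemetry/sdk-trace-node';

class LoggingInferenceSpanProcessor extends BaseInferenceSpanProcessor {
  constructor() {
    // hypothetical OTLP endpoint; batch-export every 5 seconds
    super(new OTLPTraceExporter({ url: 'http://localhost:4318/v1/traces' }), 5000);
  }

  processInferenceSpan(span: ReadableSpan): ReadableSpan {
    // enrich or rewrite attributes before the batch processor exports the span
    return span;
  }
}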
@@ -0,0 +1,80 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import apm from 'elastic-apm-node';
import { isTracingSuppressed } from '@opentelemetry/core';
import { Span, context, propagation, trace } from '@opentelemetry/api';
import { BAGGAGE_TRACKING_BEACON_KEY, BAGGAGE_TRACKING_BEACON_VALUE } from './baggage';
import { InferenceSpanAttributes } from './with_inference_span';
import { IS_ROOT_INFERENCE_SPAN_ATTRIBUTE_NAME } from './root_inference_span';

export function createActiveInferenceSpan<T>(
  options: string | (InferenceSpanAttributes & { name: string }),
  cb: (span?: Span) => T
) {
  const tracer = trace.getTracer('inference');

  const { name, ...attributes } = typeof options === 'string' ? { name: options } : options;

  const currentTransaction = apm.currentTransaction;

  const parentSpan = trace.getActiveSpan();

  const elasticApmTraceId = currentTransaction?.ids['trace.id'];
  const elasticApmSpanId =
    apm.currentSpan?.ids['span.id'] ?? currentTransaction?.ids['transaction.id'];

  let parentContext = context.active();

  if (isTracingSuppressed(parentContext)) {
    return cb();
  }

  let baggage = propagation.getBaggage(parentContext);

  let isRootInferenceSpan = false;

  if (!baggage) {
    baggage = propagation.createBaggage({
      [BAGGAGE_TRACKING_BEACON_KEY]: {
        value: BAGGAGE_TRACKING_BEACON_VALUE,
      },
    });
    isRootInferenceSpan = true;
  } else if (
    baggage.getEntry(BAGGAGE_TRACKING_BEACON_KEY)?.value !== BAGGAGE_TRACKING_BEACON_VALUE
  ) {
    isRootInferenceSpan = true;
    baggage = baggage.setEntry(BAGGAGE_TRACKING_BEACON_KEY, {
      value: BAGGAGE_TRACKING_BEACON_VALUE,
    });
  }

  parentContext = propagation.setBaggage(parentContext, baggage);

  if (!parentSpan && elasticApmSpanId && elasticApmTraceId) {
    parentContext = trace.setSpanContext(parentContext, {
      spanId: elasticApmSpanId,
      traceId: elasticApmTraceId,
      traceFlags: 1,
    });
  }

  return tracer.startActiveSpan(
    name,
    {
      attributes: {
        ...attributes,
        [IS_ROOT_INFERENCE_SPAN_ATTRIBUTE_NAME]: isRootInferenceSpan,
      },
    },
    parentContext,
    (span) => {
      return cb(span);
    }
  );
}
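In practice the baggage beacon plays out like this (a descriptive sketch, not code from this change):

// first inference span on a request: no beacon in the active baggage yet, so
// the span is created with kibana.inference.root = true and the beacon is set
//
//   withInferenceSpan('outer', () =>
//     // nested call: the beacon is already present in baggage, so the inner
//     // span is created with kibana.inference.root = false
//     withInferenceSpan('inner', () => doWork())
//   );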
@@ -0,0 +1,19 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Context, propagation } from '@opentelemetry/api';
import { BAGGAGE_TRACKING_BEACON_KEY, BAGGAGE_TRACKING_BEACON_VALUE } from './baggage';

export function isInInferenceContext(context: Context) {
  // Only capture if the span is part of an inference trace/span.
  // The baggage is set in ./create_inference_active_span.ts
  const baggage = propagation.getBaggage(context);
  const inInferenceContext =
    baggage?.getEntry(BAGGAGE_TRACKING_BEACON_KEY)?.value === BAGGAGE_TRACKING_BEACON_VALUE;

  return inInferenceContext;
}
@@ -0,0 +1,22 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { LateBindingSpanProcessor } from '@kbn/tracing';
import { InferenceTracingLangfuseExportConfig } from '@kbn/inference-common';
import { Logger } from '@kbn/core/server';
import { LangfuseSpanProcessor } from './langfuse_span_processor';

export function initLangfuseProcessor({
  logger,
  config,
}: {
  logger: Logger;
  config: InferenceTracingLangfuseExportConfig;
}): () => Promise<void> {
  const processor = new LangfuseSpanProcessor(logger, config);

  return LateBindingSpanProcessor.register(processor);
}
@@ -0,0 +1,97 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Logger } from '@kbn/core/server';
import { InferenceTracingLangfuseExportConfig } from '@kbn/inference-common';
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http';
import { ReadableSpan } from '@opentelemetry/sdk-trace-node';
import { memoize, omit, partition } from 'lodash';
import { BaseInferenceSpanProcessor } from '../base_inference_span_processor';
import { unflattenAttributes } from '../util/unflatten_attributes';

export class LangfuseSpanProcessor extends BaseInferenceSpanProcessor {
  private getProjectId: () => Promise<string | undefined>;

  constructor(
    private readonly logger: Logger,
    private readonly config: InferenceTracingLangfuseExportConfig
  ) {
    const headers = {
      Authorization: `Basic ${Buffer.from(`${config.public_key}:${config.secret_key}`).toString(
        'base64'
      )}`,
    };

    const exporter = new OTLPTraceExporter({
      url: `${config.base_url}/api/public/otel/v1/traces`,
      headers,
    });

    super(exporter, config.scheduled_delay);

    const getProjectIdMemoized = memoize(async () => {
      const base = new URL(config.base_url);

      const { data } = await fetch(new URL('/api/public/projects', base), { headers }).then(
        (response) => response.json() as Promise<{ data: Array<{ id: string; name: string }> }>
      );

      return data?.[0]?.id;
    });

    this.getProjectId = () => {
      return getProjectIdMemoized().catch((error) => {
        logger.error(`Could not get project ID from Langfuse: ${error.message}`);
        getProjectIdMemoized.cache.clear?.();
        return undefined;
      });
    };
  }

  override processInferenceSpan(span: ReadableSpan): ReadableSpan {
    // Langfuse doesn't understand fully semconv-compliant span events
    // yet, so we translate to a format it does understand. See
    // https://github.com/langfuse/langfuse/blob/c1c22a9b9b684bd45ca9436556c2599d5a23271d/web/src/features/otel/server/index.ts#L476
    if (span.attributes['gen_ai.operation.name'] === 'chat') {
      const [inputEvents, outputEvents] = partition(
        span.events,
        (event) => event.name !== 'gen_ai.choice'
      );

      span.attributes['input.value'] = JSON.stringify(
        inputEvents.map((event) => {
          return unflattenAttributes(event.attributes ?? {});
        })
      );

      span.attributes['output.value'] = JSON.stringify(
        outputEvents.map((event) => {
          const { message, ...rest } = unflattenAttributes(event.attributes ?? {});
          return {
            ...omit(rest, 'finish_reason', 'index'),
            ...message,
          };
        })[0]
      );
    }

    if (!span.parentSpanContext) {
      const traceId = span.spanContext().traceId;
      void this.getProjectId().then((projectId) => {
        // this is how Langfuse generates IDs, see
        // https://github.com/langfuse/langfuse/blob/2d4708921c67bca61c774633b7df65b3c5105f0d/web/src/features/otel/server/index.ts#L506
        const langfuseTraceId = Buffer.from(traceId).toString('hex');
        const url = new URL(
          `/project/${projectId}/traces/${langfuseTraceId}`,
          new URL(this.config.base_url)
        );
        this.logger.info(`View trace at ${url.toString()}`);
      });
    }

    return span;
  }
}
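To make the translation concrete, a rough before/after sketch for a chat span (event payloads abbreviated and illustrative):

// before: span.events = [
//   { name: 'gen_ai.user.message', attributes: { role: 'user', content: 'hi' } },
//   { name: 'gen_ai.choice', attributes: { 'message.role': 'assistant', 'message.content': 'hello', finish_reason: 'stop', index: 0 } },
// ]
//
// after processInferenceSpan:
//   span.attributes['input.value']  === '[{"role":"user","content":"hi"}]'
//   span.attributes['output.value'] === '{"role":"assistant","content":"hello"}'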
@@ -0,0 +1,148 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import {
  INPUT_MIME_TYPE,
  INPUT_VALUE,
  LLM_INPUT_MESSAGES,
  LLM_INVOCATION_PARAMETERS,
  LLM_MODEL_NAME,
  LLM_OUTPUT_MESSAGES,
  LLM_PROVIDER,
  LLM_SYSTEM,
  LLM_TOKEN_COUNT_COMPLETION,
  LLM_TOKEN_COUNT_PROMPT,
  LLM_TOKEN_COUNT_TOTAL,
  MESSAGE_CONTENT,
  MESSAGE_ROLE,
  MESSAGE_TOOL_CALLS,
  MESSAGE_TOOL_CALL_ID,
  MimeType,
  OUTPUT_VALUE,
  SemanticConventions,
  TOOL_CALL_FUNCTION_ARGUMENTS_JSON,
  TOOL_CALL_FUNCTION_NAME,
  TOOL_CALL_ID,
} from '@arizeai/openinference-semantic-conventions';
import { ReadableSpan } from '@opentelemetry/sdk-trace-base';
import { omit, partition } from 'lodash';
import { ChoiceEvent, GenAISemanticConventions, MessageEvent } from '../types';
import { flattenAttributes } from '../util/flatten_attributes';
import { unflattenAttributes } from '../util/unflatten_attributes';

export function getChatSpan(span: ReadableSpan) {
  const [inputEvents, outputEvents] = partition(
    span.events,
    (event) => event.name !== GenAISemanticConventions.GenAIChoice
  );

  span.attributes[LLM_MODEL_NAME] = span.attributes[GenAISemanticConventions.GenAIResponseModel];

  span.attributes[INPUT_MIME_TYPE] = MimeType.JSON;
  span.attributes[LLM_INVOCATION_PARAMETERS] = JSON.stringify({
    system: inputEvents.find((event) => event.name === GenAISemanticConventions.GenAISystemMessage)
      ?.attributes?.content,
  });
  span.attributes[LLM_SYSTEM] = span.attributes[GenAISemanticConventions.GenAISystem];

  span.attributes[LLM_PROVIDER] = span.attributes[GenAISemanticConventions.GenAISystem];

  span.attributes[LLM_TOKEN_COUNT_COMPLETION] =
    span.attributes[GenAISemanticConventions.GenAIUsageOutputTokens];

  span.attributes[LLM_TOKEN_COUNT_PROMPT] =
    span.attributes[GenAISemanticConventions.GenAIUsageInputTokens];

  span.attributes[LLM_TOKEN_COUNT_TOTAL] =
    Number(span.attributes[LLM_TOKEN_COUNT_COMPLETION] ?? 0) +
    Number(span.attributes[LLM_TOKEN_COUNT_PROMPT] ?? 0);

  span.attributes[INPUT_VALUE] = JSON.stringify(
    inputEvents.map((event) => {
      return unflattenAttributes(event.attributes ?? {});
    })
  );

  span.attributes[OUTPUT_VALUE] = JSON.stringify(
    outputEvents.map((event) => {
      const { message, ...rest } = unflattenAttributes(event.attributes ?? {});
      return {
        ...omit(rest, 'finish_reason', 'index'),
        ...message,
      };
    })[0]
  );

  const outputUnflattened = unflattenAttributes(
    outputEvents[0].attributes ?? {}
  ) as ChoiceEvent['body'];

  Object.assign(
    span.attributes,
    flattenAttributes({
      [`${LLM_OUTPUT_MESSAGES}.0`]: {
        [MESSAGE_ROLE]: 'assistant',
        [MESSAGE_CONTENT]: outputUnflattened.message.content,
        [MESSAGE_TOOL_CALLS]: outputUnflattened.message.tool_calls?.map((toolCall) => {
          return {
            [TOOL_CALL_ID]: toolCall.id,
            [TOOL_CALL_FUNCTION_NAME]: toolCall.function.name,
            [TOOL_CALL_FUNCTION_ARGUMENTS_JSON]: toolCall.function.arguments,
          };
        }),
      },
    })
  );

  const messageEvents = inputEvents.filter(
    (event) =>
      event.name === GenAISemanticConventions.GenAIAssistantMessage ||
      event.name === GenAISemanticConventions.GenAIUserMessage ||
      event.name === GenAISemanticConventions.GenAIToolMessage ||
      event.name === GenAISemanticConventions.GenAISystemMessage
  );

  const llmInputMessages: Array<Record<string, any>> = messageEvents.map((message) => {
    const unflattened = unflattenAttributes(message.attributes ?? {}) as Record<string, any> &
      Exclude<MessageEvent, ChoiceEvent>['body'];

    const role = unflattened.role;
    const content = unflattened.content;

    unflattened[SemanticConventions.MESSAGE_ROLE] = role;
    unflattened[SemanticConventions.MESSAGE_CONTENT] = content ?? '';

    unflattened[MESSAGE_TOOL_CALLS] =
      role === 'assistant' && 'tool_calls' in unflattened
        ? unflattened.tool_calls?.map((toolCall) => {
            return {
              [SemanticConventions.TOOL_CALL_ID]: toolCall.id,
              [SemanticConventions.TOOL_CALL_FUNCTION_NAME]: toolCall.function.name,
              [SemanticConventions.TOOL_CALL_FUNCTION_ARGUMENTS_JSON]: toolCall.function.arguments,
            };
          })
        : [];

    if (unflattened.role === 'tool') {
      unflattened[MESSAGE_TOOL_CALL_ID] = unflattened.id;
    }

    return unflattened;
  });

  const flattenedInputMessages = flattenAttributes(
    Object.fromEntries(
      llmInputMessages.map((message, index) => {
        return [`${LLM_INPUT_MESSAGES}.${index}`, message];
      })
    )
  );

  Object.assign(span.attributes, flattenedInputMessages);

  return span;
}
@@ -0,0 +1,19 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { SemanticConventions } from '@arizeai/openinference-semantic-conventions';
import { ReadableSpan } from '@opentelemetry/sdk-trace-base';
import { ElasticGenAIAttributes } from '../types';

export function getExecuteToolSpan(span: ReadableSpan) {
  span.attributes[SemanticConventions.TOOL_PARAMETERS] =
    span.attributes[ElasticGenAIAttributes.ToolParameters];
  span.attributes[SemanticConventions.TOOL_DESCRIPTION] =
    span.attributes[ElasticGenAIAttributes.ToolDescription];

  return span;
}
@@ -0,0 +1,22 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { LateBindingSpanProcessor } from '@kbn/tracing';
import { InferenceTracingPhoenixExportConfig } from '@kbn/inference-common';
import { Logger } from '@kbn/core/server';
import { PhoenixSpanProcessor } from './phoenix_span_processor';

export function initPhoenixProcessor({
  logger,
  config,
}: {
  logger: Logger;
  config: InferenceTracingPhoenixExportConfig;
}): () => Promise<void> {
  const processor = new PhoenixSpanProcessor(logger, config);

  return LateBindingSpanProcessor.register(processor);
}
@@ -0,0 +1,36 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-proto';
import {
  OTLPExporterNodeConfigBase,
  createOtlpNetworkExportDelegate,
} from '@opentelemetry/otlp-exporter-base';

interface Delegate {
  _serializer: Parameters<typeof createOtlpNetworkExportDelegate>[1];
}

/**
 * This exporter exists because Phoenix's /v1/traces endpoint responds with
 * JSON, which is not spec-compliant and would otherwise cause a warning to
 * be logged.
 */
export class PhoenixProtoExporter extends OTLPTraceExporter {
  constructor(config?: OTLPExporterNodeConfigBase) {
    super(config);
    const serializer = (this as unknown as { _delegate: Delegate })._delegate._serializer;

    const originalDeserializeResponse = serializer.deserializeResponse.bind(serializer);

    serializer.deserializeResponse = (data) => {
      if (data.toString() === '"{}"') {
        return undefined;
      }

      return originalDeserializeResponse(data);
    };
  }
}
@@ -0,0 +1,95 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Logger } from '@kbn/core/server';
import { InferenceTracingPhoenixExportConfig } from '@kbn/inference-common';
import { ReadableSpan } from '@opentelemetry/sdk-trace-node';
import { memoize } from 'lodash';
import {
  SEMRESATTRS_PROJECT_NAME,
  SemanticConventions,
} from '@arizeai/openinference-semantic-conventions';
import { BaseInferenceSpanProcessor } from '../base_inference_span_processor';
import { ElasticGenAIAttributes, GenAISemanticConventions } from '../types';
import { getChatSpan } from './get_chat_span';
import { getExecuteToolSpan } from './get_execute_tool_span';
import { PhoenixProtoExporter } from './phoenix_otlp_exporter';

export class PhoenixSpanProcessor extends BaseInferenceSpanProcessor {
  private getProjectId: () => Promise<string | undefined>;

  constructor(
    private readonly logger: Logger,
    private readonly config: InferenceTracingPhoenixExportConfig
  ) {
    const headers = {
      ...(config.api_key ? { Authorization: `Bearer ${config.api_key}` } : {}),
    };

    const exporter = new PhoenixProtoExporter({
      headers,
      url: `${config.base_url}/v1/traces`,
    });

    super(exporter, config.scheduled_delay);

    const getProjectIdMemoized = memoize(async () => {
      if (!config.public_url) {
        return undefined;
      }

      const base = new URL(config.public_url);

      const { data } = await fetch(new URL('/v1/projects', base), { headers }).then(
        (response) =>
          response.json() as Promise<{
            data: Array<{ id: string; name: string; description: string }>;
          }>
      );

      return config.project_name
        ? data.find((item) => item.name === config.project_name)?.id
        : data[0]?.id;
    });

    this.getProjectId = () => {
      return getProjectIdMemoized().catch((error) => {
        logger.error(`Could not get project ID from Phoenix: ${error.message}`);
        getProjectIdMemoized.cache.clear?.();
        return undefined;
      });
    };
  }

  processInferenceSpan(span: ReadableSpan): ReadableSpan {
    const operationName = span.attributes[GenAISemanticConventions.GenAIOperationName];
    span.resource.attributes[SEMRESATTRS_PROJECT_NAME] = this.config.project_name ?? 'default';
    span.attributes[SemanticConventions.OPENINFERENCE_SPAN_KIND] =
      span.attributes[ElasticGenAIAttributes.InferenceSpanKind];

    if (operationName === 'chat') {
      span = getChatSpan(span);
    } else if (operationName === 'execute_tool') {
      span = getExecuteToolSpan(span);
    }

    if (!span.parentSpanContext) {
      const traceId = span.spanContext().traceId;
      void this.getProjectId().then((projectId) => {
        if (!projectId || !this.config.public_url) {
          return;
        }

        const url = new URL(
          `/projects/${projectId}/traces/${traceId}?selected`,
          new URL(this.config.public_url)
        );
        this.logger.info(`View trace at ${url.toString()}`);
      });
    }
    return span;
  }
}
@@ -5,11 +5,4 @@
 * 2.0.
 */

-export function getLangtraceSpanAttributes() {
-  return {
-    'langtrace.sdk.name': '@langtrase/typescript-sdk',
-    'langtrace.service.type': 'llm',
-    'langtrace.service.version': 'unknown',
-    'langtrace.version': '2.1.0',
-  };
-}
+export const IS_ROOT_INFERENCE_SPAN_ATTRIBUTE_NAME = 'kibana.inference.root';
139 x-pack/platform/plugins/shared/inference/server/tracing/types.ts (new file)
@@ -0,0 +1,139 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Context, Span } from '@opentelemetry/api';

export enum GenAISemanticConventions {
  GenAIUsageCost = 'gen_ai.usage.cost',
  GenAIUsageInputTokens = 'gen_ai.usage.input_tokens',
  GenAIUsageOutputTokens = 'gen_ai.usage.output_tokens',
  GenAIOperationName = 'gen_ai.operation.name',
  GenAIResponseModel = 'gen_ai.response.model',
  GenAISystem = 'gen_ai.system',
  GenAIOutputType = 'gen_ai.output.type',
  GenAIToolCallId = 'gen_ai.tool.call.id',
  GenAIToolName = 'gen_ai.tool.name',
  GenAISystemMessage = 'gen_ai.system.message',
  GenAIUserMessage = 'gen_ai.user.message',
  GenAIAssistantMessage = 'gen_ai.assistant.message',
  GenAIToolMessage = 'gen_ai.tool.message',
  GenAIChoice = 'gen_ai.choice',
}

export enum ElasticGenAIAttributes {
  ToolDescription = 'elastic.tool.description',
  ToolParameters = 'elastic.tool.parameters',
  InferenceSpanKind = 'elastic.inference.span.kind',
}

export interface GenAISemConvAttributes {
  [GenAISemanticConventions.GenAIUsageCost]?: number;
  [GenAISemanticConventions.GenAIUsageInputTokens]?: number;
  [GenAISemanticConventions.GenAIUsageOutputTokens]?: number;
  [GenAISemanticConventions.GenAIOperationName]?: 'chat' | 'execute_tool';
  [GenAISemanticConventions.GenAIResponseModel]?: string;
  [GenAISemanticConventions.GenAISystem]?: string;
  'error.type'?: string;
  [GenAISemanticConventions.GenAIOutputType]?: 'text' | 'image' | 'json';
  [GenAISemanticConventions.GenAIToolCallId]?: string;
  [GenAISemanticConventions.GenAIToolName]?: string;
  'input.value'?: any;
  'output.value'?: any;
  [ElasticGenAIAttributes.InferenceSpanKind]?: 'CHAIN' | 'LLM' | 'TOOL';
  [ElasticGenAIAttributes.ToolDescription]?: string;
  [ElasticGenAIAttributes.ToolParameters]?: string;
}

interface GenAISemConvEvent<
  TName extends string,
  TBody extends {},
  TAttributeName extends keyof GenAISemConvAttributes
> {
  name: TName;
  body: TBody;
  attributes?: {
    [key in TAttributeName]: GenAISemConvAttributes[TAttributeName];
  };
}

export type SystemMessageEvent = GenAISemConvEvent<
  GenAISemanticConventions.GenAISystemMessage,
  {
    role: 'system';
    content: string;
  },
  GenAISemanticConventions.GenAISystem
>;

export type UserMessageEvent = GenAISemConvEvent<
  GenAISemanticConventions.GenAIUserMessage,
  {
    role: 'user';
    content: string;
  },
  GenAISemanticConventions.GenAISystem
>;

export type AssistantMessageEvent = GenAISemConvEvent<
  GenAISemanticConventions.GenAIAssistantMessage,
  {
    content?: unknown;
    role: 'assistant';
    tool_calls?: Array<{
      function: {
        arguments: string;
        name: string;
      };
      id: string;
      type: 'function';
    }>;
  },
  GenAISemanticConventions.GenAISystem
>;

export type ToolMessageEvent = GenAISemConvEvent<
  GenAISemanticConventions.GenAIToolMessage,
  {
    content?: string;
    id: string;
    role: 'tool' | 'function';
  },
  GenAISemanticConventions.GenAISystem
>;

export type ChoiceEvent = GenAISemConvEvent<
  GenAISemanticConventions.GenAIChoice,
  {
    index: number;
    finish_reason: 'stop' | 'tool_calls';
    message: {
      content?: string | null;
      role: 'assistant';
      tool_calls?: Array<{
        function: {
          name: string;
          arguments: string;
        };
        id: string;
        type: 'function';
      }>;
    };
  },
  GenAISemanticConventions.GenAISystem
>;

export type MessageEvent =
  | SystemMessageEvent
  | UserMessageEvent
  | AssistantMessageEvent
  | ToolMessageEvent
  | ChoiceEvent;

export interface InferenceSpanInit {
  span: Span;
  context: Context;
}
@@ -0,0 +1,29 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { AttributeValue } from '@opentelemetry/api';
import { isArray, isPlainObject } from 'lodash';

export function flattenAttributes(
  obj: Record<string, any>,
  parentKey: string = ''
): Record<string, AttributeValue> {
  const result: Record<string, AttributeValue> = {};

  for (const key in obj) {
    if (Object.hasOwn(obj, key)) {
      const value = obj[key];
      const newKey = parentKey ? `${parentKey}.${key}` : key;
      if (isPlainObject(value) || isArray(value)) {
        Object.assign(result, flattenAttributes(value, newKey));
      } else {
        result[newKey] = value;
      }
    }
  }
  return result;
}
@@ -0,0 +1,24 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { set } from '@kbn/safer-lodash-set';
import { AttributeValue } from '@opentelemetry/api';

export function unflattenAttributes(
  flat: Record<string, AttributeValue | undefined>
): Record<string, any> {
  const result: Record<string, any> = {};

  for (const key in flat) {
    if (Object.hasOwn(flat, key)) {
      // split on dots; numeric segments cause array creation
      set(result, key.split('.'), flat[key]);
    }
  }

  return result;
}
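A small round-trip sketch of the two attribute helpers (values are illustrative):

import { flattenAttributes } from './flatten_attributes';
import { unflattenAttributes } from './unflatten_attributes';

const flat = flattenAttributes({
  message: { role: 'assistant', tool_calls: [{ id: '1' }] },
});
// => { 'message.role': 'assistant', 'message.tool_calls.0.id': '1' }

const nested = unflattenAttributes(flat);
// => { message: { role: 'assistant', tool_calls: [{ id: '1' }] } }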
@@ -0,0 +1,224 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import {
  AssistantMessage,
  ChatCompleteCompositeResponse,
  Message,
  MessageRole,
  ToolCall,
  ToolMessage,
  ToolOptions,
  UserMessage,
  isChatCompletionMessageEvent,
  isChatCompletionTokenCountEvent,
} from '@kbn/inference-common';
import { Span } from '@opentelemetry/api';
import { isObservable, tap } from 'rxjs';
import { isPromise } from 'util/types';
import { withInferenceSpan } from './with_inference_span';
import {
  AssistantMessageEvent,
  ChoiceEvent,
  ElasticGenAIAttributes,
  GenAISemConvAttributes,
  GenAISemanticConventions,
  MessageEvent,
  SystemMessageEvent,
  ToolMessageEvent,
  UserMessageEvent,
} from './types';
import { flattenAttributes } from './util/flatten_attributes';

function addEvent(span: Span, event: MessageEvent) {
  const flattened = flattenAttributes(event.body);
  return span.addEvent(event.name, {
    ...flattened,
    ...event.attributes,
  });
}

function setChoice(span: Span, { content, toolCalls }: { content: string; toolCalls: ToolCall[] }) {
  addEvent(span, {
    name: GenAISemanticConventions.GenAIChoice,
    body: {
      finish_reason: toolCalls.length ? 'tool_calls' : 'stop',
      index: 0,
      message: {
        ...mapAssistantResponse({ content, toolCalls }),
      },
    },
  } satisfies ChoiceEvent);
}

function setTokens(span: Span, { prompt, completion }: { prompt: number; completion: number }) {
  span.setAttributes({
    [GenAISemanticConventions.GenAIUsageInputTokens]: prompt,
    [GenAISemanticConventions.GenAIUsageOutputTokens]: completion,
  } satisfies GenAISemConvAttributes);
}

interface InferenceGenerationOptions {
  provider?: string;
  model?: string;
  system?: string;
  messages: Message[];
}

function getUserMessageEvent(message: UserMessage): UserMessageEvent {
  return {
    name: GenAISemanticConventions.GenAIUserMessage,
    body: {
      content:
        typeof message.content === 'string' ? message.content : JSON.stringify(message.content),
      role: 'user',
    },
  };
}

function getAssistantMessageEvent(message: AssistantMessage): AssistantMessageEvent {
  return {
    name: GenAISemanticConventions.GenAIAssistantMessage,
    body: mapAssistantResponse({
      content: message.content,
      toolCalls: message.toolCalls,
    }),
  };
}

function getToolMessageEvent(message: ToolMessage): ToolMessageEvent {
  return {
    name: GenAISemanticConventions.GenAIToolMessage,
    body: {
      role: 'tool',
      id: message.toolCallId,
      content:
        typeof message.response === 'string' ? message.response : JSON.stringify(message.response),
    },
  };
}

function mapAssistantResponse({
  content,
  toolCalls,
}: {
  content?: string | null;
  toolCalls?: ToolCall[];
}) {
  return {
    content: content || null,
    role: 'assistant' as const,
    tool_calls: toolCalls?.map((toolCall) => {
      return {
        function: {
          name: toolCall.function.name,
          arguments: JSON.stringify(
            'arguments' in toolCall.function ? toolCall.function.arguments : {}
          ),
        },
        id: toolCall.toolCallId,
        type: 'function' as const,
      };
    }),
  };
}

/**
 * Wrapper around {@link withInferenceSpan} that sets the right attributes for a chat operation span.
 * @param options
 * @param cb
 */
export function withChatCompleteSpan<T extends ChatCompleteCompositeResponse<ToolOptions, boolean>>(
  options: InferenceGenerationOptions,
  cb: (span?: Span) => T
): T;

export function withChatCompleteSpan(
  options: InferenceGenerationOptions,
  cb: (span?: Span) => ChatCompleteCompositeResponse<ToolOptions, boolean>
): ChatCompleteCompositeResponse<ToolOptions, boolean> {
  const { system, messages, model, provider, ...attributes } = options;

  const next = withInferenceSpan(
    {
      name: 'chatComplete',
      ...attributes,
      [GenAISemanticConventions.GenAIOperationName]: 'chat',
      [GenAISemanticConventions.GenAIResponseModel]: model ?? 'unknown',
      [GenAISemanticConventions.GenAISystem]: provider ?? 'unknown',
      [ElasticGenAIAttributes.InferenceSpanKind]: 'LLM',
    },
    (span) => {
      if (!span) {
        return cb();
      }

      if (system) {
        addEvent(span, {
          name: GenAISemanticConventions.GenAISystemMessage,
          body: {
            content: system,
            role: 'system',
          },
        } satisfies SystemMessageEvent);
      }

      messages
        .map((message) => {
          switch (message.role) {
            case MessageRole.User:
              return getUserMessageEvent(message);

            case MessageRole.Assistant:
              return getAssistantMessageEvent(message);

            case MessageRole.Tool:
              return getToolMessageEvent(message);
          }
        })
        .forEach((event) => {
          addEvent(span, event);
        });

      const result = cb();

      if (isObservable(result)) {
        return result.pipe(
          tap({
            next: (value) => {
              if (isChatCompletionMessageEvent(value)) {
                setChoice(span, {
                  content: value.content,
                  toolCalls: value.toolCalls,
                });
              } else if (isChatCompletionTokenCountEvent(value)) {
                setTokens(span, value.tokens);
              }
            },
          })
        );
      }

      if (isPromise(result)) {
        return result.then((value) => {
          setChoice(span, {
            content: value.content,
            toolCalls: value.toolCalls,
          });
          if (value.tokens) {
            setTokens(span, value.tokens);
          }
          return value;
        });
      }

      return result;
    }
  );

  return next;
}
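A usage sketch of the exported wrapper (callModel and the values are illustrative stand-ins with simplified types; inside the plugin this wraps the inferenceAdapter.chatComplete(...).pipe(chunksIntoMessage(...)) call shown in the chat complete API hunk above):

import { MessageRole, type Message } from '@kbn/inference-common';
import { withChatCompleteSpan } from './with_chat_complete_span';

declare function callModel(args: { messages: Message[] }): Promise<{
  content: string;
  toolCalls: [];
  tokens?: { prompt: number; completion: number };
}>;

const messages: Message[] = [{ role: MessageRole.User, content: 'Hello' }];

// the span records the input messages as events, plus the choice and token
// counts once the promise resolves
const response = withChatCompleteSpan(
  {
    system: 'You are a helpful assistant',
    messages,
    model: 'gpt-4o', // illustrative
    provider: 'openai', // illustrative
  },
  () => callModel({ messages })
);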
@@ -0,0 +1,62 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Span } from '@opentelemetry/api';
import { isPromise } from 'util/types';
import { safeJsonStringify } from '@kbn/std';
import { withInferenceSpan } from './with_inference_span';
import { ElasticGenAIAttributes, GenAISemanticConventions } from './types';

/**
 * Wrapper around {@link withInferenceSpan} that sets the right attributes for an execute_tool operation span.
 * @param options
 * @param cb
 */
export function withExecuteToolSpan<T>(
  options: string | { name: string; description?: string; toolCallId?: string; input?: unknown },
  cb: (span?: Span) => T
): T {
  const { name, description, toolCallId, input } =
    typeof options === 'string'
      ? { name: options, description: undefined, toolCallId: undefined, input: undefined }
      : options;

  return withInferenceSpan(
    {
      name: `execute_tool ${name}`,
      [GenAISemanticConventions.GenAIToolName]: name,
      [GenAISemanticConventions.GenAIOperationName]: 'execute_tool',
      [GenAISemanticConventions.GenAIToolCallId]: toolCallId,
      [ElasticGenAIAttributes.InferenceSpanKind]: 'TOOL',
      [ElasticGenAIAttributes.ToolDescription]: description,
      [ElasticGenAIAttributes.ToolParameters]: safeJsonStringify(input),
    },
    (span) => {
      if (!span) {
        return cb();
      }

      const res = cb(span);

      if (isPromise(res)) {
        res.then(
          (value) => {
            const stringified = safeJsonStringify(value);
            if (stringified) {
              span.setAttribute('output.value', stringified);
            }
          },
          // if the promise fails, we catch it and noop
          () => {}
        );
        return res;
      }

      return res;
    }
  );
}
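And a usage sketch for withExecuteToolSpan (tool name, arguments and runMyTool are illustrative stand-ins):

import { withExecuteToolSpan } from './with_execute_tool_span';

declare function runMyTool(input: { location: string }): Promise<{ temperature: number }>;

// the tool description and serialized input end up on the span, and the
// resolved value is recorded as output.value; the original promise is returned
const result = withExecuteToolSpan(
  {
    name: 'get_weather',
    description: 'Fetches the weather for a location',
    toolCallId: 'call_1',
    input: { location: 'Amsterdam' },
  },
  () => runMyTool({ location: 'Amsterdam' })
);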
@@ -0,0 +1,134 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Context, Span, SpanStatusCode, context } from '@opentelemetry/api';
import { Observable, from, ignoreElements, isObservable, of, switchMap, tap } from 'rxjs';
import { isPromise } from 'util/types';
import { createActiveInferenceSpan } from './create_inference_active_span';
import { GenAISemConvAttributes } from './types';

export type InferenceSpanAttributes = GenAISemConvAttributes;

/**
 * Wraps a callback in an active span. If the callback returns an Observable
 * or Promise, it will set the span status to the appropriate value when the
 * async operation completes.
 * @param options
 * @param cb
 */
export function withInferenceSpan<T>(
  options: string | ({ name: string } & InferenceSpanAttributes),
  cb: (span?: Span) => T
): T {
  const parentContext = context.active();
  return createActiveInferenceSpan(options, (span) => {
    if (!span) {
      return cb();
    }

    try {
      const res = cb(span);
      if (isObservable(res)) {
        return withInferenceSpan$(span, parentContext, res) as T;
      }

      if (isPromise(res)) {
        return withInferenceSpanPromise(span, res) as T;
      }

      span.setStatus({ code: SpanStatusCode.OK });
      span.end();
      return res;
    } catch (error) {
      span.recordException(error);
      span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
      span.end();
      throw error;
    }
  });
}

function withInferenceSpan$<T>(
  span: Span,
  parentContext: Context,
  source$: Observable<T>
): Observable<T> {
  const ctx = context.active();

  return new Observable<T>((subscriber) => {
    // Make sure anything that happens during this callback uses the context
    // that was active when this function was called
    const subscription = context.with(ctx, () => {
      return source$
        .pipe(
          tap({
            next: (value) => {
              subscriber.next(value);
            },
            error: (error) => {
              // Call span.end() and subscriber.error() in the parent context, to
              // ensure a span that gets created right after doesn't get created
              // as a child of this span, but as a child of its parent span.
              context.with(parentContext, () => {
                span.recordException(error);
                span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
                span.end();
                subscriber.error(error);
              });
            },
          }),
          switchMap((value) => {
            // unwraps observable -> observable | promise which is a use case for the
            // Observability AI Assistant in tool calling
            if (isObservable(value)) {
              return value;
            }
            if (isPromise(value)) {
              return from(value);
            }
            return of(value);
          }),
          ignoreElements()
        )
        .subscribe({
          error: (error) => {
            context.with(parentContext, () => {
              span.recordException(error);
              span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
              span.end();
              subscriber.error(error);
            });
          },
          complete: () => {
            context.with(parentContext, () => {
              span.setStatus({
                code: SpanStatusCode.OK,
              });
              span.end();
              subscriber.complete();
            });
          },
        });
    });
    return () => context.with(parentContext, () => subscription.unsubscribe());
  });
}

function withInferenceSpanPromise<T>(span: Span, promise: Promise<T>): Promise<T> {
  return promise
    .then((res) => {
      span.setStatus({ code: SpanStatusCode.OK });
      span.end();
      return res;
    })
    .catch((error) => {
      span.recordException(error);
      span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
      span.end();
      throw error;
    });
}
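A minimal usage sketch for `withInferenceSpan` (the stream and the attribute key are illustrative; the accepted keys come from `GenAISemConvAttributes` in this package):

import { of } from 'rxjs';
import { withInferenceSpan } from '@kbn/inference-plugin/server';

// Because the callback returns an Observable, the span stays open until the
// stream completes (status OK) or errors (exception recorded, status ERROR).
const events$ = withInferenceSpan({ name: 'chat my-model' }, (span) => {
  span?.setAttribute('gen_ai.request.model', 'my-model'); // illustrative attribute
  return of({ content: 'done' });
});
events$.subscribe();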
@@ -37,6 +37,9 @@
     "@kbn/field-types",
     "@kbn/expressions-plugin",
     "@kbn/inference-langchain",
-    "@kbn/sse-utils-server"
+    "@kbn/sse-utils-server",
+    "@kbn/tracing",
+    "@kbn/safer-lodash-set",
+    "@kbn/std"
   ]
 }
@@ -29,7 +29,6 @@ import {
 } from './types';
 import { registerFunctions } from './functions';
 import { recallRankingEvent } from './analytics/recall_ranking';
-import { initLangtrace } from './service/client/instrumentation/init_langtrace';
 import { aiAssistantCapabilities } from '../common/capabilities';
 import { runStartupMigrations } from './service/startup_migrations/run_startup_migrations';
 export class ObservabilityAIAssistantPlugin
@@ -50,7 +49,6 @@ export class ObservabilityAIAssistantPlugin
     this.isDev = context.env.mode.dev;
     this.logger = context.logger.get();
     this.config = context.config.get<ObservabilityAIAssistantConfig>();
-    initLangtrace();
   }
   public setup(
     core: CoreSetup<
@@ -6,7 +6,6 @@
  */
 import { notImplemented } from '@hapi/boom';
 import { toBooleanRt } from '@kbn/io-ts-utils';
-import { context as otelContext } from '@opentelemetry/api';
 import * as t from 'io-ts';
 import { from, map } from 'rxjs';
 import { v4 } from 'uuid';
@@ -14,7 +13,6 @@ import { Readable } from 'stream';
 import { AssistantScope } from '@kbn/ai-assistant-common';
 import { aiAssistantSimulatedFunctionCalling } from '../..';
 import { createFunctionResponseMessage } from '../../../common/utils/create_function_response_message';
-import { LangTracer } from '../../service/client/instrumentation/lang_tracer';
 import { flushBuffer } from '../../service/util/flush_buffer';
 import { observableIntoOpenAIStream } from '../../service/util/observable_into_openai_stream';
 import { observableIntoStream } from '../../service/util/observable_into_stream';
@@ -168,7 +166,6 @@ const chatRoute = createObservabilityAIAssistantServerRoute({
           }
         : {}),
       simulateFunctionCalling,
-      tracer: new LangTracer(otelContext.active()),
     });

     return observableIntoStream(response$.pipe(flushBuffer(isCloudEnabled)));
@@ -207,7 +204,6 @@ const chatRecallRoute = createObservabilityAIAssistantServerRoute({
       connectorId,
       simulateFunctionCalling,
       signal,
-      tracer: new LangTracer(otelContext.active()),
     }),
     context,
     logger: resources.logger,
@@ -6,7 +6,6 @@
  */

 import { nonEmptyStringRt, toBooleanRt } from '@kbn/io-ts-utils';
-import { context as otelContext } from '@opentelemetry/api';
 import * as t from 'io-ts';
 import { v4 } from 'uuid';
 import { FunctionDefinition } from '../../../common/functions/types';
@@ -16,7 +15,6 @@ import { getSystemMessageFromInstructions } from '../../service/util/get_system_
 import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route';
 import { assistantScopeType } from '../runtime_types';
 import { getDatasetInfo } from '../../functions/get_dataset_info';
-import { LangTracer } from '../../service/client/instrumentation/lang_tracer';

 const getFunctionsRoute = createObservabilityAIAssistantServerRoute({
   endpoint: 'GET /internal/observability_ai_assistant/functions',
@@ -112,7 +110,6 @@ const functionDatasetInfoRoute = createObservabilityAIAssistantServerRoute({
     return client.chat(operationName, {
       ...params,
       stream: true,
-      tracer: new LangTracer(otelContext.active()),
       connectorId,
     });
   },
@@ -10,7 +10,6 @@ import type { ActionsClient } from '@kbn/actions-plugin/server';
 import type { CoreSetup, ElasticsearchClient, IUiSettingsClient } from '@kbn/core/server';
 import type { Logger } from '@kbn/logging';
 import type { PublicMethodsOf } from '@kbn/utility-types';
-import { context } from '@opentelemetry/api';
 import { last, merge, omit } from 'lodash';
 import {
   catchError,
@@ -29,7 +28,7 @@ import {
 } from 'rxjs';
 import { v4 } from 'uuid';
 import type { AssistantScope } from '@kbn/ai-assistant-common';
-import type { InferenceClient } from '@kbn/inference-plugin/server';
+import { withInferenceSpan, type InferenceClient } from '@kbn/inference-plugin/server';
 import { ChatCompleteResponse, FunctionCallingMode, ToolChoiceType } from '@kbn/inference-common';
 import { isLockAcquisitionError } from '@kbn/lock-manager';
 import { resourceNames } from '..';
@@ -62,7 +61,6 @@ import { getAccessQuery } from '../util/get_access_query';
 import { getSystemMessageFromInstructions } from '../util/get_system_message_from_instructions';
 import { failOnNonExistingFunctionCall } from './operators/fail_on_non_existing_function_call';
 import { getContextFunctionRequestIfNeeded } from './get_context_function_request_if_needed';
-import { LangTracer } from './instrumentation/lang_tracer';
 import { continueConversation } from './operators/continue_conversation';
 import { convertInferenceEventsToStreamingEvents } from './operators/convert_inference_events_to_streaming_events';
 import { extractMessages } from './operators/extract_messages';
@@ -70,7 +68,6 @@ import { getGeneratedTitle } from './operators/get_generated_title';
 import { runStartupMigrations } from '../startup_migrations/run_startup_migrations';
 import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
 import { ObservabilityAIAssistantConfig } from '../../config';
-import { apmInstrumentation } from './operators/apm_instrumentation';
 import { waitForKbModel, warmupModel } from '../inference_endpoint';
 import { reIndexKnowledgeBaseWithLock } from '../knowledge_base_service/reindex_knowledge_base';
 import { populateMissingSemanticTextFieldWithLock } from '../startup_migrations/populate_missing_semantic_text_fields';
@@ -204,70 +201,68 @@ export class ObservabilityAIAssistantClient {
       except: string[];
     };
   }): Observable<Exclude<StreamingChatResponseEvent, ChatCompletionErrorEvent>> => {
-    return new LangTracer(context.active()).startActiveSpan(
-      'complete',
-      ({ tracer: completeTracer }) => {
+    return withInferenceSpan('run_tools', () => {
       const isConversationUpdate = persist && !!predefinedConversationId;

       const conversationId = persist ? predefinedConversationId || v4() : '';

       if (persist && !isConversationUpdate && kibanaPublicUrl) {
         functionClient.registerInstruction(
           `This conversation will be persisted in Kibana and available at this url: ${
             kibanaPublicUrl + `/app/observabilityAIAssistant/conversations/${conversationId}`
           }.`
         );
       }

-      const kbUserInstructions$ = from(this.getKnowledgeBaseUserInstructions()).pipe(
-        shareReplay()
-      );
+      const kbUserInstructions$ = from(this.getKnowledgeBaseUserInstructions()).pipe(shareReplay());

       // if it is:
       // - a new conversation
       // - no predefined title is given
       // - we need to store the conversation
       // we generate a title
       // if not, we complete with an empty string
       const title$ =
         predefinedTitle || isConversationUpdate || !persist
           ? of(predefinedTitle || '').pipe(shareReplay())
           : getGeneratedTitle({
               messages: initialMessages,
               logger: this.dependencies.logger,
               chat: (name, chatParams) =>
+                withInferenceSpan('get_title', () =>
                   this.chat(name, {
                     ...chatParams,
                     simulateFunctionCalling,
                     connectorId,
                     signal,
                     stream: false,
-                    tracer: completeTracer,
-                  }),
+                  })
+                ),
             }).pipe(shareReplay());

       const systemMessage$ = kbUserInstructions$.pipe(
         map((kbUserInstructions) => {
           return getSystemMessageFromInstructions({
             applicationInstructions: functionClient.getInstructions(),
             kbUserInstructions,
             apiUserInstructions,
             availableFunctionNames: functionClient.getFunctions().map((fn) => fn.definition.name),
           });
         }),
         shareReplay()
       );

       // we continue the conversation here, after resolving both the materialized
       // messages and the knowledge base instructions
       const nextEvents$ = forkJoin([systemMessage$, kbUserInstructions$]).pipe(
         switchMap(([systemMessage, kbUserInstructions]) => {
           // if needed, inject a context function request here
           const contextRequest = functionClient.hasFunction(CONTEXT_FUNCTION_NAME)
             ? getContextFunctionRequestIfNeeded(initialMessages)
             : undefined;

-          return mergeOperator(
+          return withInferenceSpan('run_tools', () =>
+            mergeOperator(
               // if we have added a context function request, also emit
               // the messageAdd event for it, so we can notify the consumer
               // and add it to the conversation
@@ -293,151 +288,149 @@ export class ObservabilityAIAssistantClient {
             signal,
             logger: this.dependencies.logger,
             disableFunctions,
-            tracer: completeTracer,
             connectorId,
             simulateFunctionCalling,
           })
+          )
         );
       }),
       shareReplay()
     );

     const conversationWithMetaFields$ = from(
       this.getConversationWithMetaFields(conversationId)
     ).pipe(
       switchMap((conversation) => {
         if (isConversationUpdate && !conversation) {
           return throwError(() => createConversationNotFoundError());
         }

         if (conversation?._source && !this.isConversationOwnedByUser(conversation._source)) {
           return throwError(
             () => new Error('Cannot update conversation that is not owned by the user')
           );
         }

         return of(conversation);
       })
     );

     const output$ = conversationWithMetaFields$.pipe(
       switchMap((conversation) => {
         return mergeOperator(
           // get all the events from continuing the conversation
           nextEvents$,
           // wait until all dependencies have completed
           forkJoin([
             // get just the new messages
             nextEvents$.pipe(extractMessages()),
             // get just the title, and drop the token count events
             title$.pipe(filter((value): value is string => typeof value === 'string')),
             systemMessage$,
           ]).pipe(
             switchMap(([addedMessages, title, systemMessage]) => {
               const initialMessagesWithAddedMessages = initialMessages.concat(addedMessages);

               const lastMessage = last(initialMessagesWithAddedMessages);

               // if a function request is at the very end, close the stream to consumer
               // without persisting or updating the conversation. we need to wait
               // on the function response to have a valid conversation
               const isFunctionRequest = !!lastMessage?.message.function_call?.name;

               if (!persist || isFunctionRequest) {
                 return of();
               }

               if (isConversationUpdate && conversation) {
                 return from(
                   this.update(
                     conversationId,

                     merge(
                       {},

                       // base conversation without messages
                       omit(conversation._source, 'messages'),

                       // update messages and system message
                       { messages: initialMessagesWithAddedMessages, systemMessage },

                       // update title
                       {
                         conversation: {
                           title: title || conversation._source?.conversation.title,
                         },
                       }
                     )
                   )
                 ).pipe(
                   map((conversationUpdated): ConversationUpdateEvent => {
                     return {
                       conversation: conversationUpdated.conversation,
                       type: StreamingChatResponseEventType.ConversationUpdate,
                     };
                   })
                 );
               }

               return from(
                 this.create({
                   '@timestamp': new Date().toISOString(),
                   conversation: {
                     title,
                     id: conversationId,
                   },
                   public: !!isPublic,
                   labels: {},
                   numeric_labels: {},
                   systemMessage,
                   messages: initialMessagesWithAddedMessages,
                   archived: false,
                 })
               ).pipe(
                 map((conversationCreated): ConversationCreateEvent => {
                   return {
                     conversation: conversationCreated.conversation,
                     type: StreamingChatResponseEventType.ConversationCreate,
                   };
                 })
               );
             })
           )
         );
       })
     );

     return output$.pipe(
-      apmInstrumentation('complete'),
       catchError((error) => {
         this.dependencies.logger.error(error);
         return throwError(() => error);
       }),
       tap((event) => {
         switch (event.type) {
           case StreamingChatResponseEventType.MessageAdd:
             this.dependencies.logger.debug(
               () => `Added message: ${JSON.stringify(event.message)}`
             );
             break;

           case StreamingChatResponseEventType.ConversationCreate:
             this.dependencies.logger.debug(
               () => `Created conversation: ${JSON.stringify(event.conversation)}`
             );
             break;

           case StreamingChatResponseEventType.ConversationUpdate:
             this.dependencies.logger.debug(
               () => `Updated conversation: ${JSON.stringify(event.conversation)}`
             );
             break;
         }
       }),
       shareReplay()
     );
-      }
-    );
+    });
   };

   chat<TStream extends boolean>(
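Reduced to its essentials, the migration in the two hunks above looks like this (a sketch under the assumption of a hypothetical `runConversation$` pipeline; only `withInferenceSpan` is from this commit):

import { of, Observable } from 'rxjs';
import { withInferenceSpan } from '@kbn/inference-plugin/server';

// Illustrative stand-in for the conversation pipeline (not part of the PR).
const runConversation$ = (): Observable<string> => of('event');

// Before, every callee had to accept and forward a LangTracer instance:
//   new LangTracer(context.active()).startActiveSpan('complete', ({ tracer }) =>
//     runConversation$({ tracer }));
// After, the active OpenTelemetry context propagates implicitly, so no tracer
// argument needs to be threaded through:
const events$ = withInferenceSpan('run_tools', () => runConversation$());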
@@ -450,7 +443,6 @@ export class ObservabilityAIAssistantClient {
       functionCall,
       signal,
       simulateFunctionCalling,
-      tracer,
       stream,
     }: {
       systemMessage?: string;
@@ -460,7 +452,6 @@ export class ObservabilityAIAssistantClient {
       functionCall?: string;
       signal: AbortSignal;
       simulateFunctionCalling?: boolean;
-      tracer: LangTracer;
       stream: TStream;
     }
   ): TStream extends true
@@ -511,7 +502,6 @@ export class ObservabilityAIAssistantClient {
       })
     ).pipe(
       convertInferenceEventsToStreamingEvents(),
-      apmInstrumentation(name),
       failOnNonExistingFunctionCall({ functions }),
       tap((event) => {
         if (event.type === StreamingChatResponseEventType.ChatCompletionChunk) {
@@ -1,23 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

// import { init } from '@langtrase/typescript-sdk';

export function initLangtrace() {
  const apiKey = process.env.LANGTRACE_API_KEY;
  const apiHost = process.env.LANGTRACE_API_HOST;
  if (apiKey && apiHost) {
    // init({
    //   api_host: apiHost,
    //   api_key: apiKey,
    //   write_to_langtrace_cloud: true,
    //   disable_instrumentations: {
    //     only: [],
    //   },
    // });
  }
}
@@ -1,111 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import {
  InMemorySpanExporter,
  BasicTracerProvider,
  SimpleSpanProcessor,
  ReadableSpan,
} from '@opentelemetry/sdk-trace-base';
import { context } from '@opentelemetry/api';
import { LangTracer } from './lang_tracer';
import { lastValueFrom, of, throwError } from 'rxjs';

describe('langTracer', () => {
  const provider = new BasicTracerProvider();
  const memoryExporter = new InMemorySpanExporter();
  provider.addSpanProcessor(new SimpleSpanProcessor(memoryExporter));
  provider.register();

  beforeEach(() => {
    memoryExporter.reset();
  });

  describe('when creating a span against an observable', () => {
    let tracer: LangTracer;

    beforeEach(() => {
      tracer = new LangTracer(context.active());
    });

    it('calls the callback with the created span', async () => {
      const spanCallback = jest.fn().mockImplementation(() => of('my_value'));
      await lastValueFrom(tracer.startActiveSpan('my_span', spanCallback));

      const { span } = spanCallback.mock.calls[0][0] as {
        span: ReadableSpan;
      };

      expect(span.name).toEqual('my_span');

      expect(span.attributes).toEqual({
        'langtrace.sdk.name': '@langtrase/typescript-sdk',
        'langtrace.service.type': 'llm',
        'langtrace.service.version': 'unknown',
        'langtrace.version': '2.1.0',
      });

      // OK
      expect(span.status.code).toBe(1);
    });

    it('returns the observable', async () => {
      const spanCallback = jest.fn().mockImplementation(() => of('my_value'));
      const value = await lastValueFrom(tracer.startActiveSpan('my_span', spanCallback));

      expect(value).toEqual('my_value');
    });

    it('ends the span with an error status code when the observable', async () => {
      const spanCallback = jest
        .fn()
        .mockImplementation(() => throwError(() => new Error('Unexpected error')));

      const errorHandler = jest.fn();

      await lastValueFrom(tracer.startActiveSpan('my_span', spanCallback)).catch(errorHandler);

      const { span } = spanCallback.mock.calls[0][0] as {
        span: ReadableSpan;
      };

      expect(span.status).toEqual({
        // Error
        code: 2,
        message: 'Unexpected error',
      });
    });
  });

  describe('when creating a child span', () => {
    it('sets the first span as the parent of the second span', async () => {
      const tracer = new LangTracer(context.active());

      const value = await lastValueFrom(
        tracer.startActiveSpan('parent', ({ span, tracer: nextTracer }) => {
          return nextTracer.startActiveSpan('child', () => of('my_value'));
        })
      );

      expect(value).toEqual('my_value');

      const mappedSpans = memoryExporter.getFinishedSpans().map((span) => ({
        name: span.name,
        id: span.spanContext().spanId,
        parentId: span.parentSpanId,
      }));

      const parentSpan = mappedSpans.find((span) => span.name === 'parent');
      const childSpan = mappedSpans.find((span) => span.name === 'child');

      expect(parentSpan).not.toBeUndefined();

      expect(childSpan).not.toBeUndefined();

      expect(childSpan?.parentId).toEqual(parentSpan?.id);
    });
  });
});
@@ -1,72 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { LLMSpanAttributes } from '@langtrase/trace-attributes';
import { Context, Span, SpanKind, SpanStatusCode, trace } from '@opentelemetry/api';
import { finalize, Observable, tap } from 'rxjs';
import { getLangtraceSpanAttributes } from './get_langtrace_span_attributes';
import { getLangtraceTracer } from './get_langtrace_tracer';

type SpanCallback<T> = ({}: { span: Span; tracer: LangTracer }) => Observable<T>;

interface Options {
  attributes?: Partial<LLMSpanAttributes>;
  kind?: SpanKind;
}

export class LangTracer {
  private tracer = getLangtraceTracer();

  constructor(private context: Context) {}

  startActiveSpan<T>(name: string, callback: SpanCallback<T>): Observable<T>;
  startActiveSpan<T>(name: string, options: Options, callback: SpanCallback<T>): Observable<T>;
  startActiveSpan<T>(
    name: string,
    ...rest: [Options, SpanCallback<T>] | [SpanCallback<T>]
  ): Observable<T> {
    let [options, callback] = rest;

    if (typeof options === 'function') {
      callback = options;
      options = {};
    }

    const span = this.tracer.startSpan(
      name,
      {
        ...options,
        attributes: {
          ...getLangtraceSpanAttributes(),
          ...(options.attributes || {}),
        },
      },
      this.context
    );

    const nextContext = trace.setSpan(this.context, span);

    const nextTracer = new LangTracer(nextContext);

    return callback!({ span, tracer: nextTracer }).pipe(
      tap({
        error: (error) => {
          span.recordException(error);
          span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
          span.end();
        },
        complete: () => {
          span.setStatus({ code: SpanStatusCode.OK });
          span.end();
        },
      }),
      finalize(() => {
        span.end();
      })
    );
  }
}
@@ -1,55 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import apm from 'elastic-apm-node';
import {
  catchError,
  ignoreElements,
  merge,
  OperatorFunction,
  shareReplay,
  tap,
  last,
  throwError,
  finalize,
} from 'rxjs';
import type { StreamingChatResponseEvent } from '../../../../common/conversation_complete';

export function apmInstrumentation<T extends StreamingChatResponseEvent>(
  name: string
): OperatorFunction<T, T> {
  return (source$) => {
    const span = apm.startSpan(name);

    if (!span) {
      return source$;
    }
    span?.addLabels({
      plugin: 'observability_ai_assistant',
    });

    const shared$ = source$.pipe(shareReplay());

    return merge(
      shared$,
      shared$.pipe(
        last(),
        tap(() => {
          span?.setOutcome('success');
        }),
        catchError((error) => {
          span?.setOutcome('failure');
          return throwError(() => error);
        }),
        finalize(() => {
          span?.end();
        }),
        ignoreElements()
      )
    );
  };
}
@@ -21,6 +21,7 @@ import {
   switchMap,
   throwError,
 } from 'rxjs';
+import { withExecuteToolSpan } from '@kbn/inference-plugin/server';
 import { CONTEXT_FUNCTION_NAME } from '../../../functions/context';
 import { createFunctionNotFoundError, Message, MessageRole } from '../../../../common';
 import {
@@ -34,7 +35,6 @@ import { emitWithConcatenatedMessage } from '../../../../common/utils/emit_with_
 import type { ChatFunctionClient } from '../../chat_function_client';
 import type { AutoAbortedChatFunction } from '../../types';
 import { createServerSideFunctionResponseError } from '../../util/create_server_side_function_response_error';
-import { LangTracer } from '../instrumentation/lang_tracer';
 import { catchFunctionNotFoundError } from './catch_function_not_found_error';
 import { extractMessages } from './extract_messages';

@@ -48,7 +48,6 @@ function executeFunctionAndCatchError({
   chat,
   signal,
   logger,
-  tracer,
   connectorId,
   simulateFunctionCalling,
 }: {
@@ -59,21 +58,19 @@ function executeFunctionAndCatchError({
   chat: AutoAbortedChatFunction;
   signal: AbortSignal;
   logger: Logger;
-  tracer: LangTracer;
   connectorId: string;
   simulateFunctionCalling: boolean;
 }): Observable<MessageOrChatEvent> {
   // hide token count events from functions to prevent them from
   // having to deal with it as well

-  return tracer.startActiveSpan(`execute_function ${name}`, ({ tracer: nextTracer }) => {
-    const executeFunctionResponse$ = from(
+  const executeFunctionResponse$ = from(
+    withExecuteToolSpan({ name, input: args }, () =>
       functionClient.executeFunction({
         name,
         chat: (operationName, params) => {
           return chat(operationName, {
             ...params,
-            tracer: nextTracer,
             connectorId,
           });
         },
@@ -84,48 +81,47 @@ function executeFunctionAndCatchError({
         connectorId,
         simulateFunctionCalling,
       })
-    );
+    )
+  );

-    return executeFunctionResponse$.pipe(
-      catchError((error) => {
-        logger.error(`Encountered error running function ${name}: ${JSON.stringify(error)}`);
-        // We want to catch the error only when a promise occurs
-        // if it occurs in the Observable, we cannot easily recover
-        // from it because the function may have already emitted
-        // values which could lead to an invalid conversation state,
-        // so in that case we let the stream fail.
-        return of(createServerSideFunctionResponseError({ name, error }));
-      }),
-      switchMap((response) => {
-        if (isObservable(response)) {
-          return response;
-        }
+  return executeFunctionResponse$.pipe(
+    catchError((error) => {
+      logger.error(`Encountered error running function ${name}: ${JSON.stringify(error)}`);
+      // We want to catch the error only when a promise occurs
+      // if it occurs in the Observable, we cannot easily recover
+      // from it because the function may have already emitted
+      // values which could lead to an invalid conversation state,
+      // so in that case we let the stream fail.
+      return of(createServerSideFunctionResponseError({ name, error }));
+    }),
+    switchMap((response) => {
+      if (isObservable(response)) {
+        return response;
+      }

-        // is messageAdd event
-        if ('type' in response) {
-          return of(response);
-        }
+      // is messageAdd event
+      if ('type' in response) {
+        return of(response);
+      }

-        const encoded = encode(JSON.stringify(response.content || {}));
+      const encoded = encode(JSON.stringify(response.content || {}));

-        const exceededTokenLimit = encoded.length >= MAX_FUNCTION_RESPONSE_TOKEN_COUNT;
+      const exceededTokenLimit = encoded.length >= MAX_FUNCTION_RESPONSE_TOKEN_COUNT;

-        return of(
-          createFunctionResponseMessage({
-            name,
-            content: exceededTokenLimit
-              ? {
-                  message:
-                    'Function response exceeded the maximum length allowed and was truncated',
-                  truncated: decode(take(encoded, MAX_FUNCTION_RESPONSE_TOKEN_COUNT)),
-                }
-              : response.content,
-            data: response.data,
-          })
-        );
-      })
-    );
-  });
+      return of(
+        createFunctionResponseMessage({
+          name,
+          content: exceededTokenLimit
+            ? {
+                message: 'Function response exceeded the maximum length allowed and was truncated',
+                truncated: decode(take(encoded, MAX_FUNCTION_RESPONSE_TOKEN_COUNT)),
+              }
+            : response.content,
+          data: response.data,
+        })
+      );
+    })
+  );
 }

 function getFunctionDefinitions({
@@ -177,7 +173,6 @@ export function continueConversation({
   kbUserInstructions,
   logger,
   disableFunctions,
-  tracer,
   connectorId,
   simulateFunctionCalling,
 }: {
@@ -194,7 +189,6 @@ export function continueConversation({
   | {
       except: string[];
     };
-  tracer: LangTracer;
   connectorId: string;
   simulateFunctionCalling: boolean;
 }): Observable<MessageOrChatEvent> {
@@ -223,7 +217,6 @@ export function continueConversation({
     return chat(operationName, {
       messages: initialMessages,
       functions: definitions,
-      tracer,
       connectorId,
       stream: true,
     }).pipe(emitWithConcatenatedMessage(), catchFunctionNotFoundError(functionLimitExceeded));
@@ -299,7 +292,6 @@ export function continueConversation({
       messages: initialMessages,
       signal,
       logger,
-      tracer,
       connectorId,
       simulateFunctionCalling,
     });
@@ -327,7 +319,6 @@ export function continueConversation({
       apiUserInstructions,
       logger,
       disableFunctions,
-      tracer,
       connectorId,
       simulateFunctionCalling,
     });
@@ -7,7 +7,6 @@
 import { filter, lastValueFrom, of, throwError } from 'rxjs';
 import { ChatCompleteResponse } from '@kbn/inference-common';
 import { Message, MessageRole } from '../../../../common';
-import { LangTracer } from '../instrumentation/lang_tracer';
 import { TITLE_CONVERSATION_FUNCTION_NAME, getGeneratedTitle } from './get_generated_title';

 describe('getGeneratedTitle', () => {
@@ -54,9 +53,6 @@ describe('getGeneratedTitle', () => {
       error: jest.fn(),
     },
     messages,
-    tracer: {
-      startActiveSpan: jest.fn(),
-    } as unknown as LangTracer,
     ...options,
   });

@@ -132,9 +128,6 @@ describe('getGeneratedTitle', () => {
       chat: chatSpy,
       logger,
       messages,
-      tracer: {
-        startActiveSpan: jest.fn(),
-      } as unknown as LangTracer,
     });

     const title = await lastValueFrom(title$);
@@ -10,7 +10,6 @@ import { Logger } from '@kbn/logging';
 import { ChatCompleteResponse } from '@kbn/inference-common';
 import type { ObservabilityAIAssistantClient } from '..';
 import { Message, MessageRole } from '../../../../common';
-import { LangTracer } from '../instrumentation/lang_tracer';

 export const TITLE_CONVERSATION_FUNCTION_NAME = 'title_conversation';
 export const TITLE_SYSTEM_MESSAGE =
@@ -27,12 +26,10 @@ export function getGeneratedTitle({
   messages,
   chat,
   logger,
-  tracer,
 }: {
   messages: Message[];
   chat: ChatFunctionWithoutConnectorAndTokenCount;
   logger: Pick<Logger, 'debug' | 'error'>;
-  tracer: LangTracer;
 }): Observable<string> {
   return from(
     chat('generate_title', {
@@ -65,7 +62,6 @@ export function getGeneratedTitle({
       },
     ],
     functionCall: TITLE_CONVERSATION_FUNCTION_NAME,
-    tracer,
     stream: false,
   })
 ).pipe(
yarn.lock
@@ -138,6 +138,11 @@
   resolved "https://registry.yarnpkg.com/@appland/sql-parser/-/sql-parser-1.5.1.tgz#331d644364899858ba7aa6e884e2492596990626"
   integrity sha512-R2FBHUOdzdBPUCCiL6WvXT9Fu+Xaj89exa1g+wMlatIe5z6vqitzLkY5a9zGDL3IByTiwbR0jiYuvFMfhp1Q+Q==

+"@arizeai/openinference-semantic-conventions@^1.1.0":
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/@arizeai/openinference-semantic-conventions/-/openinference-semantic-conventions-1.1.0.tgz#8bb41a4e213295ba9fc21faf1b7d282bf2d898ef"
+  integrity sha512-rxRYnUWjt28DlVXnWukcQAyGhPYQ3ckmKrjEdUjmUNnvvv4k8Dabbp5h6AEjNy7YzN9jL2smNRJnbLIVtkrLEg==
+
 "@assemblyscript/loader@^0.10.1":
   version "0.10.1"
   resolved "https://registry.yarnpkg.com/@assemblyscript/loader/-/loader-0.10.1.tgz#70e45678f06c72fa2e350e8553ec4a4d72b92e06"
@@ -7599,6 +7604,10 @@
   version "0.0.0"
   uid ""

+"@kbn/telemetry-config@link:src/platform/packages/shared/kbn-telemetry-config":
+  version "0.0.0"
+  uid ""
+
 "@kbn/telemetry-management-section-plugin@link:src/platform/plugins/shared/telemetry_management_section":
   version "0.0.0"
   uid ""
@@ -7615,6 +7624,10 @@
   version "0.0.0"
   uid ""

+"@kbn/telemetry@link:src/platform/packages/shared/kbn-telemetry":
+  version "0.0.0"
+  uid ""
+
 "@kbn/test-eui-helpers@link:src/platform/packages/private/kbn-test-eui-helpers":
   version "0.0.0"
   uid ""
@@ -7699,6 +7712,10 @@
   version "0.0.0"
   uid ""

+"@kbn/tracing@link:src/platform/packages/shared/kbn-tracing":
+  version "0.0.0"
+  uid ""
+
 "@kbn/transform-plugin@link:x-pack/platform/plugins/private/transform":
   version "0.0.0"
   uid ""
@@ -8885,6 +8902,13 @@
   resolved "https://registry.yarnpkg.com/@openfeature/web-sdk/-/web-sdk-1.5.0.tgz#23ed7acc67ff8f67c3d46f686193f19889b8a482"
   integrity sha512-AK9A4X6vRKQf/OvCue1LKM6thSDqbx/Sf3dHBTZ6p7DfpIKsD8mzCTgMhb5jukVlqwdKMlewU/rlYTYqqfnnTw==

+"@opentelemetry/api-logs@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.200.0.tgz#f9015fd844920c13968715b3cdccf5a4d4ff907e"
+  integrity sha512-IKJBQxh91qJ+3ssRly5hYEJ8NDHu9oY/B1PXVSCWf7zytmYO9RNLB0Ox9XQ/fJ8m6gY6Q6NtBWlmXfaXt5Uc4Q==
+  dependencies:
+    "@opentelemetry/api" "^1.3.0"
+
 "@opentelemetry/api-logs@0.53.0":
   version "0.53.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.53.0.tgz#c478cbd8120ec2547b64edfa03a552cfe42170be"
@@ -8899,7 +8923,7 @@
   dependencies:
     "@opentelemetry/api" "^1.0.0"

-"@opentelemetry/api@1.9.0", "@opentelemetry/api@1.x", "@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.1.0", "@opentelemetry/api@^1.4.1":
+"@opentelemetry/api@1.9.0", "@opentelemetry/api@1.x", "@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.3.0", "@opentelemetry/api@^1.4.1", "@opentelemetry/api@^1.9.0":
   version "1.9.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe"
   integrity sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==
@@ -8909,6 +8933,11 @@
   resolved "https://registry.yarnpkg.com/@opentelemetry/context-async-hooks/-/context-async-hooks-1.26.0.tgz#fa92f722cf685685334bba95f258d3ef9fce60f6"
   integrity sha512-HedpXXYzzbaoutw6DFLWLDket2FwLkLpil4hGCZ1xYEIMTcivdfwEOISgdbLEWyG3HW52gTq2V9mOVJrONgiwg==

+"@opentelemetry/context-async-hooks@2.0.0", "@opentelemetry/context-async-hooks@^2.0.0":
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/context-async-hooks/-/context-async-hooks-2.0.0.tgz#c98a727238ca199cda943780acf6124af8d8cd80"
+  integrity sha512-IEkJGzK1A9v3/EHjXh3s2IiFc6L4jfK+lNgKVgUjeUJQRRhnVFMIO3TAvKwonm9O1HebCuoOt98v8bZW7oVQHA==
+
 "@opentelemetry/core@1.26.0", "@opentelemetry/core@^1.11.0":
   version "1.26.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.26.0.tgz#7d84265aaa850ed0ca5813f97d831155be42b328"
@@ -8930,6 +8959,63 @@
   dependencies:
     "@opentelemetry/semantic-conventions" "1.8.0"

+"@opentelemetry/core@2.0.0", "@opentelemetry/core@^2.0.0":
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-2.0.0.tgz#37e9f0e9ddec4479b267aca6f32d88757c941b3a"
+  integrity sha512-SLX36allrcnVaPYG3R78F/UZZsBsvbc7lMCLx37LyH5MJ1KAAZ2E3mW9OAD3zGz0G8q/BtoS5VUrjzDydhD6LQ==
+  dependencies:
+    "@opentelemetry/semantic-conventions" "^1.29.0"
+
+"@opentelemetry/exporter-logs-otlp-grpc@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-logs-otlp-grpc/-/exporter-logs-otlp-grpc-0.200.0.tgz#693e0f7041c533061d0689ab43d64d039078ee7a"
+  integrity sha512-+3MDfa5YQPGM3WXxW9kqGD85Q7s9wlEMVNhXXG7tYFLnIeaseUt9YtCeFhEDFzfEktacdFpOtXmJuNW8cHbU5A==
+  dependencies:
+    "@grpc/grpc-js" "^1.7.1"
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-grpc-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/sdk-logs" "0.200.0"
+
+"@opentelemetry/exporter-logs-otlp-http@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-logs-otlp-http/-/exporter-logs-otlp-http-0.200.0.tgz#3a99c9554f871b5c6cddb8716316c125d4edca6c"
+  integrity sha512-KfWw49htbGGp9s8N4KI8EQ9XuqKJ0VG+yVYVYFiCYSjEV32qpQ5qZ9UZBzOZ6xRb+E16SXOSCT3RkqBVSABZ+g==
+  dependencies:
+    "@opentelemetry/api-logs" "0.200.0"
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/sdk-logs" "0.200.0"
+
+"@opentelemetry/exporter-logs-otlp-proto@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-logs-otlp-proto/-/exporter-logs-otlp-proto-0.200.0.tgz#53573ea43bce4129bcb18bda172a95c6535bb1a2"
+  integrity sha512-GmahpUU/55hxfH4TP77ChOfftADsCq/nuri73I/AVLe2s4NIglvTsaACkFVZAVmnXXyPS00Fk3x27WS3yO07zA==
+  dependencies:
+    "@opentelemetry/api-logs" "0.200.0"
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-logs" "0.200.0"
+    "@opentelemetry/sdk-trace-base" "2.0.0"
+
+"@opentelemetry/exporter-metrics-otlp-grpc@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-metrics-otlp-grpc/-/exporter-metrics-otlp-grpc-0.200.0.tgz#f9a4d209083a6a12489c4ae4c20e6923a1780c88"
+  integrity sha512-uHawPRvKIrhqH09GloTuYeq2BjyieYHIpiklOvxm9zhrCL2eRsnI/6g9v2BZTVtGp8tEgIa7rCQ6Ltxw6NBgew==
+  dependencies:
+    "@grpc/grpc-js" "^1.7.1"
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/exporter-metrics-otlp-http" "0.200.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-grpc-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-metrics" "2.0.0"
+
 "@opentelemetry/exporter-metrics-otlp-grpc@^0.34.0":
   version "0.34.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-metrics-otlp-grpc/-/exporter-metrics-otlp-grpc-0.34.0.tgz#3a84f4e2c21ce5c9dce507ff36715cc2536bfa87"
@@ -8943,6 +9029,17 @@
     "@opentelemetry/resources" "1.8.0"
     "@opentelemetry/sdk-metrics" "1.8.0"

+"@opentelemetry/exporter-metrics-otlp-http@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-metrics-otlp-http/-/exporter-metrics-otlp-http-0.200.0.tgz#daa28a2b868bacf02efb153fa8780d078807919e"
+  integrity sha512-5BiR6i8yHc9+qW7F6LqkuUnIzVNA7lt0qRxIKcKT+gq3eGUPHZ3DY29sfxI3tkvnwMgtnHDMNze5DdxW39HsAw==
+  dependencies:
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-metrics" "2.0.0"
+
 "@opentelemetry/exporter-metrics-otlp-http@0.34.0":
   version "0.34.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-metrics-otlp-http/-/exporter-metrics-otlp-http-0.34.0.tgz#f890a83f695b60719e54492e72bcbfa21d2968ee"
@@ -8954,6 +9051,27 @@
     "@opentelemetry/resources" "1.8.0"
     "@opentelemetry/sdk-metrics" "1.8.0"

+"@opentelemetry/exporter-metrics-otlp-proto@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-metrics-otlp-proto/-/exporter-metrics-otlp-proto-0.200.0.tgz#5a494e2df8703be2f1f5f01629dfd48a6d39e5a6"
+  integrity sha512-E+uPj0yyvz81U9pvLZp3oHtFrEzNSqKGVkIViTQY1rH3TOobeJPSpLnTVXACnCwkPR5XeTvPnK3pZ2Kni8AFMg==
+  dependencies:
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/exporter-metrics-otlp-http" "0.200.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-metrics" "2.0.0"
+
+"@opentelemetry/exporter-prometheus@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-prometheus/-/exporter-prometheus-0.200.0.tgz#8f3dd3a8903447563a5be30ddf9e7bfb1e7ad127"
+  integrity sha512-ZYdlU9r0USuuYppiDyU2VFRA0kFl855ylnb3N/2aOlXrbA4PMCznen7gmPbetGQu7pz8Jbaf4fwvrDnVdQQXSw==
+  dependencies:
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-metrics" "2.0.0"
+
 "@opentelemetry/exporter-prometheus@^0.31.0":
   version "0.31.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-prometheus/-/exporter-prometheus-0.31.0.tgz#b0696be42542a961ec1145f3754a845efbda942e"
@@ -8963,6 +9081,30 @@
     "@opentelemetry/core" "1.5.0"
     "@opentelemetry/sdk-metrics-base" "0.31.0"

+"@opentelemetry/exporter-trace-otlp-grpc@0.200.0", "@opentelemetry/exporter-trace-otlp-grpc@^0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-trace-otlp-grpc/-/exporter-trace-otlp-grpc-0.200.0.tgz#e259367f324c01342bf3f0175c52d9f4e61a345f"
+  integrity sha512-hmeZrUkFl1YMsgukSuHCFPYeF9df0hHoKeHUthRKFCxiURs+GwF1VuabuHmBMZnjTbsuvNjOB+JSs37Csem/5Q==
+  dependencies:
+    "@grpc/grpc-js" "^1.7.1"
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-grpc-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-trace-base" "2.0.0"
+
+"@opentelemetry/exporter-trace-otlp-http@0.200.0", "@opentelemetry/exporter-trace-otlp-http@^0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-trace-otlp-http/-/exporter-trace-otlp-http-0.200.0.tgz#ddf2bbdff5157a89f64aad6dad44c394872d589d"
+  integrity sha512-Goi//m/7ZHeUedxTGVmEzH19NgqJY+Bzr6zXo1Rni1+hwqaksEyJ44gdlEMREu6dzX1DlAaH/qSykSVzdrdafA==
+  dependencies:
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-trace-base" "2.0.0"
+
 "@opentelemetry/exporter-trace-otlp-http@0.53.0":
   version "0.53.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-trace-otlp-http/-/exporter-trace-otlp-http-0.53.0.tgz#48e46c4573a35d31c14e6bc44635923e32970b9a"
@@ -8974,6 +9116,46 @@
     "@opentelemetry/resources" "1.26.0"
     "@opentelemetry/sdk-trace-base" "1.26.0"

+"@opentelemetry/exporter-trace-otlp-proto@0.200.0", "@opentelemetry/exporter-trace-otlp-proto@^0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-trace-otlp-proto/-/exporter-trace-otlp-proto-0.200.0.tgz#f3f149e6bad8c899c8f1e5c58e5d855ce07f7319"
+  integrity sha512-V9TDSD3PjK1OREw2iT9TUTzNYEVWJk4Nhodzhp9eiz4onDMYmPy3LaGbPv81yIR6dUb/hNp/SIhpiCHwFUq2Vg==
+  dependencies:
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-trace-base" "2.0.0"
+
+"@opentelemetry/exporter-zipkin@2.0.0":
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-zipkin/-/exporter-zipkin-2.0.0.tgz#6aca658d64f5e8bc079b07ee0a3076c4ca328ec9"
+  integrity sha512-icxaKZ+jZL/NHXX8Aru4HGsrdhK0MLcuRXkX5G5IRmCgoRLw+Br6I/nMVozX2xjGGwV7hw2g+4Slj8K7s4HbVg==
+  dependencies:
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-trace-base" "2.0.0"
+    "@opentelemetry/semantic-conventions" "^1.29.0"
+
+"@opentelemetry/instrumentation@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/instrumentation/-/instrumentation-0.200.0.tgz#29d1d4f70cbf0cb1ca9f2f78966379b0be96bddc"
+  integrity sha512-pmPlzfJd+vvgaZd/reMsC8RWgTXn2WY1OWT5RT42m3aOn5532TozwXNDhg1vzqJ+jnvmkREcdLr27ebJEQt0Jg==
+  dependencies:
+    "@opentelemetry/api-logs" "0.200.0"
+    "@types/shimmer" "^1.2.0"
+    import-in-the-middle "^1.8.1"
+    require-in-the-middle "^7.1.1"
+    shimmer "^1.2.1"
+
+"@opentelemetry/otlp-exporter-base@0.200.0", "@opentelemetry/otlp-exporter-base@^0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.200.0.tgz#906bcf2e59815c8ded732d328f6bc060fb7b0459"
+  integrity sha512-IxJgA3FD7q4V6gGq4bnmQM5nTIyMDkoGFGrBrrDjB6onEiq1pafma55V+bHvGYLWvcqbBbRfezr1GED88lacEQ==
+  dependencies:
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+
 "@opentelemetry/otlp-exporter-base@0.34.0":
   version "0.34.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.34.0.tgz#c6020b63590d4b8ac3833eda345a6f582fa014b1"
@@ -8989,6 +9171,16 @@
     "@opentelemetry/core" "1.26.0"
     "@opentelemetry/otlp-transformer" "0.53.0"

+"@opentelemetry/otlp-grpc-exporter-base@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-grpc-exporter-base/-/otlp-grpc-exporter-base-0.200.0.tgz#cfc6cfd4def7d47f84e43d438d75cb463c67bf0d"
+  integrity sha512-CK2S+bFgOZ66Bsu5hlDeOX6cvW5FVtVjFFbWuaJP0ELxJKBB6HlbLZQ2phqz/uLj1cWap5xJr/PsR3iGoB7Vqw==
+  dependencies:
+    "@grpc/grpc-js" "^1.7.1"
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/otlp-exporter-base" "0.200.0"
+    "@opentelemetry/otlp-transformer" "0.200.0"
+
 "@opentelemetry/otlp-grpc-exporter-base@0.34.0":
   version "0.34.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-grpc-exporter-base/-/otlp-grpc-exporter-base-0.34.0.tgz#edc3a9d8449f48e47c63c2f73e2c63c5a2f25102"
@@ -8999,6 +9191,19 @@
     "@opentelemetry/core" "1.8.0"
     "@opentelemetry/otlp-exporter-base" "0.34.0"

+"@opentelemetry/otlp-transformer@0.200.0":
+  version "0.200.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-transformer/-/otlp-transformer-0.200.0.tgz#19afb2274554cb74e2d2b7e32a54a7f7d83c8642"
+  integrity sha512-+9YDZbYybOnv7sWzebWOeK6gKyt2XE7iarSyBFkwwnP559pEevKOUD8NyDHhRjCSp13ybh9iVXlMfcj/DwF/yw==
+  dependencies:
+    "@opentelemetry/api-logs" "0.200.0"
+    "@opentelemetry/core" "2.0.0"
+    "@opentelemetry/resources" "2.0.0"
+    "@opentelemetry/sdk-logs" "0.200.0"
+    "@opentelemetry/sdk-metrics" "2.0.0"
+    "@opentelemetry/sdk-trace-base" "2.0.0"
+    protobufjs "^7.3.0"
+
 "@opentelemetry/otlp-transformer@0.34.0":
   version "0.34.0"
   resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-transformer/-/otlp-transformer-0.34.0.tgz#71023706233c7bc6c3cdcf954c749fea9338084c"
@ -9029,6 +9234,13 @@
|
|||
dependencies:
|
||||
"@opentelemetry/core" "1.26.0"
|
||||
|
||||
"@opentelemetry/propagator-b3@2.0.0":
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/propagator-b3/-/propagator-b3-2.0.0.tgz#1b6244ef2d08a70672521a9aff56e485bd607c17"
|
||||
integrity sha512-blx9S2EI49Ycuw6VZq+bkpaIoiJFhsDuvFGhBIoH3vJ5oYjJ2U0s3fAM5jYft99xVIAv6HqoPtlP9gpVA2IZtA==
|
||||
dependencies:
|
||||
"@opentelemetry/core" "2.0.0"
|
||||
|
||||
"@opentelemetry/propagator-jaeger@1.26.0":
|
||||
version "1.26.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/propagator-jaeger/-/propagator-jaeger-1.26.0.tgz#096ac03d754204921cd5a886c77b5c9bd4677cd7"
|
||||
|
@ -9036,7 +9248,14 @@
|
|||
dependencies:
|
||||
"@opentelemetry/core" "1.26.0"
|
||||
|
||||
"@opentelemetry/resources@1.26.0", "@opentelemetry/resources@^1.4.0":
|
||||
"@opentelemetry/propagator-jaeger@2.0.0":
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/propagator-jaeger/-/propagator-jaeger-2.0.0.tgz#288d6767dea554db684fd5e144ad8653d83fd2ea"
|
||||
integrity sha512-Mbm/LSFyAtQKP0AQah4AfGgsD+vsZcyreZoQ5okFBk33hU7AquU4TltgyL9dvaO8/Zkoud8/0gEvwfOZ5d7EPA==
|
||||
dependencies:
|
||||
"@opentelemetry/core" "2.0.0"
|
||||
|
||||
"@opentelemetry/resources@1.26.0":
|
||||
version "1.26.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.26.0.tgz#da4c7366018bd8add1f3aa9c91c6ac59fd503cef"
|
||||
integrity sha512-CPNYchBE7MBecCSVy0HKpUISEeJOniWqcHaAHpmasZ3j9o6V3AyBzhRc90jdmemq0HOxDr6ylhUbDhBqqPpeNw==
|
||||
|
@ -9060,6 +9279,23 @@
|
|||
"@opentelemetry/core" "1.8.0"
|
||||
"@opentelemetry/semantic-conventions" "1.8.0"
|
||||
|
||||
"@opentelemetry/resources@2.0.0", "@opentelemetry/resources@^2.0.0":
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-2.0.0.tgz#15c04794c32b7d0b3c7589225ece6ae9bba25989"
|
||||
integrity sha512-rnZr6dML2z4IARI4zPGQV4arDikF/9OXZQzrC01dLmn0CZxU5U5OLd/m1T7YkGRj5UitjeoCtg/zorlgMQcdTg==
|
||||
dependencies:
|
||||
"@opentelemetry/core" "2.0.0"
|
||||
"@opentelemetry/semantic-conventions" "^1.29.0"
|
||||
|
||||
"@opentelemetry/sdk-logs@0.200.0":
|
||||
version "0.200.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-logs/-/sdk-logs-0.200.0.tgz#893d86cefa6f2c02a7cd03d5cb4a959eed3653d1"
|
||||
integrity sha512-VZG870063NLfObmQQNtCVcdXXLzI3vOjjrRENmU37HYiPFa0ZXpXVDsTD02Nh3AT3xYJzQaWKl2X2lQ2l7TWJA==
|
||||
dependencies:
|
||||
"@opentelemetry/api-logs" "0.200.0"
|
||||
"@opentelemetry/core" "2.0.0"
|
||||
"@opentelemetry/resources" "2.0.0"
|
||||
|
||||
"@opentelemetry/sdk-logs@0.53.0":
|
||||
version "0.53.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-logs/-/sdk-logs-0.53.0.tgz#ec8b69278c4e683c13c58ed4285a47c27f5799c6"
|
||||
|
@ -9096,7 +9332,43 @@
|
|||
"@opentelemetry/resources" "1.8.0"
|
||||
lodash.merge "4.6.2"
|
||||
|
||||
"@opentelemetry/sdk-trace-base@1.26.0", "@opentelemetry/sdk-trace-base@^1.24.0":
|
||||
"@opentelemetry/sdk-metrics@2.0.0":
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-metrics/-/sdk-metrics-2.0.0.tgz#aba86060bc363c661ca286339c5b04590e298b69"
|
||||
integrity sha512-Bvy8QDjO05umd0+j+gDeWcTaVa1/R2lDj/eOvjzpm8VQj1K1vVZJuyjThpV5/lSHyYW2JaHF2IQ7Z8twJFAhjA==
|
||||
dependencies:
|
||||
"@opentelemetry/core" "2.0.0"
|
||||
"@opentelemetry/resources" "2.0.0"
|
||||
|
||||
"@opentelemetry/sdk-node@^0.200.0":
|
||||
version "0.200.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-node/-/sdk-node-0.200.0.tgz#033d0641da628f1537cf7442f41cd77c048923ae"
|
||||
integrity sha512-S/YSy9GIswnhYoDor1RusNkmRughipvTCOQrlF1dzI70yQaf68qgf5WMnzUxdlCl3/et/pvaO75xfPfuEmCK5A==
|
||||
dependencies:
|
||||
"@opentelemetry/api-logs" "0.200.0"
|
||||
"@opentelemetry/core" "2.0.0"
|
||||
"@opentelemetry/exporter-logs-otlp-grpc" "0.200.0"
|
||||
"@opentelemetry/exporter-logs-otlp-http" "0.200.0"
|
||||
"@opentelemetry/exporter-logs-otlp-proto" "0.200.0"
|
||||
"@opentelemetry/exporter-metrics-otlp-grpc" "0.200.0"
|
||||
"@opentelemetry/exporter-metrics-otlp-http" "0.200.0"
|
||||
"@opentelemetry/exporter-metrics-otlp-proto" "0.200.0"
|
||||
"@opentelemetry/exporter-prometheus" "0.200.0"
|
||||
"@opentelemetry/exporter-trace-otlp-grpc" "0.200.0"
|
||||
"@opentelemetry/exporter-trace-otlp-http" "0.200.0"
|
||||
"@opentelemetry/exporter-trace-otlp-proto" "0.200.0"
|
||||
"@opentelemetry/exporter-zipkin" "2.0.0"
|
||||
"@opentelemetry/instrumentation" "0.200.0"
|
||||
"@opentelemetry/propagator-b3" "2.0.0"
|
||||
"@opentelemetry/propagator-jaeger" "2.0.0"
|
||||
"@opentelemetry/resources" "2.0.0"
|
||||
"@opentelemetry/sdk-logs" "0.200.0"
|
||||
"@opentelemetry/sdk-metrics" "2.0.0"
|
||||
"@opentelemetry/sdk-trace-base" "2.0.0"
|
||||
"@opentelemetry/sdk-trace-node" "2.0.0"
|
||||
"@opentelemetry/semantic-conventions" "^1.29.0"
|
||||
|
||||
"@opentelemetry/sdk-trace-base@1.26.0":
|
||||
version "1.26.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.26.0.tgz#0c913bc6d2cfafd901de330e4540952269ae579c"
|
||||
integrity sha512-olWQldtvbK4v22ymrKLbIcBi9L2SpMO84sCPY54IVsJhP9fRsxJT194C/AVaAuJzLE30EdhhM1VmvVYR7az+cw==
|
||||
|
@ -9114,6 +9386,15 @@
|
|||
"@opentelemetry/resources" "1.8.0"
|
||||
"@opentelemetry/semantic-conventions" "1.8.0"
|
||||
|
||||
"@opentelemetry/sdk-trace-base@2.0.0", "@opentelemetry/sdk-trace-base@^2.0.0":
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.0.0.tgz#ebc06ea7537dea62f3882f8236c1234f4faf6b23"
|
||||
integrity sha512-qQnYdX+ZCkonM7tA5iU4fSRsVxbFGml8jbxOgipRGMFHKaXKHQ30js03rTobYjKjIfnOsZSbHKWF0/0v0OQGfw==
|
||||
dependencies:
|
||||
"@opentelemetry/core" "2.0.0"
|
||||
"@opentelemetry/resources" "2.0.0"
|
||||
"@opentelemetry/semantic-conventions" "^1.29.0"
|
||||
|
||||
"@opentelemetry/sdk-trace-node@1.26.0":
|
||||
version "1.26.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-node/-/sdk-trace-node-1.26.0.tgz#169ef4fc058e82a12460da18cedaf6e4615fc617"
|
||||
|
@ -9126,7 +9407,16 @@
|
|||
"@opentelemetry/sdk-trace-base" "1.26.0"
|
||||
semver "^7.5.2"
|
||||
|
||||
"@opentelemetry/semantic-conventions@1.27.0", "@opentelemetry/semantic-conventions@^1.4.0":
|
||||
"@opentelemetry/sdk-trace-node@2.0.0", "@opentelemetry/sdk-trace-node@^2.0.0":
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-node/-/sdk-trace-node-2.0.0.tgz#ef9f8ab77ccb41a9c9ff272f6bf4bb6999491f5b"
|
||||
integrity sha512-omdilCZozUjQwY3uZRBwbaRMJ3p09l4t187Lsdf0dGMye9WKD4NGcpgZRvqhI1dwcH6og+YXQEtoO9Wx3ykilg==
|
||||
dependencies:
|
||||
"@opentelemetry/context-async-hooks" "2.0.0"
|
||||
"@opentelemetry/core" "2.0.0"
|
||||
"@opentelemetry/sdk-trace-base" "2.0.0"
|
||||
|
||||
"@opentelemetry/semantic-conventions@1.27.0":
|
||||
version "1.27.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz#1a857dcc95a5ab30122e04417148211e6f945e6c"
|
||||
integrity sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==
|
||||
|
@ -9141,6 +9431,11 @@
|
|||
resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.8.0.tgz#fe2aa90e6df050a11cd57f5c0f47b0641fd2cad3"
|
||||
integrity sha512-TYh1MRcm4JnvpqtqOwT9WYaBYY4KERHdToxs/suDTLviGRsQkIjS5yYROTYTSJQUnYLOn/TuOh5GoMwfLSU+Ew==
|
||||
|
||||
"@opentelemetry/semantic-conventions@^1.29.0", "@opentelemetry/semantic-conventions@^1.32.0":
|
||||
version "1.32.0"
|
||||
resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.32.0.tgz#a15e8f78f32388a7e4655e7f539570e40958ca3f"
|
||||
integrity sha512-s0OpmpQFSfMrmedAn9Lhg4KWJELHCU6uU9dtIJ28N8UGhf9Y55im5X8fEzwhwDwiSqN+ZPSNrDJF7ivf/AuRPQ==
|
||||
|
||||
"@paralleldrive/cuid2@^2.2.2":
|
||||
version "2.2.2"
|
||||
resolved "https://registry.yarnpkg.com/@paralleldrive/cuid2/-/cuid2-2.2.2.tgz#7f91364d53b89e2c9cb9e02e8dd0f129e834455f"
|
||||
|
@ -12083,6 +12378,11 @@
|
|||
resolved "https://registry.yarnpkg.com/@types/set-value/-/set-value-4.0.3.tgz#ac7f5f9715c95c7351e02832df672a112428e587"
|
||||
integrity sha512-tSuUcLl6kMzI+l0gG7FZ04xbIcynxNIYgWFj91LPAvRcn7W3L1EveXNdVjqFDgAZPjY1qCOsm8Sb1C70SxAPHw==
|
||||
|
||||
"@types/shimmer@^1.2.0":
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/@types/shimmer/-/shimmer-1.2.0.tgz#9b706af96fa06416828842397a70dfbbf1c14ded"
|
||||
integrity sha512-UE7oxhQLLd9gub6JKIAhDq06T0F6FnztwMNRvYgjeQSBeMc1ZG/tA47EwfduvkuQS8apbkM/lpLpWsaCeYsXVg==
|
||||
|
||||
"@types/sinon@^7.0.13":
|
||||
version "7.0.13"
|
||||
resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-7.0.13.tgz#ca039c23a9e27ebea53e0901ef928ea2a1a6d313"
|
||||
|
@ -20442,7 +20742,7 @@ import-fresh@^3.2.1, import-fresh@^3.3.0:
|
|||
parent-module "^1.0.0"
|
||||
resolve-from "^4.0.0"
|
||||
|
||||
import-in-the-middle@1.13.1:
|
||||
import-in-the-middle@1.13.1, import-in-the-middle@^1.8.1:
|
||||
version "1.13.1"
|
||||
resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.13.1.tgz#789651f9e93dd902a5a306f499ab51eb72b03a12"
|
||||
integrity sha512-k2V9wNm9B+ysuelDTHjI9d5KPc4l8zAZTGqj+pcynvWkypZd857ryzN8jNC7Pg2YZXNMJcHRPpaDyCBbNyVRpA==
|
||||
|
@ -28467,6 +28767,11 @@ shelljs@^0.8.5:
|
|||
interpret "^1.0.0"
|
||||
rechoir "^0.6.2"
|
||||
|
||||
shimmer@^1.2.1:
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/shimmer/-/shimmer-1.2.1.tgz#610859f7de327b587efebf501fb43117f9aff337"
|
||||
integrity sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==
|
||||
|
||||
should-equal@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/should-equal/-/should-equal-2.0.0.tgz#6072cf83047360867e68e98b09d71143d04ee0c3"