Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 17:59:23 -04:00
[Security Solution] [AI Insights] Enable LangSmith tracing in cloud deployments (#181159)
## [Security Solution] [AI Insights] Enable LangSmith tracing in cloud deployments
### Summary
This PR enables LangSmith tracing for the [AI Insights](https://github.com/elastic/kibana/pull/180611) feature in cloud deployments.
LangSmith project names and API keys are specified using the same UI and patterns introduced by @spong in <https://github.com/elastic/kibana/pull/180227>
### Details
To enable LangSmith tracing in cloud deployments, add the following feature flags to `xpack.securitySolution.enableExperimental` in your Kibana configuration:
```
xpack.securitySolution.enableExperimental: ['assistantModelEvaluation', 'assistantAlertsInsights']
```
- The `assistantModelEvaluation` feature flag enables the `Evaluation` settings category in the assistant. The LangSmith project name and API key are configured here
- The `assistantAlertsInsights` feature flag enables the AI Insights feature, which is off by default at the time of this writing
After enabling the feature flags above:
1) Click the `AI Assistant` button anywhere in the Security Solution to launch the assistant
2) Click the settings gear in the assistant
3) Click the `Evaluation` settings category
4) Click `Show Trace Options (for internal use only)`
5) Specify a `LangSmith Project` and `LangSmith API Key` per the screenshot below:

[Screenshot: the assistant's Evaluation settings showing the `LangSmith Project` and `LangSmith API Key` fields]
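For context, the `LangSmith Project` and `LangSmith API Key` entered here are what the server uses to construct a LangChain tracer. Below is a minimal sketch of that wiring, assuming the `langsmith` and `@langchain/core` packages this PR already relies on; the helper name `createTracer` is illustrative and is not the PR's actual `getLangSmithTracer`:

```typescript
import { Client } from 'langsmith';
import { LangChainTracer } from '@langchain/core/tracers/tracer_langchain';

// Illustrative helper (not the PR's getLangSmithTracer): build a tracer only
// when both a project name and an API key were entered in the Evaluation
// settings, so tracing stays disabled otherwise.
export const createTracer = (
  projectName?: string,
  apiKey?: string
): LangChainTracer[] => {
  if (!projectName || !apiKey) {
    return []; // tracing not configured
  }

  return [
    new LangChainTracer({
      projectName,
      client: new Client({ apiKey }),
    }),
  ];
};
```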
Parent: cfee0fafb6
Commit: 89609fe596
6 changed files with 55 additions and 5 deletions

The diff hunks for the six changed files follow, one block per file.
```diff
@@ -5,13 +5,14 @@
  * 2.0.
  */
 
-import { v4 as uuidv4 } from 'uuid';
-import { KibanaRequest, Logger } from '@kbn/core/server';
 import type { PluginStartContract as ActionsPluginStart } from '@kbn/actions-plugin/server';
+import { KibanaRequest, Logger } from '@kbn/core/server';
 import { LLM } from '@langchain/core/language_models/llms';
 import { get } from 'lodash/fp';
+import { v4 as uuidv4 } from 'uuid';
 
 import { getMessageContentAndRole } from './helpers';
+import { TraceOptions } from './types';
 
 const LLM_TYPE = 'ActionsClientLlm';
 
@@ -27,6 +28,7 @@ interface ActionsClientLlmParams {
   model?: string;
   temperature?: number;
   traceId?: string;
+  traceOptions?: TraceOptions;
 }
 
 export class ActionsClientLlm extends LLM {
@@ -52,8 +54,11 @@ export class ActionsClientLlm extends LLM {
     model,
     request,
     temperature,
+    traceOptions,
   }: ActionsClientLlmParams) {
-    super({});
+    super({
+      callbacks: [...(traceOptions?.tracers ?? [])],
+    });
 
     this.#actions = actions;
    this.#connectorId = connectorId;
```
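The important change here is that any configured tracers are passed to the base `LLM` class as callbacks, so every invocation made through `ActionsClientLlm` is reported to LangSmith without further wiring. A condensed sketch of the pattern, assuming `@langchain/core`; the class and parameter names below are illustrative and are not the Kibana implementation:

```typescript
import { LLM, type BaseLLMParams } from '@langchain/core/language_models/llms';
import type { LangChainTracer } from '@langchain/core/tracers/tracer_langchain';

// Illustrative params/class names; not the full Kibana ActionsClientLlm.
interface TracedLlmParams extends BaseLLMParams {
  tracers?: LangChainTracer[];
}

export class TracedLlm extends LLM {
  constructor({ tracers, ...rest }: TracedLlmParams) {
    // Tracers are ordinary LangChain callback handlers, so passing them to the
    // base class means every call made through this LLM is traced.
    super({ ...rest, callbacks: [...(tracers ?? [])] });
  }

  _llmType(): string {
    return 'traced-llm';
  }

  async _call(prompt: string): Promise<string> {
    // Placeholder: the real class invokes an Elastic connector here.
    return `echo: ${prompt}`;
  }
}
```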
```diff
@@ -5,6 +5,7 @@
  * 2.0.
  */
 
+import { LangChainTracer } from '@langchain/core/tracers/tracer_langchain';
 import {
   ChatCompletionContentPart,
   ChatCompletionCreateParamsNonStreaming,
@@ -38,3 +39,12 @@ export interface InvokeAIActionParamsSchema {
   functions?: ChatCompletionCreateParamsNonStreaming['functions'];
   signal?: AbortSignal;
 }
+
+export interface TraceOptions {
+  evaluationId?: string;
+  exampleId?: string;
+  projectName?: string;
+  runName?: string;
+  tags?: string[];
+  tracers?: LangChainTracer[];
+}
```
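The new `TraceOptions` interface mirrors the metadata LangSmith accepts for a run. A hypothetical example of populating it follows; the values are illustrative, and the import path matches the `./types` import used by `ActionsClientLlm` above:

```typescript
import { LangChainTracer } from '@langchain/core/tracers/tracer_langchain';
import type { TraceOptions } from './types'; // path as in the imports above

// Example values only; in Kibana these come from the Evaluation settings
// and from the calling feature.
export const exampleTraceOptions: TraceOptions = {
  projectName: 'security-ai-insights',
  runName: 'alerts-insights-run',
  tags: ['security-solution', 'ai-insights'],
  tracers: [new LangChainTracer({ projectName: 'security-ai-insights' })],
};
```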
```diff
@@ -56,6 +56,8 @@ export const AlertsInsightsPostRequestBody = z.object({
   anonymizationFields: z.array(AnonymizationFieldResponse),
   connectorId: z.string(),
   actionTypeId: z.string(),
+  langSmithProject: z.string().optional(),
+  langSmithApiKey: z.string().optional(),
   model: z.string().optional(),
   replacements: Replacements.optional(),
   size: z.number(),
```
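Because the two new schema fields use `.optional()`, existing callers that omit them continue to validate unchanged. A small Zod sketch of that behavior, using a pared-down schema rather than the full `AlertsInsightsPostRequestBody`:

```typescript
import { z } from 'zod';

// Pared-down version of the request schema, showing only the fields relevant
// to tracing; not the full AlertsInsightsPostRequestBody.
const RequestBody = z.object({
  connectorId: z.string(),
  actionTypeId: z.string(),
  langSmithProject: z.string().optional(),
  langSmithApiKey: z.string().optional(),
  size: z.number(),
});

// Both of these validate: the LangSmith fields may simply be omitted.
RequestBody.parse({ connectorId: 'c1', actionTypeId: '.gen-ai', size: 20 });
RequestBody.parse({
  connectorId: 'c1',
  actionTypeId: '.gen-ai',
  langSmithProject: 'security-ai-insights',
  langSmithApiKey: 'ls-example-key',
  size: 20,
});
```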
```diff
@@ -73,6 +73,10 @@ paths:
                 type: string
               actionTypeId:
                 type: string
+              langSmithProject:
+                type: string
+              langSmithApiKey:
+                type: string
               model:
                 type: string
               replacements:
```
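With the OpenAPI change above, a caller opts into tracing simply by adding the two new string properties to the request body. A hypothetical body follows; the values are illustrative, and the remaining fields are the ones the existing route and hook already send:

```typescript
// Hypothetical request body for the alerts insights route with tracing
// enabled; field values are illustrative. Omitting the two langSmith*
// properties yields the previous, untraced behavior.
export const exampleRequestBody = {
  alertsIndexPattern: '.alerts-security.alerts-default',
  anonymizationFields: [],
  connectorId: 'my-gen-ai-connector',
  actionTypeId: '.gen-ai',
  langSmithProject: 'security-ai-insights', // new optional property
  langSmithApiKey: 'ls-example-key', // new optional property
  replacements: {},
  size: 20,
  subAction: 'invokeAI',
};
```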
```diff
@@ -19,6 +19,7 @@ import { transformError } from '@kbn/securitysolution-es-utils';
 import { INSIGHTS_ALERTS } from '../../../../common/constants';
 import { getAssistantToolParams, isInsightsFeatureEnabled } from './helpers';
 import { DEFAULT_PLUGIN_NAME, getPluginNameFromRequest } from '../../helpers';
+import { getLangSmithTracer } from '../../evaluate/utils';
 import { buildResponse } from '../../../lib/build_response';
 import { ElasticAssistantRequestHandlerContext } from '../../../types';
 import { getLlmType } from '../../utils';
@@ -73,7 +74,14 @@ export const postAlertsInsightsRoute = (router: IRouter<ElasticAssistantRequestH
         // get parameters from the request body
         const alertsIndexPattern = decodeURIComponent(request.body.alertsIndexPattern);
         const connectorId = decodeURIComponent(request.body.connectorId);
-        const { actionTypeId, anonymizationFields, replacements, size } = request.body;
+        const {
+          actionTypeId,
+          anonymizationFields,
+          langSmithApiKey,
+          langSmithProject,
+          replacements,
+          size,
+        } = request.body;
 
         // get an Elasticsearch client for the authenticated user:
         const esClient = (await context.core).elasticsearch.client.asCurrentUser;
@@ -91,6 +99,17 @@ export const postAlertsInsightsRoute = (router: IRouter<ElasticAssistantRequestH
           return response.notFound(); // insights tool not found
         }
 
+        const traceOptions = {
+          projectName: langSmithProject,
+          tracers: [
+            ...getLangSmithTracer({
+              apiKey: langSmithApiKey,
+              projectName: langSmithProject,
+              logger,
+            }),
+          ],
+        };
+
         const llm = new ActionsClientLlm({
           actions,
           connectorId,
@@ -98,6 +117,7 @@ export const postAlertsInsightsRoute = (router: IRouter<ElasticAssistantRequestH
           logger,
           request,
           temperature: 0, // zero temperature for insights, because we want structured JSON output
+          traceOptions,
         });
 
         const assistantToolParams = getAssistantToolParams({
```
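Because the tracers are attached when the LLM is constructed, anything that later calls this LLM is traced without additional wiring. A hedged sketch of why that works, using `FakeLLM` from `@langchain/core`'s testing utilities as a stand-in; the project name is illustrative, and a LangSmith API key (for example via `LANGCHAIN_API_KEY`) is needed for runs to actually be recorded:

```typescript
import { LangChainTracer } from '@langchain/core/tracers/tracer_langchain';
import { FakeLLM } from '@langchain/core/utils/testing';

// Constructor-level callbacks (what this PR does via ActionsClientLlm) versus
// per-call callbacks; both produce a LangSmith run for the traced invocation,
// provided LangSmith credentials are configured for the tracer.
const main = async () => {
  const tracer = new LangChainTracer({ projectName: 'security-ai-insights' }); // illustrative project

  const tracedLlm = new FakeLLM({ callbacks: [tracer] });
  await tracedLlm.invoke('Summarize these alerts'); // traced automatically

  const plainLlm = new FakeLLM({});
  await plainLlm.invoke('Summarize these alerts', { callbacks: [tracer] }); // traced for this call only
};

void main();
```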
```diff
@@ -16,6 +16,7 @@ import {
   AlertsInsightsPostResponse,
   ELASTIC_AI_ASSISTANT_INTERNAL_API_VERSION,
 } from '@kbn/elastic-assistant-common';
+import { isEmpty } from 'lodash/fp';
 import moment from 'moment';
 import React, { useCallback, useMemo, useState } from 'react';
 import { useLocalStorage, useSessionStorage } from 'react-use';
@@ -60,7 +61,7 @@ export const useInsights = ({
   const [isLoading, setIsLoading] = useState(false);
 
   // get alerts index pattern and allow lists from the assistant context:
-  const { alertsIndexPattern, knowledgeBase } = useAssistantContext();
+  const { alertsIndexPattern, knowledgeBase, traceOptions } = useAssistantContext();
 
   const { data: anonymizationFields } = useFetchAnonymizationFields();
 
@@ -116,6 +117,12 @@ export const useInsights = ({
           alertsIndexPattern: alertsIndexPattern ?? '',
           anonymizationFields: anonymizationFields?.data ?? [],
           connectorId: connectorId ?? '',
+          langSmithProject: isEmpty(traceOptions?.langSmithProject)
+            ? undefined
+            : traceOptions?.langSmithProject,
+          langSmithApiKey: isEmpty(traceOptions?.langSmithApiKey)
+            ? undefined
+            : traceOptions?.langSmithApiKey,
           size: knowledgeBase.latestAlerts,
           replacements: {}, // no need to re-use replacements in the current implementation
           subAction: 'invokeAI', // non-streaming
@@ -227,6 +234,8 @@ export const useInsights = ({
     setLocalStorageGenerationIntervals,
     setSessionStorageCachedInsights,
     toasts,
+    traceOptions?.langSmithApiKey,
+    traceOptions?.langSmithProject,
   ]);
 
   return {
```
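The hook converts blank Evaluation settings to `undefined` before building the request, so an unconfigured LangSmith form doesn't send empty strings (which would still satisfy the `.optional()` string schema). A small sketch of that normalization, assuming `lodash/fp` as imported above; the helper name is illustrative:

```typescript
import { isEmpty } from 'lodash/fp';

// Illustrative helper mirroring the pattern in the hook above: treat '',
// null, and undefined as "not configured" so the optional fields are
// omitted from the request instead of being sent as empty strings.
const emptyToUndefined = (value: string | undefined): string | undefined =>
  isEmpty(value) ? undefined : value;

emptyToUndefined(''); // undefined -> field left out of the JSON body
emptyToUndefined(undefined); // undefined
emptyToUndefined('security-ai-insights'); // 'security-ai-insights'
```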