[8.18] [Stack connector] Forward `telemetryMetadata.pluginId` to EIS use case header (#214269) (#214429)
# Backport

This will backport the following commits from `main` to `8.18`:

- [[Stack connector] Forward `telemetryMetadata.pluginId` to EIS use case header (#214269)](https://github.com/elastic/kibana/pull/214269)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sorenlouv/backport).
Parent: ae70f3ee8b
Commit: 1b64d12817

14 changed files with 64 additions and 3 deletions
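
At a high level, the change threads an optional `telemetryMetadata.pluginId` from each consumer (the assistant graph, attack discovery, defend insights, and the automatic import routes) down to the Elastic Inference Service (EIS) request, where it is emitted as an `X-Elastic-Product-Use-Case` HTTP header. A minimal TypeScript sketch of the forwarding pattern, using the `TelemetryMetadata` shape and header name from the diffs below (`buildTransportOptions` is a hypothetical helper, for illustration only):

```typescript
// Minimal sketch of the forwarding pattern added in this PR.
// TelemetryMetadata and the header name come from the diffs below;
// buildTransportOptions is a hypothetical helper used only for illustration.
interface TelemetryMetadata {
  pluginId?: string;
}

interface TransportOptions {
  asStream: boolean;
  meta: boolean;
  signal?: AbortSignal;
  headers?: Record<string, string>;
}

function buildTransportOptions(
  telemetryMetadata?: TelemetryMetadata,
  signal?: AbortSignal
): TransportOptions {
  return {
    asStream: true,
    meta: true,
    signal,
    // Attach the EIS use-case header only when a pluginId was provided,
    // mirroring the conditional spread added to InferenceConnector below.
    ...(telemetryMetadata?.pluginId
      ? { headers: { 'X-Elastic-Product-Use-Case': telemetryMetadata.pluginId } }
      : {}),
  };
}

// Example: a caller identifying itself as the security AI assistant.
const opts = buildTransportOptions({ pluginId: 'security_ai_assistant' });
// opts.headers -> { 'X-Elastic-Product-Use-Case': 'security_ai_assistant' }
```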
```diff
@@ -11,6 +11,7 @@ import { LLM } from '@langchain/core/language_models/llms';
 import { get } from 'lodash/fp';
 import { v4 as uuidv4 } from 'uuid';
 import { PublicMethodsOf } from '@kbn/utility-types';
+import type { TelemetryMetadata } from '@kbn/actions-plugin/server/lib';
 import { DEFAULT_TIMEOUT, getDefaultArguments } from './constants';
 
 import { getMessageContentAndRole } from './helpers';
@@ -28,6 +29,7 @@ interface ActionsClientLlmParams {
   timeout?: number;
   traceId?: string;
   traceOptions?: TraceOptions;
+  telemetryMetadata?: TelemetryMetadata;
 }
 
 export class ActionsClientLlm extends LLM {
@@ -36,6 +38,7 @@ export class ActionsClientLlm extends LLM {
   #logger: Logger;
   #traceId: string;
   #timeout?: number;
+  telemetryMetadata?: TelemetryMetadata;
 
   // Local `llmType` as it can change and needs to be accessed by abstract `_llmType()` method
   // Not using getter as `this._llmType()` is called in the constructor via `super({})`
@@ -54,6 +57,7 @@ export class ActionsClientLlm extends LLM {
     temperature,
     timeout,
     traceOptions,
+    telemetryMetadata,
   }: ActionsClientLlmParams) {
     super({
       callbacks: [...(traceOptions?.tracers ?? [])],
@@ -67,6 +71,7 @@ export class ActionsClientLlm extends LLM {
     this.#timeout = timeout;
     this.model = model;
     this.temperature = temperature;
+    this.telemetryMetadata = telemetryMetadata;
   }
 
   _llmType() {
@@ -102,6 +107,7 @@ export class ActionsClientLlm extends LLM {
             model: this.model,
             messages: [assistantMessage], // the assistant message
           },
+          telemetryMetadata: this.telemetryMetadata,
         },
       }
     : {
@@ -113,6 +119,7 @@ export class ActionsClientLlm extends LLM {
           ...getDefaultArguments(this.llmType, this.temperature),
           // This timeout is large because LangChain prompts can be complicated and take a long time
           timeout: this.#timeout ?? DEFAULT_TIMEOUT,
+          telemetryMetadata: this.telemetryMetadata,
         },
       },
     };
```
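With the wrapper changes in place, a consumer opts in at construction time. A hedged sketch of such a call (the `actionsClient`, `connectorId`, `logger`, `connectorTimeout`, and `traceOptions` values are assumed from the caller's plugin context; they are not shown in this diff):

```typescript
// Sketch only: everything except telemetryMetadata is assumed context.
const llm = new ActionsClientLlm({
  actionsClient,
  connectorId,
  logger,
  temperature: 0,
  timeout: connectorTimeout,
  traceOptions,
  // New in this PR: identifies the calling feature to EIS telemetry.
  telemetryMetadata: { pluginId: 'security_attack_discovery' },
});
```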
```diff
@@ -90,6 +90,9 @@ export function registerAnalyzeLogsRoutes(
       maxTokens: 4096,
       signal: abortSignal,
       streaming: false,
+      telemetryMetadata: {
+        pluginId: 'automatic_import',
+      },
     });
     const options = {
       callbacks: [
```
```diff
@@ -95,6 +95,9 @@ export function registerCategorizationRoutes(
       maxTokens: 4096,
       signal: abortSignal,
       streaming: false,
+      telemetryMetadata: {
+        pluginId: 'automatic_import',
+      },
     });
 
     const parameters = {
```
```diff
@@ -77,6 +77,9 @@ export function registerCelInputRoutes(router: IRouter<IntegrationAssistantRouteHandlerContext>) {
       maxTokens: 4096,
       signal: abortSignal,
       streaming: false,
+      telemetryMetadata: {
+        pluginId: 'automatic_import',
+      },
     });
 
     const parameters = {
```
```diff
@@ -87,6 +87,9 @@ export function registerEcsRoutes(router: IRouter<IntegrationAssistantRouteHandlerContext>) {
       maxTokens: 4096,
       signal: abortSignal,
       streaming: false,
+      telemetryMetadata: {
+        pluginId: 'automatic_import',
+      },
     });
 
     const parameters = {
```
```diff
@@ -87,6 +87,9 @@ export function registerRelatedRoutes(router: IRouter<IntegrationAssistantRouteHandlerContext>) {
       maxTokens: 4096,
       signal: abortSignal,
       streaming: false,
+      telemetryMetadata: {
+        pluginId: 'automatic_import',
+      },
     });
 
     const parameters = {
```
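All five automatic import routes gain the identical three-line block, tagging their LLM calls with `pluginId: 'automatic_import'`. Since the value repeats verbatim, a shared constant would also work; a hypothetical sketch, not part of the PR:

```typescript
// Hypothetical refactor (not in this PR): hoist the repeated metadata.
const AUTOMATIC_IMPORT_TELEMETRY = {
  telemetryMetadata: { pluginId: 'automatic_import' },
} as const;

// e.g. inside registerAnalyzeLogsRoutes (createModel is a stand-in name):
// const model = createModel({ maxTokens: 4096, streaming: false, ...AUTOMATIC_IMPORT_TELEMETRY });
```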
```diff
@@ -70,6 +70,7 @@ describe('InferenceConnector', () => {
 
       const response = await connector.performApiUnifiedCompletion({
         body: { messages: [{ content: 'What is Elastic?', role: 'user' }] },
+        telemetryMetadata: { pluginId: 'security_ai_assistant' },
       });
       expect(mockEsClient.transport.request).toBeCalledTimes(1);
       expect(mockEsClient.transport.request).toHaveBeenCalledWith(
@@ -86,7 +87,13 @@ describe('InferenceConnector', () => {
           method: 'POST',
           path: '_inference/chat_completion/test/_stream',
         },
-        { asStream: true, meta: true }
+        {
+          asStream: true,
+          meta: true,
+          headers: {
+            'X-Elastic-Product-Use-Case': 'security_ai_assistant',
+          },
+        }
       );
       expect(response.choices[0].message.content).toEqual(' you');
     });
@@ -290,7 +297,10 @@ describe('InferenceConnector', () => {
           method: 'POST',
           path: '_inference/chat_completion/test/_stream',
         },
-        { asStream: true, meta: true }
+        {
+          asStream: true,
+          meta: true,
+        }
       );
     });
 
@@ -312,7 +322,11 @@ describe('InferenceConnector', () => {
           method: 'POST',
           path: '_inference/chat_completion/test/_stream',
         },
-        { asStream: true, meta: true, signal }
+        {
+          asStream: true,
+          meta: true,
+          signal,
+        }
       );
     });
 
```
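The updated tests pin down both branches of the new behavior: when `telemetryMetadata.pluginId` is supplied, the transport request carries the `X-Elastic-Product-Use-Case` header; when it is omitted, the options object is unchanged, with or without an abort `signal`.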
```diff
@@ -194,6 +194,13 @@ export class InferenceConnector extends SubActionConnector<Config, Secrets> {
         asStream: true,
         meta: true,
         signal: params.signal,
+        ...(params.telemetryMetadata?.pluginId
+          ? {
+              headers: {
+                'X-Elastic-Product-Use-Case': params.telemetryMetadata?.pluginId,
+              },
+            }
+          : {}),
       }
     );
     // errors should be thrown as it will not be a stream response
```
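Combining this implementation with the test expectations above, the resulting transport call looks roughly like this. `esClient` and `chatCompleteBody` are placeholders for the connector's Elasticsearch client and the request body it builds elsewhere; this is a sketch, not the connector's exact code:

```typescript
// Approximate shape of the request InferenceConnector now issues.
declare const esClient: {
  transport: { request: (req: object, opts: object) => Promise<unknown> };
};
declare const chatCompleteBody: object; // placeholder: built from the subaction params

async function example(signal?: AbortSignal) {
  await esClient.transport.request(
    {
      method: 'POST',
      path: '_inference/chat_completion/test/_stream',
      body: chatCompleteBody,
    },
    {
      asStream: true,
      meta: true,
      signal,
      // Present only because the caller supplied telemetryMetadata.pluginId:
      headers: { 'X-Elastic-Product-Use-Case': 'security_ai_assistant' },
    }
  );
}
```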
```diff
@@ -61,5 +61,8 @@ export const getEvaluatorLlm = async ({
     temperature: 0, // zero temperature for evaluation
     timeout: connectorTimeout,
     traceOptions,
+    telemetryMetadata: {
+      pluginId: 'security_attack_discovery',
+    },
   });
 };
```
```diff
@@ -92,6 +92,9 @@ export const evaluateAttackDiscovery = async ({
     temperature: 0, // zero temperature for attack discovery, because we want structured JSON output
     timeout: connectorTimeout,
     traceOptions,
+    telemetryMetadata: {
+      pluginId: 'security_attack_discovery',
+    },
   });
 
   const graph = getDefaultAttackDiscoveryGraph({
```
```diff
@@ -90,6 +90,9 @@ export const callAssistantGraph: AgentExecutor<true | false> = async ({
     // failure could be due to bad connector, we should deliver that result to the client asap
     maxRetries: 0,
     convertSystemMessageToHumanContent: false,
+    telemetryMetadata: {
+      pluginId: 'security_ai_assistant',
+    },
   });
 
   const anonymizationFieldsRes =
```
```diff
@@ -87,6 +87,9 @@ export const invokeAttackDiscoveryGraph = async ({
     temperature: 0, // zero temperature for attack discovery, because we want structured JSON output
     timeout: connectorTimeout,
     traceOptions,
+    telemetryMetadata: {
+      pluginId: 'security_attack_discovery',
+    },
   });
 
   if (llm == null) {
```
```diff
@@ -162,6 +162,9 @@ export function getAssistantToolParams({
     temperature: 0, // zero temperature because we want structured JSON output
     timeout: connectorTimeout,
     traceOptions,
+    telemetryMetadata: {
+      pluginId: 'security_defend_insights',
+    },
   });
 
   return {
```
```diff
@@ -256,6 +256,9 @@ export const postEvaluateRoute = (
     streaming: false,
     maxRetries: 0,
     convertSystemMessageToHumanContent: false,
+    telemetryMetadata: {
+      pluginId: 'security_ai_assistant',
+    },
   });
   const llm = createLlmInstance();
   const anonymizationFieldsRes =
```
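Taken together, the backport tags each consumer with a distinct use-case value; every value below appears verbatim in the hunks above.

| Caller | `pluginId` sent as `X-Elastic-Product-Use-Case` |
| --- | --- |
| Automatic import routes (analyze logs, categorization, CEL input, ECS, related) | `automatic_import` |
| Security AI assistant graph and evaluation route | `security_ai_assistant` |
| Attack discovery (invocation and evaluation) | `security_attack_discovery` |
| Defend insights tool params | `security_defend_insights` |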