[Observability] Add feedback buttons for prompts (#160351)

Co-authored-by: Sean Heelan <sean@optimyze.cloud>
This commit is contained in:
Dario Gieselaar 2023-07-12 21:58:13 +02:00 committed by GitHub
parent 0cd98f1dfb
commit 4ae94c3502
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
29 changed files with 583 additions and 209 deletions

View file

@ -273,6 +273,7 @@ export default function ({ getService }: PluginFunctionalProviderContext) {
'xpack.upgrade_assistant.featureSet.reindexCorrectiveActions (boolean)',
'xpack.upgrade_assistant.ui.enabled (boolean)',
'xpack.observability.aiAssistant.enabled (boolean)',
'xpack.observability.aiAssistant.feedback.enabled (boolean)',
'xpack.observability.unsafe.alertDetails.metrics.enabled (boolean)',
'xpack.observability.unsafe.alertDetails.logs.enabled (boolean)',
'xpack.observability.unsafe.alertDetails.uptime.enabled (boolean)',

View file

@ -49,6 +49,7 @@ export function ErrorSampleCoPilotPrompt({
)}
promptId={CoPilotPromptId.ApmExplainError}
params={promptParams}
feedbackEnabled={false}
/>
</EuiFlexItem>
<EuiSpacer size="s" />

View file

@ -35,7 +35,7 @@ export const APM_FEATURE = {
privileges: {
all: {
app: [APM_SERVER_FEATURE_ID, 'ux', 'kibana'],
api: [APM_SERVER_FEATURE_ID, 'apm_write', 'rac'],
api: [APM_SERVER_FEATURE_ID, 'apm_write', 'rac', 'ai_assistant'],
catalogue: [APM_SERVER_FEATURE_ID],
savedObject: {
all: [],
@ -56,7 +56,7 @@ export const APM_FEATURE = {
},
read: {
app: [APM_SERVER_FEATURE_ID, 'ux', 'kibana'],
api: [APM_SERVER_FEATURE_ID, 'rac'],
api: [APM_SERVER_FEATURE_ID, 'rac', 'ai_assistant'],
catalogue: [APM_SERVER_FEATURE_ID],
savedObject: {
all: [],

View file

@ -230,6 +230,7 @@ export const ExplainLogRateSpikes: FC<AlertDetailsExplainLogRateSpikesSectionPro
title={explainLogSpikeTitle}
params={logSpikeParams}
promptId={CoPilotPromptId.ExplainLogSpike}
feedbackEnabled={false}
/>
</EuiFlexItem>
) : null}

View file

@ -49,6 +49,7 @@ export const CopilotProcessRow = ({ command }: { command: string }) => {
title={explainProcessMessageTitle}
params={explainProcessParams}
promptId={CoPilotPromptId.InfraExplainProcess}
feedbackEnabled={true}
/>
</EuiFlexItem>
</EuiFlexItem>

View file

@ -33,7 +33,7 @@ export const METRICS_FEATURE = {
all: {
app: ['infra', 'metrics', 'kibana'],
catalogue: ['infraops', 'metrics'],
api: ['infra', 'rac'],
api: ['infra', 'rac', 'ai_assistant'],
savedObject: {
all: ['infrastructure-ui-source'],
read: ['index-pattern'],
@ -54,7 +54,7 @@ export const METRICS_FEATURE = {
read: {
app: ['infra', 'metrics', 'kibana'],
catalogue: ['infraops', 'metrics'],
api: ['infra', 'rac'],
api: ['infra', 'rac', 'ai_assistant'],
savedObject: {
all: [],
read: ['infrastructure-ui-source', 'index-pattern'],
@ -92,7 +92,7 @@ export const LOGS_FEATURE = {
all: {
app: ['infra', 'logs', 'kibana'],
catalogue: ['infralogging', 'logs'],
api: ['infra', 'rac'],
api: ['infra', 'rac', 'ai_assistant'],
savedObject: {
all: [infraSourceConfigurationSavedObjectName, logViewSavedObjectName],
read: [],
@ -113,7 +113,7 @@ export const LOGS_FEATURE = {
read: {
app: ['infra', 'logs', 'kibana'],
catalogue: ['infralogging', 'logs'],
api: ['infra', 'rac'],
api: ['infra', 'rac', 'ai_assistant'],
alerting: {
rule: {
read: [LOG_DOCUMENT_COUNT_RULE_TYPE_ID],

View file

@ -204,6 +204,7 @@ export const LogEntryFlyout = ({
title={explainLogMessageTitle}
params={explainLogMessageParams}
promptId={CoPilotPromptId.LogsExplainMessage}
feedbackEnabled={false}
/>
</EuiFlexItem>
) : null}
@ -214,6 +215,7 @@ export const LogEntryFlyout = ({
title={similarLogMessagesTitle}
params={similarLogMessageParams}
promptId={CoPilotPromptId.LogsFindSimilar}
feedbackEnabled={false}
/>
</EuiFlexItem>
) : null}

View file

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
// Identifies which OpenAI-compatible backend serves chat completions.
// The string values are stable identifiers — do not rename them without
// migrating every place that persists or compares them.
export enum OpenAIProvider {
  OpenAI = 'openAI',
  AzureOpenAI = 'azureOpenAI',
}
// Stable identifiers for every CoPilot prompt. Each value is used both as the
// key into the prompt map loaded from './prompts' and as the {promptId} path
// segment of the /internal/observability/copilot/prompts/{promptId} routes,
// so renaming a value is a breaking change for in-flight clients.
export enum CoPilotPromptId {
  ProfilingOptimizeFunction = 'profilingOptimizeFunction',
  ApmExplainError = 'apmExplainError',
  LogsExplainMessage = 'logsExplainMessage',
  LogsFindSimilar = 'logsFindSimilar',
  InfraExplainProcess = 'infraExplainProcess',
  ExplainLogSpike = 'explainLogSpike',
}
export type {
CoPilotPromptMap,
CreateChatCompletionResponseChunk,
PromptParamsOf,
} from './prompts';
export const loadCoPilotPrompts = () => import('./prompts').then((m) => m.coPilotPrompts);

View file

@ -5,26 +5,12 @@
* 2.0.
*/
import * as t from 'io-ts';
import {
type ChatCompletionRequestMessage,
type CreateChatCompletionResponse,
type CreateChatCompletionResponseChoicesInner,
import type {
ChatCompletionRequestMessage,
CreateChatCompletionResponse,
CreateChatCompletionResponseChoicesInner,
} from 'openai';
export enum OpenAIProvider {
OpenAI = 'openAI',
AzureOpenAI = 'azureOpenAI',
}
export enum CoPilotPromptId {
ProfilingExplainFunction = 'profilingExplainFunction',
ProfilingOptimizeFunction = 'profilingOptimizeFunction',
ApmExplainError = 'apmExplainError',
LogsExplainMessage = 'logsExplainMessage',
LogsFindSimilar = 'logsFindSimilar',
InfraExplainProcess = 'infraExplainProcess',
ExplainLogSpike = 'explainLogSpike',
}
import { CoPilotPromptId } from '.';
const PERF_GPT_SYSTEM_MESSAGE = {
content: `You are perf-gpt, a helpful assistant for performance analysis and optimisation
@ -83,49 +69,6 @@ const significantFieldValuesRt = t.array(
export const coPilotPrompts = {
[CoPilotPromptId.ProfilingOptimizeFunction]: prompt({
params: t.type({
library: t.string,
functionName: t.string,
}),
messages: ({ library, functionName }) => {
return [
PERF_GPT_SYSTEM_MESSAGE,
{
content: `Assuming the function ${functionName} from the library ${library} is consuming significant CPU resources.
Suggest ways to optimize or improve the system that involve the ${functionName} function from the
${library} library. Types of improvements that would be useful to me are improvements that result in:
- Higher performance so that the system runs faster or uses less CPU
- Better memory efficient so that the system uses less RAM
- Better storage efficient so that the system stores less data on disk.
- Better network I/O efficiency so that less data is sent over the network
- Better disk I/O efficiency so that less data is read and written from disk
Make up to five suggestions. Your suggestions must meet all of the following criteria:
1. Your suggestions should be detailed, technical and include concrete examples.
2. Your suggestions should be specific to improving performance of a system in which the ${functionName} function from
the ${library} library is consuming significant CPU.
2. If you suggest replacing the function or library with a more efficient replacement you must suggest at least
one concrete replacement.
If you know of fewer than five ways to improve the performance of a system in which the ${functionName} function from the
${library} library is consuming significant CPU, then provide fewer than five suggestions. If you do not know of any
way in which to improve the performance then say "I do not know how to improve the performance of systems where
this function is consuming a significant amount of CPU".
If you have suggestions, the output format should look as follows:
Here are some suggestions as to how you might optimize your system if ${functionName} in ${library} is consuming
significant CPU resources:
1. Insert first suggestion
2. Insert second suggestion
etc.`,
role: 'user',
},
];
},
}),
[CoPilotPromptId.ProfilingExplainFunction]: prompt({
params: t.type({
library: t.string,
functionName: t.string,
@ -135,18 +78,52 @@ export const coPilotPrompts = {
PERF_GPT_SYSTEM_MESSAGE,
{
content: `I am a software engineer. I am trying to understand what a function in a particular
software library does.
The library is: ${library}
The function is: ${functionName}
Your task is to describe what the library is and what its use cases are, and to describe what the function
does. The output format should look as follows:
Library description: Provide a concise description of the library
Library use-cases: Provide a concise description of what the library is typically used for.
Function description: Provide a concise, technical, description of what the function does.
`,
software library does.
The library is: ${library}
The function is: ${functionName}
Your have two tasks. Your first task is to desribe what the library is and what its use cases are, and to
describe what the function does. The output format should look as follows:
Library description: Provide a concise description of the library
Library use-cases: Provide a concise description of what the library is typically used for.
Function description: Provide a concise, technical, description of what the function does.
Assume the function ${functionName} from the library ${library} is consuming significant CPU resources.
Your second task is to suggest ways to optimize or improve the system that involve the ${functionName} function from the
${library} library. Types of improvements that would be useful to me are improvements that result in:
- Higher performance so that the system runs faster or uses less CPU
- Better memory efficient so that the system uses less RAM
- Better storage efficient so that the system stores less data on disk.
- Better network I/O efficiency so that less data is sent over the network
- Better disk I/O efficiency so that less data is read and written from disk
Make up to five suggestions. Your suggestions must meet all of the following criteria:
1. Your suggestions should detailed, technical and include concrete examples.
2. Your suggestions should be specific to improving performance of a system in which the ${functionName} function from
the ${library} library is consuming significant CPU.
3. If you suggest replacing the function or library with a more efficient replacement you must suggest at least
one concrete replacement.
If you know of fewer than five ways to improve the performance of a system in which the ${functionName} function from the
${library} library is consuming significant CPU, then provide fewer than five suggestions. If you do not know of any
way in which to improve the performance then say "I do not know how to improve the performance of systems where
this function is consuming a significant amount of CPU".
Do not suggest using a CPU profiler. I have already profiled my code. The profiler I used is Elastic Universal Profiler.
If there is specific information I should look for in the profiler output then tell me what information to look for
in the output of Elastic Universal Profiler.
You must not include URLs, web addresses or websites of any kind in your output.
If you have suggestions, the output format should look as follows:
Here are some suggestions as to how you might optimize your system if ${functionName} in ${library} is consuming
significant CPU resources:
1. Insert first suggestion
2. Insert second suggestion`,
role: 'user',
},
];

View file

@ -82,4 +82,4 @@ export {
SYNTHETICS_WAIT_TIMINGS,
} from './field_names/synthetics';
export { CoPilotPromptId, coPilotPrompts } from './co_pilot';
export { CoPilotPromptId, loadCoPilotPrompts } from './co_pilot';

View file

@ -8,22 +8,25 @@ import {
EuiAccordion,
EuiFlexGroup,
EuiFlexItem,
EuiHorizontalRule,
EuiIcon,
EuiLoadingSpinner,
EuiPanel,
EuiSpacer,
EuiText,
EuiToolTip,
useEuiTheme,
} from '@elastic/eui';
import { css } from '@emotion/css';
import { i18n } from '@kbn/i18n';
import React, { useEffect, useMemo, useState } from 'react';
import { TechnicalPreviewBadge } from '@kbn/observability-shared-plugin/public';
import type { ChatCompletionRequestMessage } from 'openai';
import React, { useMemo, useState } from 'react';
import useObservable from 'react-use/lib/useObservable';
import { catchError, Observable, of } from 'rxjs';
import { CoPilotPromptId } from '../../../common';
import type { PromptParamsOf } from '../../../common/co_pilot';
import type { CoPilotService, PromptObservableState } from '../../typings/co_pilot';
import { CoPilotPromptFeedback } from './co_pilot_prompt_feedback';
const cursorCss = css`
@keyframes blink {
@ -51,6 +54,7 @@ export interface CoPilotPromptProps<TPromptId extends CoPilotPromptId> {
promptId: TPromptId;
coPilot: CoPilotService;
params: PromptParamsOf<TPromptId>;
feedbackEnabled: boolean;
}
// eslint-disable-next-line import/no-default-export
@ -59,26 +63,47 @@ export default function CoPilotPrompt<TPromptId extends CoPilotPromptId>({
coPilot,
promptId,
params,
feedbackEnabled,
}: CoPilotPromptProps<TPromptId>) {
const [hasOpened, setHasOpened] = useState(false);
const theme = useEuiTheme();
const [responseTime, setResponseTime] = useState<number | undefined>(undefined);
const conversation$ = useMemo(() => {
return hasOpened
? coPilot
.prompt(promptId, params)
.pipe(
catchError((err) => of({ loading: false, error: err, message: String(err.message) }))
)
: new Observable<PromptObservableState>(() => {});
}, [params, promptId, coPilot, hasOpened]);
if (hasOpened) {
setResponseTime(undefined);
const now = Date.now();
const observable = coPilot.prompt(promptId, params).pipe(
catchError((err) =>
of({
messages: [] as ChatCompletionRequestMessage[],
loading: false,
error: err,
message: String(err.message),
})
)
);
observable.subscribe({
complete: () => {
setResponseTime(Date.now() - now);
},
});
return observable;
}
return new Observable<PromptObservableState & { error?: any }>(() => {});
}, [params, promptId, coPilot, hasOpened, setResponseTime]);
const conversation = useObservable(conversation$);
useEffect(() => {}, [conversation$]);
const content = conversation?.message ?? '';
const messages = conversation?.messages;
let state: 'init' | 'loading' | 'streaming' | 'error' | 'complete' = 'init';
@ -94,10 +119,26 @@ export default function CoPilotPrompt<TPromptId extends CoPilotPromptId>({
if (state === 'complete' || state === 'streaming') {
inner = (
<p style={{ whiteSpace: 'pre-wrap', lineHeight: 1.5 }}>
{content}
{state === 'streaming' ? <span className={cursorCss} /> : <></>}
</p>
<>
<p style={{ whiteSpace: 'pre-wrap', lineHeight: 1.5 }}>
{content}
{state === 'streaming' ? <span className={cursorCss} /> : undefined}
</p>
{state === 'complete' ? (
<>
<EuiSpacer size="m" />
{coPilot.isTrackingEnabled() && feedbackEnabled ? (
<CoPilotPromptFeedback
messages={messages}
response={content}
responseTime={responseTime!}
promptId={promptId}
coPilot={coPilot}
/>
) : undefined}
</>
) : undefined}
</>
);
} else if (state === 'init' || state === 'loading') {
inner = (
@ -128,10 +169,6 @@ export default function CoPilotPrompt<TPromptId extends CoPilotPromptId>({
);
}
const tooltipContent = i18n.translate('xpack.observability.coPilotPrompt.askCoPilot', {
defaultMessage: 'Ask Observability AI Assistent for help',
});
return (
<EuiPanel color="primary">
<EuiAccordion
@ -148,14 +185,23 @@ export default function CoPilotPrompt<TPromptId extends CoPilotPromptId>({
buttonContent={
<EuiFlexGroup direction="row" alignItems="center">
<EuiFlexItem grow>
<EuiText size="m" color={theme.euiTheme.colors.primaryText}>
<strong>{title}</strong>
</EuiText>
<EuiFlexGroup direction="column" gutterSize="none" justifyContent="center">
<EuiFlexItem grow={false}>
<EuiText size="m" color={theme.euiTheme.colors.primaryText}>
<strong>{title}</strong>
</EuiText>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiText size="s" color={theme.euiTheme.colors.primaryText}>
{i18n.translate('xpack.observability.coPilotChatPrompt.subtitle', {
defaultMessage: 'Get helpful insights from our Elastic AI Assistant',
})}
</EuiText>
</EuiFlexItem>
</EuiFlexGroup>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiToolTip content={tooltipContent}>
<EuiIcon color={theme.euiTheme.colors.primaryText} type="questionInCircle" />
</EuiToolTip>
<TechnicalPreviewBadge />
</EuiFlexItem>
</EuiFlexGroup>
}
@ -164,7 +210,9 @@ export default function CoPilotPrompt<TPromptId extends CoPilotPromptId>({
setHasOpened(true);
}}
>
<EuiSpacer size="s" />
<EuiSpacer size="m" />
<EuiHorizontalRule margin="none" />
<EuiSpacer size="m" />
{inner}
</EuiAccordion>
</EuiPanel>

View file

@ -0,0 +1,131 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
EuiButtonEmpty,
EuiFlexGroup,
EuiFlexItem,
EuiIcon,
EuiText,
useEuiTheme,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import type { ChatCompletionRequestMessage } from 'openai';
import React, { useCallback, useEffect, useState } from 'react';
import { CoPilotPromptId } from '../../../common';
import type { CoPilotService } from '../../typings/co_pilot';
interface Props {
  // Service used to send feedback/telemetry events to the backend.
  coPilot: CoPilotService;
  // Which prompt produced the response being rated.
  promptId: CoPilotPromptId;
  // Conversation that produced the response; undefined until prompts finish loading.
  messages?: ChatCompletionRequestMessage[];
  // Full text of the assistant's response being rated.
  response: string;
  // Time it took to produce the response, in milliseconds.
  responseTime: number;
}
/**
 * Thumbs-up / thumbs-down feedback row rendered below a completed CoPilot
 * response. On mount it reports the response once as telemetry (no feedback
 * action); when the user clicks a button it reports again with a
 * 'thumbsup'/'thumbsdown' action and swaps to a thank-you message.
 */
export function CoPilotPromptFeedback({
  coPilot,
  promptId,
  messages,
  response,
  responseTime,
}: Props) {
  const theme = useEuiTheme();
  const [hasSubmittedFeedback, setHasSubmittedFeedback] = useState(false);
  const submitFeedback = useCallback(
    (positive: boolean) => {
      // NOTE(review): the UI flips to "submitted" even when `messages` is
      // undefined, in which case no track() call is made — confirm that is
      // intentional rather than silently dropping the user's feedback.
      setHasSubmittedFeedback(true);
      if (messages) {
        coPilot
          .track({
            messages,
            response,
            responseTime,
            promptId,
            feedbackAction: positive ? 'thumbsup' : 'thumbsdown',
          })
          // Best-effort: tracking failures are deliberately swallowed so they
          // never surface to the user.
          .catch((err) => {});
      }
    },
    [coPilot, promptId, messages, response, responseTime]
  );
  const [hasSubmittedTelemetry, setHasSubmittedTelemetry] = useState(false);
  // Fire-and-forget telemetry: report the completed response exactly once,
  // as soon as the conversation messages become available.
  useEffect(() => {
    if (!hasSubmittedTelemetry && messages) {
      setHasSubmittedTelemetry(true);
      coPilot
        .track({
          messages,
          response,
          responseTime,
          promptId,
        })
        // Best-effort: telemetry failures are deliberately swallowed.
        .catch((err) => {});
    }
  }, [coPilot, promptId, messages, response, responseTime, hasSubmittedTelemetry]);
  // After either button is clicked, replace the buttons with a confirmation.
  if (hasSubmittedFeedback) {
    return (
      <EuiFlexGroup direction="row" gutterSize="s" alignItems="center">
        <EuiFlexItem grow={false}>
          <EuiIcon type="check" color={theme.euiTheme.colors.primaryText} />
        </EuiFlexItem>
        <EuiFlexItem>
          <EuiText size="s" color={theme.euiTheme.colors.primaryText}>
            {i18n.translate('xpack.observability.coPilotPrompt.feedbackSubmittedText', {
              defaultMessage:
                "Thank you for submitting your feedback! We'll use this to improve responses.",
            })}
          </EuiText>
        </EuiFlexItem>
      </EuiFlexGroup>
    );
  }
  // Initial state: prompt the user for a yes/no helpfulness rating.
  return (
    <EuiFlexGroup direction="row" gutterSize="s" alignItems="center">
      <EuiFlexItem grow={false}>
        <EuiText size="s" color={theme.euiTheme.colors.primaryText}>
          {i18n.translate('xpack.observability.coPilotPrompt.feedbackActionTitle', {
            defaultMessage: 'Did you find this response helpful?',
          })}
        </EuiText>
      </EuiFlexItem>
      <EuiFlexItem grow={false}>
        <EuiButtonEmpty
          size="xs"
          data-test-subj="CoPilotPromptButtonHappy"
          iconType="faceHappy"
          onClick={() => {
            submitFeedback(true);
          }}
        >
          {i18n.translate('xpack.observability.coPilotPrompt.likedFeedbackButtonTitle', {
            defaultMessage: 'Yes',
          })}
        </EuiButtonEmpty>
      </EuiFlexItem>
      <EuiFlexItem grow={false}>
        <EuiButtonEmpty
          size="xs"
          data-test-subj="CoPilotPromptButtonSad"
          iconType="faceSad"
          onClick={() => {
            submitFeedback(false);
          }}
        >
          {i18n.translate('xpack.observability.coPilotPrompt.dislikedFeedbackButtonTitle', {
            defaultMessage: 'No',
          })}
        </EuiButtonEmpty>
      </EuiFlexItem>
    </EuiFlexGroup>
  );
}

View file

@ -6,9 +6,13 @@
*/
import { type HttpSetup } from '@kbn/core/public';
import { concatMap, delay, Observable, of } from 'rxjs';
import { type CreateChatCompletionResponseChunk } from '../../../common/co_pilot';
import { type CoPilotService, type PromptObservableState } from '../../typings/co_pilot';
import { ChatCompletionRequestMessage } from 'openai';
import { BehaviorSubject, concatMap, delay, of } from 'rxjs';
import {
type CreateChatCompletionResponseChunk,
loadCoPilotPrompts,
} from '../../../common/co_pilot';
import type { CoPilotService } from '../../typings/co_pilot';
function getMessageFromChunks(chunks: CreateChatCompletionResponseChunk[]) {
let message = '';
@ -18,94 +22,136 @@ function getMessageFromChunks(chunks: CreateChatCompletionResponseChunk[]) {
return message;
}
export function createCoPilotService({ enabled, http }: { enabled: boolean; http: HttpSetup }) {
export function createCoPilotService({
enabled,
trackingEnabled,
http,
}: {
enabled: boolean;
trackingEnabled: boolean;
http: HttpSetup;
}) {
const service: CoPilotService = {
isEnabled: () => enabled,
isTrackingEnabled: () => trackingEnabled,
prompt: (promptId, params) => {
return new Observable<PromptObservableState>((observer) => {
observer.next({ chunks: [], loading: true });
const subject = new BehaviorSubject({
messages: [] as ChatCompletionRequestMessage[],
loading: true,
message: '',
});
http
.post(`/internal/observability/copilot/prompts/${promptId}`, {
body: JSON.stringify(params),
asResponse: true,
rawResponse: true,
})
.then((response) => {
const status = response.response?.status;
loadCoPilotPrompts()
.then((coPilotPrompts) => {
const messages = coPilotPrompts[promptId].messages(params as any);
subject.next({
messages,
loading: true,
message: '',
});
if (!status || status >= 400) {
throw new Error(response.response?.statusText || 'Unexpected error');
}
http
.post(`/internal/observability/copilot/prompts/${promptId}`, {
body: JSON.stringify(params),
asResponse: true,
rawResponse: true,
})
.then((response) => {
const status = response.response?.status;
const reader = response.response.body?.getReader();
if (!status || status >= 400) {
throw new Error(response.response?.statusText || 'Unexpected error');
}
if (!reader) {
throw new Error('Could not get reader from response');
}
const reader = response.response.body?.getReader();
const decoder = new TextDecoder();
if (!reader) {
throw new Error('Could not get reader from response');
}
const chunks: CreateChatCompletionResponseChunk[] = [];
const decoder = new TextDecoder();
let prev: string = '';
const chunks: CreateChatCompletionResponseChunk[] = [];
function read() {
reader!.read().then(({ done, value }) => {
try {
if (done) {
observer.next({
chunks,
message: getMessageFromChunks(chunks),
loading: false,
let prev: string = '';
function read() {
reader!.read().then(({ done, value }) => {
try {
if (done) {
subject.next({
messages,
message: getMessageFromChunks(chunks),
loading: false,
});
subject.complete();
return;
}
let lines = (prev + decoder.decode(value)).split('\n');
const lastLine = lines[lines.length - 1];
const isPartialChunk = !!lastLine && lastLine !== 'data: [DONE]';
if (isPartialChunk) {
prev = lastLine;
lines.pop();
} else {
prev = '';
}
lines = lines
.map((str) => str.substr(6))
.filter((str) => !!str && str !== '[DONE]');
const nextChunks: CreateChatCompletionResponseChunk[] = lines.map((line) =>
JSON.parse(line)
);
nextChunks.forEach((chunk) => {
chunks.push(chunk);
subject.next({
messages,
message: getMessageFromChunks(chunks),
loading: true,
});
});
observer.complete();
} catch (err) {
subject.error(err);
return;
}
read();
});
}
let lines = (prev + decoder.decode(value)).split('\n');
const lastLine = lines[lines.length - 1];
const isPartialChunk = !!lastLine && lastLine !== 'data: [DONE]';
if (isPartialChunk) {
prev = lastLine;
lines.pop();
} else {
prev = '';
}
lines = lines
.map((str) => str.substr(6))
.filter((str) => !!str && str !== '[DONE]');
const nextChunks: CreateChatCompletionResponseChunk[] = lines.map((line) =>
JSON.parse(line)
);
nextChunks.forEach((chunk) => {
chunks.push(chunk);
observer.next({ chunks, message: getMessageFromChunks(chunks), loading: true });
});
} catch (err) {
observer.error(err);
return;
read();
})
.catch(async (err) => {
if ('response' in err) {
try {
const responseBody = await err.response.json();
err.message = responseBody.message;
} catch {
// leave message as-is
}
read();
});
}
}
subject.error(err);
});
})
.catch((err) => {});
read();
return () => {
reader.cancel();
};
})
.catch((err) => {
observer.error(err);
});
}).pipe(concatMap((value) => of(value).pipe(delay(50))));
return subject.pipe(concatMap((value) => of(value).pipe(delay(25))));
},
track: async ({ messages, response, responseTime, feedbackAction, promptId }) => {
await http.post(`/internal/observability/copilot/prompts/${promptId}/track`, {
body: JSON.stringify({
response,
feedbackAction,
messages,
responseTime,
}),
});
},
};

View file

@ -90,6 +90,9 @@ const withCore = makeDecorator({
compositeSlo: { enabled: false },
aiAssistant: {
enabled: false,
feedback: {
enabled: false,
},
},
};

View file

@ -46,8 +46,11 @@ jest.spyOn(pluginContext, 'usePluginContext').mockImplementation(() => ({
compositeSlo: {
enabled: false,
},
coPilot: {
aiAssistant: {
enabled: false,
feedback: {
enabled: false,
},
},
},
observabilityRuleTypeRegistry: createObservabilityRuleTypeRegistryMock(),

View file

@ -93,7 +93,10 @@ export interface ConfigSchema {
};
compositeSlo: { enabled: boolean };
aiAssistant?: {
enabled?: boolean;
enabled: boolean;
feedback: {
enabled: boolean;
};
};
}
export type ObservabilityPublicSetup = ReturnType<Plugin['setup']>;
@ -343,6 +346,7 @@ export class Plugin
this.coPilotService = createCoPilotService({
enabled: !!config.aiAssistant?.enabled,
http: coreSetup.http,
trackingEnabled: !!config.aiAssistant?.feedback.enabled,
});
return {

View file

@ -5,23 +5,28 @@
* 2.0.
*/
import type { ChatCompletionRequestMessage } from 'openai';
import type { Observable } from 'rxjs';
import {
type CoPilotPromptId,
type PromptParamsOf,
type CreateChatCompletionResponseChunk,
} from '../../common/co_pilot';
import type { CoPilotPromptId, PromptParamsOf } from '../../common/co_pilot';
export interface PromptObservableState {
chunks: CreateChatCompletionResponseChunk[];
message?: string;
messages: ChatCompletionRequestMessage[];
loading: boolean;
}
export interface CoPilotService {
isEnabled: () => boolean;
isTrackingEnabled: () => boolean;
prompt<TPromptId extends CoPilotPromptId>(
promptId: TPromptId,
params: PromptParamsOf<TPromptId>
): Observable<PromptObservableState>;
track: (options: {
messages: ChatCompletionRequestMessage[];
response: string;
promptId: CoPilotPromptId;
feedbackAction?: 'thumbsup' | 'thumbsdown';
responseTime: number;
}) => Promise<void>;
}

View file

@ -37,6 +37,9 @@ export function KibanaReactStorybookDecorator(Story: ComponentType) {
compositeSlo: { enabled: false },
aiAssistant: {
enabled: false,
feedback: {
enabled: false,
},
},
};
const mockTheme: CoreTheme = {

View file

@ -41,6 +41,9 @@ const defaultConfig: ConfigSchema = {
compositeSlo: { enabled: false },
aiAssistant: {
enabled: false,
feedback: {
enabled: false,
},
},
};

View file

@ -62,6 +62,9 @@ export const config: PluginConfigDescriptor = {
unsafe: true,
aiAssistant: {
enabled: true,
feedback: {
enabled: true,
},
},
},
schema: configSchema,

View file

@ -25,6 +25,7 @@ import { SharePluginSetup } from '@kbn/share-plugin/server';
import { SpacesPluginSetup } from '@kbn/spaces-plugin/server';
import type { GuidedOnboardingPluginSetup } from '@kbn/guided-onboarding-plugin/server';
import { UsageCollectionSetup } from '@kbn/usage-collection-plugin/server';
import { CloudSetup } from '@kbn/cloud-plugin/server';
import {
kubernetesGuideId,
kubernetesGuideConfig,
@ -57,6 +58,7 @@ interface PluginSetup {
share: SharePluginSetup;
spaces?: SpacesPluginSetup;
usageCollection?: UsageCollectionSetup;
cloud?: CloudSetup;
}
interface PluginStart {
@ -249,7 +251,12 @@ export class ObservabilityPlugin implements Plugin<ObservabilityPluginSetup> {
core.getStartServices().then(([coreStart, pluginStart]) => {
registerRoutes({
core,
config,
dependencies: {
pluginsSetup: {
...plugins,
core,
},
ruleDataService,
getRulesClientWithRequest: pluginStart.alerting.getRulesClientWithRequest,
getOpenAIClient: () => openAIService?.client,

View file

@ -6,11 +6,13 @@
*/
import Boom from '@hapi/boom';
import { ServerRoute } from '@kbn/server-route-repository';
import axios from 'axios';
import * as t from 'io-ts';
import { map } from 'lodash';
import { CreateChatCompletionResponse } from 'openai';
import { ChatCompletionRequestMessageRoleEnum, CreateChatCompletionResponse } from 'openai';
import { Readable } from 'stream';
import { CoPilotPromptMap, coPilotPrompts } from '../../../common/co_pilot';
import { CoPilotPromptMap } from '../../../common/co_pilot';
import { coPilotPrompts } from '../../../common/co_pilot/prompts';
import { createObservabilityServerRoute } from '../create_observability_server_route';
import { ObservabilityRouteCreateOptions, ObservabilityRouteHandlerResources } from '../types';
@ -31,7 +33,7 @@ const promptRoutes: {
body: prompt.params,
}),
options: {
tags: [],
tags: ['ai_assistant'],
},
handler: async (resources): Promise<CreateChatCompletionResponse | Readable> => {
const client = resources.dependencies.getOpenAIClient();
@ -40,12 +42,79 @@ const promptRoutes: {
throw Boom.notImplemented();
}
return client.chatCompletion.create(prompt.messages(resources.params.body as any));
try {
return await client.chatCompletion.create(prompt.messages(resources.params.body as any));
} catch (error: any) {
if (axios.isAxiosError(error) && error.response?.status === 401) {
throw Boom.forbidden(error.response?.statusText);
}
throw error;
}
},
});
})
);
/**
 * POST /internal/observability/copilot/prompts/{promptId}/track
 *
 * Receives a completed CoPilot conversation (plus optional thumbs-up/down
 * feedback) and forwards it to the external feedback collection endpoint
 * configured at `aiAssistant.feedback.url`. Guarded by the 'ai_assistant'
 * route tag and disabled (501) when the AI Assistant feature is off.
 */
const trackRoute = createObservabilityServerRoute({
  endpoint: 'POST /internal/observability/copilot/prompts/{promptId}/track',
  params: t.type({
    path: t.type({
      promptId: t.string,
    }),
    body: t.intersection([
      t.type({
        // Response latency as measured by the client, in milliseconds.
        responseTime: t.number,
        // The conversation messages; roles restricted to system/user/assistant.
        messages: t.array(
          t.intersection([
            t.type({
              role: t.union([
                t.literal(ChatCompletionRequestMessageRoleEnum.System),
                t.literal(ChatCompletionRequestMessageRoleEnum.User),
                t.literal(ChatCompletionRequestMessageRoleEnum.Assistant),
              ]),
              content: t.string,
            }),
            t.partial({
              name: t.string,
            }),
          ])
        ),
        // Full text of the assistant's response.
        response: t.string,
      }),
      // feedbackAction is absent for the automatic telemetry call and present
      // only when the user explicitly rated the response.
      t.partial({
        feedbackAction: t.union([t.literal('thumbsup'), t.literal('thumbsdown')]),
      }),
    ]),
  }),
  options: {
    tags: ['ai_assistant'],
  },
  handler: async (resources): Promise<void> => {
    const { params, config } = resources;
    if (!config.aiAssistant?.enabled) {
      throw Boom.notImplemented();
    }
    const feedbackBody = {
      prompt_name: params.path.promptId,
      feedback_action: params.body.feedbackAction,
      // Provider config is a union; the presence of the 'openAI' key
      // discriminates OpenAI from Azure OpenAI.
      model:
        'openAI' in config.aiAssistant.provider
          ? config.aiAssistant.provider.openAI.model
          : config.aiAssistant.provider.azureOpenAI.resourceName,
      response_time: params.body.responseTime,
      conversation: [
        // Strip any extra fields (e.g. `name`) before forwarding.
        ...params.body.messages.map(({ role, content }) => ({ role, content })),
        // NOTE(review): the assistant's response is appended with role
        // 'system' — confirm the feedback service expects this rather than
        // role 'assistant'.
        { role: 'system', content: params.body.response },
      ],
    };
    await axios.post(config.aiAssistant.feedback.url, feedbackBody);
  },
});
export const observabilityCoPilotRouteRepository = {
...promptRoutes,
...trackRoute,
};

View file

@ -16,12 +16,14 @@ import {
} from '@kbn/server-route-repository';
import axios from 'axios';
import * as t from 'io-ts';
import { ObservabilityConfig } from '..';
import { getHTTPResponseCode, ObservabilityError } from '../errors';
import { IOpenAIClient } from '../services/openai/types';
import { ObservabilityRequestHandlerContext } from '../types';
import { AbstractObservabilityServerRouteRepository } from './types';
interface RegisterRoutes {
config: ObservabilityConfig;
core: CoreSetup;
repository: AbstractObservabilityServerRouteRepository;
logger: Logger;
@ -29,12 +31,15 @@ interface RegisterRoutes {
}
export interface RegisterRoutesDependencies {
pluginsSetup: {
core: CoreSetup;
};
ruleDataService: RuleDataPluginService;
getRulesClientWithRequest: (request: KibanaRequest) => RulesClientApi;
getOpenAIClient: () => IOpenAIClient | undefined;
}
export function registerRoutes({ repository, core, logger, dependencies }: RegisterRoutes) {
export function registerRoutes({ config, repository, core, logger, dependencies }: RegisterRoutes) {
const routes = Object.values(repository);
const router = core.http.createRouter();
@ -60,13 +65,14 @@ export function registerRoutes({ repository, core, logger, dependencies }: Regis
params ?? t.strict({})
);
const data = (await handler({
const data = await handler({
config,
context,
request,
logger,
params: decodedParams,
dependencies,
})) as any;
});
if (data === undefined) {
return response.noContent();

View file

@ -10,6 +10,7 @@ import { KibanaRequest, Logger } from '@kbn/core/server';
import { ObservabilityServerRouteRepository } from './get_global_observability_server_route_repository';
import { ObservabilityRequestHandlerContext } from '../types';
import { RegisterRoutesDependencies } from './register_routes';
import { ObservabilityConfig } from '..';
export type { ObservabilityServerRouteRepository };
@ -18,6 +19,7 @@ export interface ObservabilityRouteHandlerResources {
dependencies: RegisterRoutesDependencies;
logger: Logger;
request: KibanaRequest;
config: ObservabilityConfig;
}
export interface ObservabilityRouteCreateOptions {

View file

@ -24,6 +24,12 @@ export const azureOpenAIConfig = schema.object({
// Opt-in feedback reporting settings for the Co-Pilot.
// NOTE(review): the default `url` is a hard-coded AWS API Gateway endpoint —
// confirm this is intended to ship as a default.
const feedbackConfig = schema.object({
  enabled: schema.boolean({ defaultValue: false }),
  url: schema.string({
    defaultValue: `https://0d0uj24psl.execute-api.us-east-1.amazonaws.com/gaifeedback`,
  }),
});

/**
 * Config schema for the Observability Co-Pilot. The feature is disabled by
 * default; exactly one provider (OpenAI or Azure OpenAI) must validate.
 */
export const observabilityCoPilotConfig = schema.object({
  enabled: schema.boolean({ defaultValue: false }),
  feedback: feedbackConfig,
  provider: schema.oneOf([openAIConfig, azureOpenAIConfig]),
});

View file

@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { EuiBetaBadge, IconType } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import React from 'react';
type Props = {
  icon?: IconType;
} & Pick<React.ComponentProps<typeof EuiBetaBadge>, 'size' | 'style'>;

/**
 * Renders the standard "Technical preview" beta badge with the shared
 * disclaimer tooltip. Uses the `beaker` icon unless another icon is given;
 * `size` and `style` are forwarded to `EuiBetaBadge`.
 */
export function TechnicalPreviewBadge({ icon = 'beaker', size, style }: Props) {
  const label = i18n.translate('xpack.observabilityShared.technicalPreviewBadgeLabel', {
    defaultMessage: 'Technical preview',
  });

  const tooltipContent = i18n.translate(
    'xpack.observabilityShared.technicalPreviewBadgeDescription',
    {
      defaultMessage:
        'This functionality is in technical preview and may be changed or removed completely in a future release. Elastic will take a best effort approach to fix any issues, but features in technical preview are not subject to the support SLA of official GA features.',
    }
  );

  return (
    <EuiBetaBadge
      label={label}
      tooltipContent={tooltipContent}
      iconType={icon}
      size={size}
      style={style}
    />
  );
}

View file

@ -35,6 +35,8 @@ export {
export type { SectionLinkProps } from './components/section/section';
export { LoadWhenInView } from './components/load_when_in_view/get_load_when_in_view_lazy';
export { TechnicalPreviewBadge } from './components/technical_preview_badge/technical_preview_badge';
export { InspectorContextProvider } from './contexts/inspector/inspector_context';
export type { AddInspectorRequest } from './contexts/inspector/inspector_context';
export { useInspectorContext } from './contexts/inspector/use_inspector_context';

View file

@ -105,16 +105,6 @@ export function FrameInformationWindow({ frame, totalSamples, totalSeconds, samp
</EuiFlexItem>
{coPilotService?.isEnabled() && promptParams ? (
<>
<EuiFlexItem>
<CoPilotPrompt
coPilot={coPilotService}
promptId={CoPilotPromptId.ProfilingExplainFunction}
params={promptParams}
title={i18n.translate('xpack.profiling.frameInformationWindow.explainFunction', {
defaultMessage: 'Explain function',
})}
/>
</EuiFlexItem>
<EuiFlexItem>
<CoPilotPrompt
coPilot={coPilotService}
@ -123,6 +113,7 @@ export function FrameInformationWindow({ frame, totalSamples, totalSeconds, samp
title={i18n.translate('xpack.profiling.frameInformationWindow.optimizeFunction', {
defaultMessage: 'Optimize function',
})}
feedbackEnabled={true}
/>
</EuiFlexItem>
</>

View file

@ -27,7 +27,7 @@ export const PROFILING_FEATURE = {
read: [],
},
ui: ['show'],
api: [PROFILING_SERVER_FEATURE_ID],
api: [PROFILING_SERVER_FEATURE_ID, 'ai_assistant'],
},
read: {
app: [PROFILING_SERVER_FEATURE_ID, 'ux', 'kibana'],
@ -36,7 +36,7 @@ export const PROFILING_FEATURE = {
read: [],
},
ui: ['show'],
api: [PROFILING_SERVER_FEATURE_ID],
api: [PROFILING_SERVER_FEATURE_ID, 'ai_assistant'],
},
},
};